ansysedt -monitor -UseElectronicsPPE -ng -distributed -machinelist list=localhost:1:$SLURM_NTASKS \
-batchoptions $OPTIONS_TXT -batchsolve "$YOUR_AEDT_FILE"
}}
</tab>
</tabs>
== Ansys ROCKY == <!--T:109-->

<!--T:1091-->
Ansys Rocky can be run interactively in headless (no GUI) mode on a Graham compute node to test a simulation before running it inside a Slurm script. First reserve a compute node with <code>salloc --time=04:00:00 --nodes=1 --tasks=3 [--gpus=t4:1] --mem=32G --account=def-account</code>, then load the locally installed modules with <code>module load rocky/2023R2 ansys/2023R2</code>, and finally run <code>Rocky --simulate "mysim.rocky" --resume=0 --ncpus=$SLURM_NTASKS [--use-gpu=USE_GPU --gpu-num=GPU_NUM]</code>, where the options in square brackets are only needed when using Rocky with a [https://www.ansys.com/blog/mastering-multi-gpu-ansys-rocky-software-enhancing-its-performance gpu].
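The same workflow, collected into one sequence for reference (a minimal sketch: the account name, resource requests and the <code>mysim.rocky</code> project file are placeholders to adjust for your own case):
{{File
|name=rocky-interactive-example.txt
|lang="bash"
|contents=
# Minimal sketch of the interactive headless test described above.
# Run salloc from a Graham login node; the remaining commands are entered in the
# shell that salloc opens on the allocated compute node.
salloc --time=04:00:00 --nodes=1 --tasks=3 --mem=32G --account=def-account

# Load the locally installed modules (add --gpus=t4:1 to the salloc above to test on a GPU)
module load rocky/2023R2 ansys/2023R2

# Run the simulation headless; --ncpus should match the number of reserved tasks
Rocky --simulate "mysim.rocky" --resume=0 --ncpus=$SLURM_NTASKS
}}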
=== Slurm scripts === <!--T:1092-->

<!--T:1093-->
Ansys Rocky batch jobs may be submitted to the Graham cluster queue with the following two scripts. These scripts are only usable on Graham since Rocky is currently installed (locally) only on this cluster. A full listing of command line options can be obtained by running Rocky on the command line with the <code>-h</code> switch. Please note these scripts have not been tested as of this writing. If a coupled solution is being run (for instance with Ansys Fluent), then both solvers should use CPUs of the same node. When running Rocky with more than 4 CPUs, <I>rocky_hpc</I> licenses will be used; these are included in the SHARCNET license.
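Once saved, a script is submitted to the Graham queue in the usual way, for example with <code>sbatch script-rocky-cpu.sh</code>.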
<!--T:1577-->
<tabs>
<tab name="CPU only">
{{File
|name=script-rocky-cpu.sh
|lang="bash"
|contents=
#!/bin/bash

<!--T:2809-->
#SBATCH --account=account       # Specify your account (def or rrg)
#SBATCH --time=00-02:00         # Specify time (DD-HH:MM)
#SBATCH --mem=24G               # Specify memory (set to 0 for all node memory)
#SBATCH --cpus-per-task=6       # Specify cores (graham 32 or 44 to use all cores)
#SBATCH --nodes=1               # Request one node (do not change)

<!--T:2839-->
module load StdEnv/2023
module load rocky/2023R2 ansys/2023R2    # only available on graham (do not change)

<!--T:2810-->
Rocky --simulate "mysim.rocky" --resume=0 --ncpus=$SLURM_CPUS_PER_TASK --use-gpu=0
}}
</tab>
<tab name="GPU based">
{{File
|name=script-rocky-gpu.sh
|lang="bash"
|contents=
#!/bin/bash

<!--T:2816-->
#SBATCH --account=account       # Specify your account (def or rrg)
#SBATCH --time=00-01:00         # Specify time (DD-HH:MM)
#SBATCH --mem=24G               # Specify memory (set to 0 for all node memory)
#SBATCH --cpus-per-task=6       # Specify cores (graham 32 or 44 to use all cores)
#SBATCH --gres=gpu:t4:2         # Specify gpu type : gpu quantity (4 max)
#SBATCH --nodes=1               # Request one node (do not change)

<!--T:2839-->
module load StdEnv/2023
module load rocky/2023R2 ansys/2023R2    # only available on graham (do not change)

<!--T:2810-->
Rocky --simulate "mysim.rocky" --resume=0 --ncpus=$SLURM_CPUS_PER_TASK --use-gpu=1 --gpu-num=$SLURM_GPUS_ON_NODE
}}
</tab>