== Application Details ==
* Description: ORCA is an ab initio quantum chemistry program package for electronic structure calculations, supporting density functional theory, many-body perturbation theory, coupled-cluster and multireference methods, with a particular emphasis on the spectroscopic properties of open-shell molecules.
* Versions: 4.0.0, 4.0.1, 4.1.0, 4.1.1, 4.1.2, 4.2.0, 4.2.1 and 5.0.2
* Module names: orca/4.0.0, orca/4.0.1, orca/4.1.0/openmpi213, orca/4.1.0/openmpi313, orca/4.1.1, orca/4.1.2, orca/4.2.0, orca/4.2.1 and orca/5.0.2
* License: Free for academic use
'''Note''': ORCA versions above 4.0.0 require OpenMPI 3.
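To confirm which MPI an ORCA module brings into the environment, load it and query mpirun, as the batch script below also does. A minimal check, using module names from the list that follows:

<pre>
# Load an ORCA build and report the MPI version it pulls in
module purge
module add orca/4.1.0/openmpi313
mpirun --version
</pre>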
== Modules Available ==
* module add orca/4.0.0
* module add orca/4.0.1
* module add orca/4.1.0/openmpi213
* module add orca/4.1.0/openmpi313
* module add orca/4.1.1
* module add orca/4.1.2
* module add orca/4.2.0
* module add orca/4.2.1
* module add orca/5.0.2
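After adding a module, module list and which orca confirm what was loaded, for example:

<pre>
# Verify the loaded modules and the resolved orca binary
module add orca/5.0.2
module list
which orca
</pre>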
== Usage Examples ==

=== Batch Submission ===
<pre>
#!/bin/bash
#SBATCH -J orc_4N
#SBATCH -N 1
#SBATCH --ntasks-per-node 28
#SBATCH -o %N.%j.%a.out
#SBATCH -e %N.%j.%a.err
#SBATCH -p compute
#SBATCH --exclusive
echo $SLURM_JOB_NODELIST
module purge
module load orca/5.0.2
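# Interconnect settings for Intel MPI; harmless when the MPI in use is
# OpenMPI, which ignores I_MPI_* variables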
export I_MPI_FABRICS=shm:tmi
export I_MPI_FALLBACK=no
module list
mpirun --version
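# ORCA must be invoked via its full path for parallel (MPI) runs,
# so resolve the binary location with which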
export ORCA=`which orca`
export OMP_NUM_THREADS=2
echo "Using ORCA located at " $ORCA
#CHANGE HERE FOR INPUT FILE (.inp)
inpfile=py-lpno-pccsd-freq.inp
# Derive the output file name by replacing inp with out in the input file name
outfile=(${inpfile//inp/out})
echo writing output into: $outfile
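# Per-job scratch directory; /local is assumed to be node-local storage,
# keeping ORCA's heavy temporary file I/O off the shared filesystem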
SCRATCH=/local/$USER/$SLURM_JOB_ID
echo Creating temp dir $SCRATCH
mkdir -p $SCRATCH || exit $?
echo Copying files. srun cp is equivalent to a loop over each node + scp
cp -r $SLURM_SUBMIT_DIR/$inpfile $SCRATCH || exit $?
#cp -r $SLURM_SUBMIT_DIR/oh-pi-p-b3lyp-freq.hess $SCRATCH || exit $?
cd $SCRATCH
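# Run ORCA on the input file; the main text output streams directly
# back to the submission directory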
$ORCA $inpfile > $SLURM_SUBMIT_DIR/$outfile
echo calculation finished - copying files back to the submission directory
cp $SCRATCH/* $SLURM_SUBMIT_DIR
cp $SCRATCH/*.gbw $SLURM_SUBMIT_DIR
cp $SCRATCH/*.hess $SLURM_SUBMIT_DIR
cp $SCRATCH/*.trj $SLURM_SUBMIT_DIR
cp $SCRATCH/*.xyz* $SLURM_SUBMIT_DIR
echo calculation finished - removing scratch dir
rm -rf $SCRATCH
</pre>
<pre style="background-color: black; color: white; border: 2px solid black; font-family: monospace, sans-serif;">
[username@login01 ~]$ sbatch orca-test.job
Submitted batch job 3189522
</pre>
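The queued job can then be monitored with the usual Slurm commands, for example:

<pre style="background-color: black; color: white; border: 2px solid black; font-family: monospace, sans-serif;">
[username@login01 ~]$ squeue -u $USER
</pre>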
{|
|style="width:5%; border-width: 0" | [[File:icon_tick.png]]
|style="width:95%; border-width: 0" | '''Orca version 4.0.0''' requires openMPI version 3 (openmpi3)
|-
|}

== Further Information ==

* [https://orcaforum.kofo.mpg.de/index.php https://orcaforum.kofo.mpg.de/index.php]

{{Modulepagenav}}