Applications/Orca

Application Details

  • Description: ORCA is an ab initio quantum chemistry program package for electronic structure calculations, covering density functional theory, semi-empirical methods and wavefunction-based correlation methods, with a particular emphasis on the spectroscopic properties of open-shell molecules.
  • Versions: 4.0.0, 4.0.1, 4.1.0, 4.1.1, 4.1.2, 4.2.0, 4.2.1 and 5.0.2
  • Module names: orca/4.0.0, orca/4.0.1, orca/4.1.0/openmpi213, orca/4.1.0/openmpi313, orca/4.1.1, orca/4.1.2, orca/4.2.0, orca/4.2.1 and orca/5.0.2
  • License: Free for academic use

Note: ORCA versions newer than 4.0.0 require OpenMPI 3.
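
To check which MPI build a particular ORCA module is tied to before submitting a job, the module system can be queried on the login node (module name taken from the list in the next section; the exact output depends on the module system in use):

[username@login01 ~]$ module show orca/4.1.0/openmpi313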


Modules Available

  • module add orca/4.0.0
  • module add orca/4.0.1
  • module add orca/4.1.0/openmpi213
  • module add orca/4.1.0/openmpi313
  • module add orca/4.1.1
  • module add orca/4.1.2
  • module add orca/4.2.0
  • module add orca/4.2.1
  • module add orca/5.0.2
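
After loading one of the modules above, the MPI that ends up on the path can be confirmed directly (the batch script below runs the same mpirun --version check):

[username@login01 ~]$ module purge
[username@login01 ~]$ module add orca/4.2.1
[username@login01 ~]$ mpirun --version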


Usage Examples

Batch Submission


#!/bin/bash
#SBATCH -J orc_4N
#SBATCH -N 1
#SBATCH --ntasks-per-node 28
#SBATCH -o %N.%j.%a.out
#SBATCH -e %N.%j.%a.err
#SBATCH -p compute
#SBATCH --exclusive

echo $SLURM_JOB_NODELIST

module purge
module load orca/5.0.2

export I_MPI_FABRICS=shm:tmi
export I_MPI_FALLBACK=no
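#Note: the I_MPI_* variables above only affect Intel MPI builds; OpenMPI ignores them.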

module list

mpirun --version

export ORCA=`which orca`
export OMP_NUM_THREADS=2
echo "Using ORCA located at " $ORCA

#CHANGE HERE FOR INPUT FILE (.inp)
inpfile=py-lpno-pccsd-freq.inp

#Derive the output file name (.out) from the input file name
outfile=${inpfile%.inp}.out
echo "writing output into: $outfile"

SCRATCH=/local/$USER/$SLURM_JOB_ID
echo Creating temp dir $SCRATCH
mkdir -p $SCRATCH || exit $?
echo "Copying files (for multi-node jobs, srun cp is equivalent to looping scp over each node)"
cp -r $SLURM_SUBMIT_DIR/$inpfile  $SCRATCH || exit $?
#cp -r $SLURM_SUBMIT_DIR/oh-pi-p-b3lyp-freq.hess  $SCRATCH || exit $?

cd $SCRATCH
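
#Optional sanity check: ORCA takes its parallel process count from the input
#file (e.g. a "%pal nprocs N end" block) rather than from mpirun, so warn if
#that value does not match what Slurm allocated.
ntasks=${SLURM_NTASKS:-$SLURM_NTASKS_PER_NODE}
nprocs_in_input=$(grep -i 'nprocs' "$inpfile" | grep -oE '[0-9]+' | head -n 1)
if [ -n "$nprocs_in_input" ] && [ -n "$ntasks" ] && [ "$nprocs_in_input" != "$ntasks" ]; then
    echo "WARNING: input requests $nprocs_in_input MPI processes, Slurm allocated $ntasks"
fi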

$ORCA   $inpfile > $SLURM_SUBMIT_DIR/$outfile

echo "calculation finished - copying files back to the submission directory"
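#Copy results back from scratch: .gbw (wavefunction), .hess (Hessian),
#.trj (trajectory) and .xyz (geometry) are the ORCA outputs typically needed
#afterwards; the plain wildcard copy below already includes them.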

cp $SCRATCH/* $SLURM_SUBMIT_DIR
cp $SCRATCH/*.gbw $SLURM_SUBMIT_DIR
cp $SCRATCH/*.hess $SLURM_SUBMIT_DIR
cp $SCRATCH/*.trj $SLURM_SUBMIT_DIR
cp $SCRATCH/*.xyz* $SLURM_SUBMIT_DIR

echo calculation finished - removing scratch dir
rm -rf  $SCRATCH


[username@login01 ~]$ sbatch orca-test.job
Submitted batch job 3189522
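
Once submitted, the job can be followed with the standard Slurm tools, for example (job ID as reported by sbatch above):

[username@login01 ~]$ squeue -u $USER
[username@login01 ~]$ sacct -j 3189522 --format=JobID,JobName,State,Elapsed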
Note: ORCA version 4.0.0 requires OpenMPI version 3 (openmpi3).

Further Information

  • ORCA forum: https://orcaforum.kofo.mpg.de/index.php