Applications/Orca
Application Details
- Description : ORCA is an ab initio quantum chemistry program package for electronic structure calculations, covering density functional theory, semi-empirical methods, and correlated wavefunction methods such as coupled cluster.
- Versions : 3.0.0, 3.0.3, 4.0.0, 4.0.1, 4.1.0 and 4.1.1
- Module names : orca/3.0.0, orca/3.0.3, orca/4.0.0, orca/4.0.1, orca/4.1.0/openmpi212, orca/4.1.0/openmpi313 and orca/4.1.1/openmpi313
- License: Free for academic use
Note : ORCA versions > 4.0.0 require OpenMPI 3.
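The MPI dependency can be checked directly on a login node; a minimal sketch, assuming orca/4.1.1/openmpi313 is the module of interest (the exact output depends on the local installation):

module purge
module add orca/4.1.1/openmpi313
module list        # the orca module and its MPI dependency should now be listed
mpirun --version   # expect an Open MPI 3.x version string for ORCA > 4.0.0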
Modules Available
- module add orca/3.0.0
- module add orca/3.0.3
- module add orca/4.0.0
- module add orca/4.0.1
- module add orca/4.1.0/openmpi212
- module add orca/4.1.0/openmpi313
- module add orca/4.1.1/openmpi313
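To see which of these builds are installed and where the orca wrapper ends up, the module system can be queried interactively; a short sketch, using the same version as the batch example below:

module avail orca      # list the orca modules installed on the cluster
module purge
module add orca/4.0.1  # version loaded in the batch script below
which orca             # full path to the orca wrapper provided by the module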
Usage Examples
Batch Submission
#!/bin/bash
#SBATCH -J orc_4N
#SBATCH -N 1
#SBATCH --ntasks-per-node 28
#SBATCH -o %N.%j.%a.out
#SBATCH -e %N.%j.%a.err
#SBATCH -p compute
#SBATCH --exclusive

echo $SLURM_JOB_NODELIST

module purge
module load orca/4.0.1

export I_MPI_FABRICS=shm:tmi
export I_MPI_FALLBACK=no

module list
mpirun --version

export ORCA=`which orca`
export OMP_NUM_THREADS=2
echo "Using ORCA located at " $ORCA

#CHANGE HERE FOR INPUT FILE (.inp)
inpfile=py-lpno-pccsd-freq.inp

#test out
outfile=(${inpfile//inp/out})
echo writing output into : $outfile

SCRATCH=/local/$USER/$SLURM_JOB_ID
echo Creating temp dir $SCRATCH
mkdir -p $SCRATCH || exit $?
echo Copying files. srun cp is equivalent to loop over each node + scp
cp -r $SLURM_SUBMIT_DIR/$inpfile $SCRATCH || exit $?
#cp -r $SLURM_SUBMIT_DIR/oh-pi-p-b3lyp-freq.hess $SCRATCH || exit $?

cd $SCRATCH

$ORCA $inpfile > $SLURM_SUBMIT_DIR/$outfile

echo calculation finished - copying files back to home directory
cp $SCRATCH/* $SLURM_SUBMIT_DIR
cp $SCRATCH/*.gbw $SLURM_SUBMIT_DIR
cp $SCRATCH/*.hess $SLURM_SUBMIT_DIR
cp $SCRATCH/*.trj $SLURM_SUBMIT_DIR
cp $SCRATCH/*.xyz* $SLURM_SUBMIT_DIR

echo calculation finished - removing scratch dir
rm -rf $SCRATCH
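ORCA takes its parallel process count from the input file (the %pal block) rather than from an mpirun command line, which is also why the script resolves and calls the full path stored in $ORCA. The sketch below writes a minimal, illustrative input; water-opt.inp and its geometry are placeholders rather than the py-lpno-pccsd-freq.inp calculation used above, and inpfile= in the script would be changed to match.

cat > water-opt.inp <<'EOF'
! B3LYP def2-SVP Opt
%pal
  nprocs 28    # keep equal to --ntasks-per-node in the job script
end
* xyz 0 1
O   0.000000   0.000000   0.000000
H   0.000000   0.757000   0.586000
H   0.000000  -0.757000   0.586000
*
EOF

Keeping nprocs and --ntasks-per-node in step avoids either idling part of the allocation or oversubscribing the node.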
[username@login01 ~]$ sbatch orca-test.job
Submitted batch job 189522
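Once the job is submitted, it can be followed with the usual SLURM tools; a quick sketch using the job ID returned above (sacct needs job accounting to be enabled on the cluster):

squeue -u $USER    # all of your pending and running jobs
squeue -j 189522   # just this job
sacct -j 189522    # accounting summary once the job has started or finished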
Note : ORCA versions > 4.0.0 require OpenMPI version 3 (openmpi3).