Merge pull request sxs-collaboration#6094 from Bronoulli/develop_OceanGcc

Update Ocean submit script
knelli2 authored Jul 26, 2024
2 parents f728c6c + fa50321 commit a6a8ee4
Showing 4 changed files with 58 additions and 198 deletions.
2 changes: 2 additions & 0 deletions support/Environments/ocean_gcc.sh
@@ -92,7 +92,9 @@ spectre_run_cmake() {
-D CMAKE_Fortran_COMPILER=${GCC_HOME}/gfortran \
-D USE_PCH=ON \
-D BUILD_PYTHON_BINDINGS=ON \
-D MACHINE=Ocean \
-D BOOTSTRAP_PY_DEPS=ON \
-D DEBUG_SYMBOLS=OFF \
"$@" \
$SPECTRE_HOME
}
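For context, the function touched by this hunk is typically used from an interactive shell on Ocean roughly as follows. This is an illustrative sketch only: the clone location, build directory, and parallelism level are placeholders, and the build target is borrowed from the old submit script further down.

# Illustrative only; adjust paths to your own clone of spectre.
export SPECTRE_HOME=${HOME}/spectre
source ${SPECTRE_HOME}/support/Environments/ocean_gcc.sh
spectre_load_modules

# Configure in a build directory. With -D MACHINE=Ocean now passed by
# spectre_run_cmake, the build is associated with the Ocean.yaml machine
# file and the Ocean.sh submit template added below.
mkdir -p ${SPECTRE_HOME}/build && cd ${SPECTRE_HOME}/build
spectre_run_cmake
make -j20 EvolveGeneralizedHarmonic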
12 changes: 12 additions & 0 deletions support/Machines/Ocean.yaml
@@ -0,0 +1,12 @@
# Distributed under the MIT License.
# See LICENSE.txt for details.

Machine:
  Name: Ocean
  Description: |
    Supercomputer at Cal State Fullerton hosted by Geoffrey Lovelace.
  DefaultTasksPerNode: 1
  DefaultProcsPerTasks: 20
  DefaultQueue: "orca-1"
  DefaultTimeLimit: "1-00:00:00"
  LaunchCommandSingleNode: []
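These defaults mirror the fallback values in the head block of the new Ocean.sh submit template (next file). Rendered with no overrides, the machine-specific part of its #SBATCH header would read roughly as below; directives contributed by SubmitTemplateBase.sh (job name, output files, etc.) are not shown.

#SBATCH --nodes 1
#SBATCH --ntasks-per-node 1
#SBATCH --cpus-per-task 20
#SBATCH -p orca-1
#SBATCH -t 1-00:00:00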
105 changes: 44 additions & 61 deletions support/SubmitScripts/Ocean.sh
@@ -1,69 +1,52 @@
#!/bin/bash -
#SBATCH -o spectre.stdout
#SBATCH -e spectre.stderr
#SBATCH --ntasks-per-node 20
#SBATCH -J KerrSchild
#SBATCH --nodes 2
#SBATCH -p orca-1
#SBATCH -t 12:00:00
#SBATCH -D .
{% extends "SubmitTemplateBase.sh" %}

# Distributed under the MIT License.
# See LICENSE.txt for details.

# To run a job on Ocean:
# - Set the -J, --nodes, and -t options above, which correspond to job name,
# number of nodes, and wall time limit in HH:MM:SS, respectively.
# - Set the build directory, run directory, executable name,
# and input file below.
#
# NOTE: The executable will not be copied from the build directory, so if you
# update your build directory this file will use the updated executable.
#
# Optionally, if you need more control over how SpECTRE is launched on
# Ocean you can edit the launch command at the end of this file directly.
#
# To submit the script to the queue run:
# sbatch Ocean.sh
# Ocean is a supercomputer at Cal State, Fullerton.
# More information:
# https://github.com/sxs-collaboration/WelcomeToSXS/wiki/Ocean

# Replace these paths with the path to your build directory and to the
# directory where you want the output to appear, i.e. the run directory
# E.g., if you cloned spectre in your home directory, set
# SPECTRE_BUILD_DIR to ${HOME}/spectre/build. If you want to run in a
# directory called "Run" in the current directory, set
# SPECTRE_RUN_DIR to ${PWD}/Run
export SPECTRE_BUILD_DIR=${HOME}/Codes/spectre/spectre/build_singularity_release
export SPECTRE_RUN_DIR=${PWD}/Run
{% block head %}
{{ super() -}}
#SBATCH --nodes {{ num_nodes | default(1) }}
#SBATCH --ntasks-per-node 1
#SBATCH --cpus-per-task 20
#SBATCH -p {{ queue | default("orca-1") }}
#SBATCH -t {{ time_limit | default("1-00:00:00") }}
{% endblock %}

# Choose the executable and input file to run
# To use an input file in the current directory, set
# SPECTRE_INPUT_FILE to ${PWD}/InputFileName.yaml
export SPECTRE_EXECUTABLE=${SPECTRE_BUILD_DIR}/bin/EvolveGeneralizedHarmonic
export SPECTRE_INPUT_FILE=${PWD}/KerrSchild.yaml

# These commands load the relevant modules and cd into the run directory,
# creating it if it doesn't exist
module load ohpc
mkdir -p ${SPECTRE_RUN_DIR}
cd ${SPECTRE_RUN_DIR}

# Copy the input file into the run directory, to preserve it
cp ${SPECTRE_INPUT_FILE} ${SPECTRE_RUN_DIR}/

# Set desired permissions for files created with this script
umask 0022

# Set the path to include the build directory's bin directory
export PATH=${SPECTRE_BUILD_DIR}/bin:$PATH

# Flag to stop blas in CCE from parallelizing without charm++
{% block run_command %}
export OPENBLAS_NUM_THREADS=1

# The 19 is there because Charm++ uses one thread per node for communication
# Here, -np should take the number of nodes (must be the same as --nodes
# in the #SBATCH options above).
SPECTRE_COMMAND="${SPECTRE_EXECUTABLE} +ppn 19 +pemap 0-18 +commap 19"

mpirun -np ${SLURM_JOB_NUM_NODES} --map-by ppr:1:node singularity exec \
/opt/ohpc/pub/containers/spectre_ocean.sif \
bash -c "${SPECTRE_COMMAND} --input-file ${SPECTRE_INPUT_FILE}"
# Generate nodelist file
echo "Running on the following nodes:"
echo ${SLURM_NODELIST}
touch nodelist.$SLURM_JOBID
for node in $(echo $SLURM_NODELIST | scontrol show hostnames); do
  echo "host ${node}" >> nodelist.$SLURM_JOBID
done

# Set worker threads and run command
WORKER_THREADS=$((SLURM_NTASKS * CHARM_PPN))
SPECTRE_COMMAND="${SPECTRE_EXECUTABLE} ++np ${SLURM_NTASKS} \
++p ${WORKER_THREADS} ++ppn ${CHARM_PPN} \
++nodelist nodelist.${SLURM_JOBID}"


# When invoking through `charmrun`, charm will initiate remote sessions which
# will wipe out environment settings unless it is forced to re-initialize the
# spectre environment between the start of the remote session and starting the
# spectre executable
echo "#!/bin/sh
source ${SPECTRE_HOME}/support/Environments/ocean_gcc.sh
spectre_load_modules
\$@
" > ${RUN_DIR}/runscript.${SLURM_JOBID}
chmod u+x ${RUN_DIR}/runscript.${SLURM_JOBID}

# Run
charmrun ++runscript ${RUN_DIR}/runscript.${SLURM_JOBID} \
${SPECTRE_COMMAND} --input-file ${SPECTRE_INPUT_FILE} \
${SPECTRE_CHECKPOINT:+ +restart "${SPECTRE_CHECKPOINT}"}
{% endblock %}
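As a concrete, illustrative expansion of the run_command block above: for a hypothetical 2-node job (so SLURM_NTASKS=2), assuming SubmitTemplateBase.sh sets CHARM_PPN=19 (one core per node left for Charm++ communication, as in the deleted mpirun version), a job ID of 123456, and no checkpoint to restart from, WORKER_THREADS becomes 2 * 19 = 38 and the command that runs is roughly:

# Assumed values: CHARM_PPN=19, SLURM_NTASKS=2, SLURM_JOBID=123456.
# SPECTRE_EXECUTABLE, SPECTRE_INPUT_FILE, and RUN_DIR are set by the base
# template and kept symbolic here.
charmrun ++runscript ${RUN_DIR}/runscript.123456 \
  ${SPECTRE_EXECUTABLE} ++np 2 ++p 38 ++ppn 19 \
  ++nodelist nodelist.123456 \
  --input-file ${SPECTRE_INPUT_FILE}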
137 changes: 0 additions & 137 deletions support/SubmitScripts/OceanClang.sh

This file was deleted.
