[main]
testTopDir = /raid/testing/castro-gpu/
webTopDir  = /raid/www/Castro/test-suite/gpu/

sourceTree = C_Src
numMakeJobs = 20

suiteName = Castro-gpu
sub_title = CUDA

goUpLink = 1

reportActiveTestsOnly = 1

COMP = gnu
add_to_c_make_command = XTRA_NVCC_FLAGS=-allow-unsupported-compiler USE_CUDA=TRUE CUDA_ARCH=70 TINY_PROFILE=TRUE HYPRE_DIR=/opt/hypre/hypre-git-gpu
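# for reference: these flags build every test with CUDA for compute
# capability 7.0 GPUs, enable the AMReX tiny profiler, point make at a GPU
# build of hypre, and tell nvcc to accept a host compiler version it does
# not officially support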

use_ctools = 0

purge_output = 1

summary_job_info_field1 = EOS
summary_job_info_field2 = NETWORK
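# (these pull the EOS and NETWORK entries out of each run's job_info file
#  and display them in the web summary table)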

#globalAddToExecString = diffusion.use_mlmg_solver=1 gravity.use_mlmg_solver=1

# MPIcommand should use the placeholders:
#   @host@ to indicate where to put the hostname to run on
#   @nprocs@ to indicate where to put the number of processors
#   @command@ to indicate where to put the command to run
#
# only tests with useMPI = 1 will run in parallel.  numprocs is
# problem-dependent and is set in each problem's own section.

MPIcommand = mpiexec -n @nprocs@ @command@
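# e.g. for [rad-SuOlson] below (useMPI = 1, numprocs = 1), this expands to
# roughly:
#   mpiexec -n 1 ./Castro1d.<suffix>.ex inputs
# (the executable name here is illustrative; the suite derives the real one
#  from the build)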

default_branch = development

# email
sendEmailWhenFail = 0
emailTo = castro-development@googlegroups.com
emailBody = check http://groot.astro.sunysb.edu/Castro/test-suite/gpu/

# slack
slack_post = 1
slack_webhookfile = /raid/testing/.slack.webhook
slack_channel = "#gpu"
slack_username = "i am groot"


[AMReX]
dir = /raid/testing/castro-gpu/amrex
branch = development
#branch = pr-2754

[source]
dir = /raid/testing/castro-gpu/Castro/
branch = development

# this is a safeguard in case any problem GNUmakefiles hard-code CASTRO_HOME
comp_string = CASTRO_HOME=@source@
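# (@source@ expands to the dir set above, so make effectively sees
#  CASTRO_HOME=/raid/testing/castro-gpu/Castro/)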

[extra-Microphysics]
dir = /raid/testing/castro-gpu/Microphysics
branch = development
comp_string = MICROPHYSICS_HOME=@self@
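# (@self@ expands to this repo's own checkout, giving
#  MICROPHYSICS_HOME=/raid/testing/castro-gpu/Microphysics)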


[Sedov-2d]
buildDir = Exec/hydro_tests/Sedov/
inputFile = inputs.2d.sph_in_cylcoords.testsuite
dim = 2
restartTest = 0
useMPI = 0
useOMP = 0
compileTest = 0
doVis = 0
#analysisRoutine = Exec/hydro_tests/Sedov/testsuite_analysis/sedov_2d_sph_in_cyl.py
#analysisMainArgs = source_dir
#analysisOutputImage = Sedov-2d.png
keywords = not3d

[Sedov-3d]
buildDir = Exec/hydro_tests/Sedov/
inputFile = inputs.3d.sph.testsuite
dim = 3
restartTest = 0
useMPI = 0
useOMP = 0
compileTest = 0
doVis = 0
#analysisRoutine = Exec/hydro_tests/Sedov/testsuite_analysis/sedov_3d_sph.py
#analysisMainArgs = source_dir
#analysisOutputImage = Sedov-3d.png

[flame_wave]
buildDir = Exec/science/flame_wave
inputFile = inputs_2d.testsuite
link1File = helm_table.dat
dim = 2
useMPI = 0
useOMP = 0
compileTest = 0
keywords = helm, diffusion, not3d, rotation, react, gravity

[flame_wave-3d]
buildDir = Exec/science/flame_wave
inputFile = inputs_3d.testsuite.gpu
link1File = helm_table.dat
dim = 3
useMPI = 0
useOMP = 0
compileTest = 0
keywords = helm, diffusion, rotation, react, gravity
addToCompileString = NETWORK_DIR=iso7
runtime_params = amrex.the_arena_is_managed=1 amrex.abort_on_out_of_gpu_memory=0 castro.hydro_memory_footprint_ratio=3
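# a sketch of the intent here: run with CUDA managed memory, don't abort if
# device memory is oversubscribed, and cap the hydro solver's temporary
# memory use (castro.hydro_memory_footprint_ratio) so the 3-d problem fits
# on a single GPU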

[wdmerger_3D]
buildDir = Exec/science/wdmerger/tests/wdmerger_3D
inputFile = inputs_test_wdmerger_3D
dim = 3
useMPI = 0
useOMP = 0
link1File = helm_table.dat
debug = 0
keywords = helm, gravity

[dustcollapse-restart]
buildDir = Exec/gravity_tests/DustCollapse
inputFile = inputs_3d_poisson_regtest_restart
dim = 3
useMPI = 0
useOMP = 0
debug = 0
keywords = helm, gravity
addToCompileString = EOS_DIR=gamma_law_general
restartTest = 1
restartFileNum = 5
runtime_params = amr.blocking_factor=8
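# restart mechanics: the test first runs to completion, then restarts from
# checkpoint number 5 (restartFileNum) and compares the restarted run's
# final output against the original run's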

[rad-SuOlson]
buildDir = Exec/radiation_tests/RadSuOlson
inputFile = inputs
dim = 1
useMPI = 1
numprocs = 1
keywords = rad, not3d
addToCompileString = NGROUPS=1
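# (NGROUPS=1 builds the radiation solver with a single energy group)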