[main]
testTopDir = /raid/testing/maestroex-gpu
webTopDir  = /raid/www/MAESTROeX/test-suite/gpu
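# testTopDir is where the tests are run and benchmarks are stored; webTopDir
# is where the generated HTML reports are written (presumably served at the
# URL given in emailBody below).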

sourceTree = C_Src
numMakeJobs = 20

suiteName = MAESTROeX-SBU
sub_title = GPU version

goUpLink = 1

reportActiveTestsOnly = 1

COMP = gnu
add_to_c_make_command = CUDA_ARCH=70 USE_CUDA=TRUE INTEGRATOR_DIR=VODE
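# with the settings above, each test's build line should look roughly like
# this (a sketch; the suite adds further flags of its own):
#   make -j20 COMP=gnu CUDA_ARCH=70 USE_CUDA=TRUE INTEGRATOR_DIR=VODE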

use_ctools = 0

plot_file_name = maestro.plot_base_name
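# i.e. the suite reads the runtime parameter maestro.plot_base_name from each
# test's inputs file to locate the plotfiles the test writes (a sketch of the
# suite's behavior).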

purge_output = 1

summary_job_info_field1 = EOS
summary_job_info_field2 = NETWORK

#globalAddToExecString = diffusion.use_mlmg_solver=1 gravity.use_mlmg_solver=1

# MPIcommand can use the following placeholders:
#   @host@    : the hostname to run on (not used here)
#   @nprocs@  : the number of processors
#   @command@ : the command to run
#
# only tests with useMPI = 1 run in parallel; the number of processors is
# problem-dependent and set in the individual test sections below.

MPIcommand = mpiexec -n @nprocs@ @command@
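# e.g., a test with useMPI = 1 and numprocs = 4 would be launched roughly as
# (a sketch; the executable name is illustrative):
#   mpiexec -n 4 ./Maestro2d.gnu.MPI.CUDA.ex inputs_2d_regression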

default_branch = development

# email
sendEmailWhenFail = 0
emailTo = castro-development@googlegroups.com
emailBody = check http://groot.astro.sunysb.edu/MAESTROeX/test-suite/gpu/

# slack
slack_post = 1
slack_webhookfile = /home/zingale/.slack.webhook
slack_channel = "#maestro"
slack_username = "i am groot"
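# slack_webhookfile is expected to hold just the incoming-webhook URL, kept
# outside this config so it is not committed (assumption about the file's
# contents).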


[AMReX]
dir = /raid/testing/maestroex-gpu/amrex
branch = development

[source]
dir = /raid/testing/maestroex-gpu/MAESTROeX
branch = development

[extra-Microphysics]
dir = /raid/testing/maestroex-gpu/Microphysics
branch = development
comp_string = MICROPHYSICS_HOME=@self@
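# @self@ expands to this repo's checkout path, so the build should see
# roughly (a sketch):
#   make ... MICROPHYSICS_HOME=/raid/testing/maestroex-gpu/Microphysics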

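# individual tests: one section per test.  buildDir is taken relative to the
# [source] dir; compileTest = 1 would build only, and restartTest = 1 would
# also exercise restart from a checkpoint (a brief sketch of these options).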
[RT]
buildDir = Exec/test_problems/rt
inputFile = inputs_2d_regression
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 0
useOMP = 0
keywords = hydro


[reacting_bubble-2d-amr]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_2d_regression.2levels
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 0
useOMP = 0
keywords = hydro
link1File = helm_table.dat
link2File = model.hse.cool.coulomb
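# link1File / link2File name auxiliary runtime files (here the Helmholtz EOS
# table and the initial model) that the suite makes available in each test's
# run directory (a sketch of the suite's behavior).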

[reacting_bubble-3d]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_3d_gpu_regression
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 0
useOMP = 0
keywords = hydro
link1File = helm_table.dat
link2File = model.hse.cool.coulomb

[wdconvect-amr]
buildDir = Exec/science/wdconvect
inputFile = inputs_files/inputs_3d_regression.2levels
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 0
useOMP = 0
keywords = hydro
link1File = helm_table.dat
link2File = model_files/kepler_new_6.25e8.hybrid.hse.320