# Microphysics GPU test-suite configuration.
# NOTE(review): presumably read by the AMReX regression-test suite
# (Python configparser) — confirm against the suite invocation.
# Conventions: `#` full-line comments only; one `key = value` per line.

[main]
testTopDir = /raid/testing/microphysics-gpu/
webTopDir = /raid/www/Microphysics/test-suite/gpu/

sourceTree = C_Src

numMakeJobs = 20

suiteName = Microphysics-gpu

goUpLink = 1

reportActiveTestsOnly = 1

COMP = pgi

add_to_c_make_command = CUDA_ARCH=70 COMPILE_CUDA_PATH=/usr/local/cuda-10.2 USE_CUDA=TRUE

use_ctools = 0

purge_output = 1

summary_job_info_field1 = EOS
summary_job_info_field2 = NETWORK

# kept commented-out deliberately; re-enable to pass these runtime options
# to every test's executable
#globalAddToExecString = diffusion.use_mlmg_solver=1 gravity.use_mlmg_solver=1

# MPIcommand should use the placeholders:
#   @host@ to indicate where to put the hostname to run on
#   @nprocs@ to indicate where to put the number of processors
#   @command@ to indicate where to put the command to run
#
# only tests with useMPI = 1 will run in parallel
# nprocs is problem dependent and specified in the individual problem
# sections.

MPIcommand = mpiexec -n @nprocs@ @command@

default_branch = development

# email
sendEmailWhenFail = 0
emailTo = castro-development@googlegroups.com
emailBody = check http://groot.astro.sunysb.edu/Microphysics/test-suite/gpu/

# slack
slack_post = 1
slack_webhookfile = /raid/testing/.slack.webhook
slack_channel = "#gpu"
slack_username = "i am groot"

# AMReX checkout used to build the unit tests
[AMReX]
dir = /raid/testing/microphysics-gpu/amrex
branch = development

# Microphysics checkout under test
[source]
dir = /raid/testing/microphysics-gpu/Microphysics
branch = development

[test_eos-helmholtz]
buildDir = unit_test/test_eos/
inputFile = input_eos
probinFile = probin
addToCompileString = EOS_DIR=helmholtz
link1File = helm_table.dat
dim = 3
restartTest = 0
useMPI = 0
useOMP = 0
compileTest = 0
doVis = 0
compareFile = test_eos.helmholtz

[test_eos_C-helmholtz]
buildDir = unit_test/test_eos
inputFile = input_eos
probinFile = probin
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = EOS_DIR=helmholtz
compareFile = test_eos.helmholtz.cxx
runtime_params = do_cxx=1

[test_react-aprox13-VODE90]
buildDir = unit_test/test_react/
inputFile = inputs_aprox13
probinFile = probin.aprox13
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = NETWORK_DIR=aprox13 INTEGRATOR_DIR=VODE
compareFile = react_aprox13_test_react.VODE

[test_aprox_rates]
buildDir = unit_test/test_aprox_rates/
inputFile = input_aprox_rates
probinFile = probin
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = NETWORK_DIR=aprox19 INTEGRATOR_DIR=VODE
compareFile = test_aprox_rates.helmholtz

[test_aprox_rates_C]
buildDir = unit_test/test_aprox_rates
inputFile = input_aprox_rates
probinFile = probin
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = NETWORK_DIR=aprox19 INTEGRATOR_DIR=VODE
compareFile = test_aprox_rates.helmholtz.cxx
runtime_params = do_cxx=1

[test_screening]
buildDir = unit_test/test_screening/
inputFile = inputs
probinFile = probin
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = NETWORK_DIR=aprox21 INTEGRATOR_DIR=VODE
compareFile = test_screening

[test_screening_C]
buildDir = unit_test/test_screening/
inputFile = inputs
probinFile = probin
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = NETWORK_DIR=aprox21 INTEGRATOR_DIR=VODE
compareFile = test_screening.cxx
runtime_params = do_cxx=1