|
str | run_automated.machine = 'summit' |
|
| run_automated.parser = argparse.ArgumentParser( description='Run performance tests and write results in files' ) |
|
| run_automated.dest |
|
| run_automated.action |
|
| run_automated.default |
|
| run_automated.help |
|
| run_automated.type |
|
| run_automated.choices |
|
| run_automated.args = parser.parse_args() |
|
| run_automated.n_node_list_string = args.n_node_list.split(',') |
|
list | run_automated.n_node_list = [int(i) for i in n_node_list_string] |
|
| run_automated.start_date = args.start_date |
|
str | run_automated.run_name = 'custom_perftest' |
|
str | run_automated.perf_database_file = 'my_tests_database.h5' |
|
bool | run_automated.rename_archive = False |
|
bool | run_automated.store_full_input = False |
|
bool | run_automated.update_perf_log_repo = False |
|
bool | run_automated.push_on_perf_log_repo = False |
|
| run_automated.recompile = args.recompile |
|
bool | run_automated.pull_3_repos = False |
|
| run_automated.compiler = args.compiler |
|
| run_automated.architecture = args.architecture |
|
| run_automated.source_dir_base = args.path_source |
|
| run_automated.res_dir_base = args.path_results |
|
bool | run_automated.browse_output_files = False |
|
bool | run_automated.browse_output_file = True |
|
int | run_automated.n_repeat = 2 |
|
| run_automated.test_list = get_test_list(n_repeat) |
|
str | run_automated.warpx_dir = source_dir_base + '/warpx/' |
|
str | run_automated.picsar_dir = source_dir_base + '/picsar/' |
|
str | run_automated.amrex_dir = source_dir_base + '/amrex/' |
|
str | run_automated.perf_logs_repo = source_dir_base + 'perf_logs/' |
|
dict | run_automated.compiler_name = {'intel': 'intel', 'gnu': 'gcc', 'pgi':'pgi'} |
|
dict | run_automated.module_Cname = {'cpu': 'haswell', 'knl': 'knl,quad,cache', 'gpu':''} |
|
dict | run_automated.csv_file = {'cori':'cori_knl.csv', 'summit':'summit.csv'} |
|
str | run_automated.cwd = warpx_dir + 'Tools/PerformanceTests/' |
|
str | run_automated.path_hdf5 = cwd |
|
str | run_automated.bin_dir = cwd + 'Bin/' |
|
| run_automated.bin_name = executable_name(compiler, architecture) |
|
str | run_automated.log_dir = cwd |
|
| run_automated.day = time.strftime('%d') |
|
| run_automated.month = time.strftime('%m') |
|
| run_automated.year = time.strftime('%Y') |
|
| run_automated.config_command = get_config_command(compiler, architecture) |
|
| run_automated.git_repo = git.cmd.Git( picsar_dir ) |
|
string | run_automated.make_realclean_command = " make realclean WARPX_HOME=../.. " \ |
|
string | run_automated.make_command = "make -j 16 WARPX_HOME=../.. " \ |
|
| run_automated.repo_path |
|
| run_automated.filename |
|
| run_automated.name |
|
| run_automated.res_dir = res_dir_base |
|
list | run_automated.runtime_param_list = [] |
|
| run_automated.test_list_n_node = copy.deepcopy(test_list) |
|
| run_automated.job_time_min = time_min(len(test_list)) |
|
| run_automated.batch_string = get_batch_string(test_list_n_node, job_time_min, module_Cname[architecture], n_node) |
|
str | run_automated.runtime_param_string = ' amr.n_cell=' + ' '.join(str(i) for i in current_run.n_cell) |
|
| run_automated.run_string = get_run_string(current_run, architecture, n_node, count, bin_name, runtime_param_string) |
|
| run_automated.submit_job_command = get_submit_job_command() |
|
str | run_automated.output_filename = 'out_' + '_'.join([current_run.input_file, str(n_node), str(current_run.n_mpi_per_node), str(current_run.n_omp), str(count)]) + '.txt' |
|
| run_automated.df_newline = extract_dataframe(res_dir + output_filename, current_run.n_step) |
|
| run_automated.df_base = pd.read_hdf(path_hdf5 + perf_database_file, 'all_data') |
|
| run_automated.updated_df = df_base.append(df_newline, ignore_index=True) |
|
| run_automated.key |
|
| run_automated.mode |
|
| run_automated.format |
|
| run_automated.index = git_repo.index |
|
int | run_automated.loc_counter = 0 |
|
| run_automated.res_dir_arch = res_dir_base |
|