E3SM Coupled Model XML Files
XML files for E3SM in CIMEROOT/config/e3sm.
CIMEROOT/config/e3sm
E3SM XML settings for the short-term archiver.
<components version="2.0">
<!-- cam/atm: archive spec for the atmosphere model.
     rest_file_extension entries are regexes matched against the file-type
     token of output names; hist_file_extension likewise for history files. -->
<comp_archive_spec compname="cam" compclass="atm">
<rest_file_extension>[ri]</rest_file_extension>
<rest_file_extension>rh\d*</rest_file_extension>
<rest_file_extension>rs</rest_file_extension>
<hist_file_extension>h\d*.*\.nc$</hist_file_extension>
<hist_file_extension>e</hist_file_extension>
<!-- netCDF variable in the restart file that names the associated history files -->
<rest_history_varname>nhfil</rest_history_varname>
<rpointer>
<rpointer_file>rpointer.atm$NINST_STRING</rpointer_file>
<!-- NOTE(review): removed a stray trailing space before the closing tag;
     it would otherwise be written verbatim into the rpointer file content
     (no sibling spec carries one). -->
<rpointer_content>$CASE.cam$NINST_STRING.r.$DATENAME.nc</rpointer_content>
</rpointer>
<!-- fixture names exercised by the archiver unit tests; disposition says
     whether the test expects the file copied, moved, or left alone -->
<test_file_names>
<tfile disposition="copy">rpointer.atm</tfile>
<tfile disposition="copy">rpointer.atm_9999</tfile>
<tfile disposition="copy">casename.cam.r.1976-01-01-00000.nc</tfile>
<tfile disposition="copy">casename.cam.rh4.1976-01-01-00000.nc</tfile>
<tfile disposition="move">casename.cam.h0.1976-01-01-00000.nc</tfile>
<tfile disposition="ignore">casename.cam.h0.1976-01-01-00000.nc.base</tfile>
<tfile disposition="move">casename.cam_0002.e.postassim.1976-01-01-00000.nc</tfile>
<tfile disposition="move">casename.cam_0002.e.preassim.1976-01-01-00000.nc</tfile>
<tfile disposition="copy">casename.cam.i.1976-01-01-00000.nc</tfile>
<tfile disposition="ignore">anothercasename.cam.i.1976-01-01-00000.nc</tfile>
</test_file_names>
</comp_archive_spec>
<!-- clm/lnd: archive spec for the land model.
     Restart regexes: plain "r" plus at most one rh digit (rh\d?). -->
<comp_archive_spec compname="clm" compclass="lnd">
<rest_file_extension>r</rest_file_extension>
<rest_file_extension>rh\d?</rest_file_extension>
<hist_file_extension>h\d*.*\.nc$</hist_file_extension>
<hist_file_extension>e</hist_file_extension>
<!-- restart-file variable listing the associated local history file names -->
<rest_history_varname>locfnh</rest_history_varname>
<rpointer>
<rpointer_file>rpointer.lnd$NINST_STRING</rpointer_file>
<rpointer_content>./$CASE.clm2$NINST_STRING.r.$DATENAME.nc</rpointer_content>
</rpointer>
<test_file_names>
<tfile disposition="copy">rpointer.lnd</tfile>
<tfile disposition="copy">rpointer.lnd_9999</tfile>
<tfile disposition="copy">casename.clm2.r.1976-01-01-00000.nc</tfile>
<tfile disposition="copy">casename.clm2.rh4.1976-01-01-00000.nc</tfile>
<tfile disposition="move">casename.clm2.h0.1976-01-01-00000.nc</tfile>
<tfile disposition="ignore">casename.clm2.h0.1976-01-01-00000.nc.base</tfile>
<tfile disposition="move">casename.clm2_0002.e.postassim.1976-01-01-00000.nc</tfile>
<tfile disposition="move">casename.clm2_0002.e.preassim.1976-01-01-00000.nc</tfile>
<tfile disposition="ignore">anothercasename.clm2.i.1976-01-01-00000.nc</tfile>
</test_file_names>
</comp_archive_spec>
<!-- mosart/rof: archive spec for the river runoff model.
     "unset" rest_history_varname: restarts do not reference history files. -->
<comp_archive_spec compname="mosart" compclass="rof">
<rest_file_extension>r</rest_file_extension>
<rest_file_extension>rh\d*</rest_file_extension>
<hist_file_extension>h\d*</hist_file_extension>
<rest_history_varname>unset</rest_history_varname>
<rpointer>
<rpointer_file>rpointer.rof$NINST_STRING</rpointer_file>
<rpointer_content>$CASE.mosart$NINST_STRING.r.$DATENAME.nc</rpointer_content>
</rpointer>
</comp_archive_spec>
<!-- cice/ice: archive spec for the sea-ice model -->
<comp_archive_spec compname="cice" compclass="ice">
<rest_file_extension>[ri]</rest_file_extension>
<hist_file_extension>h\d*</hist_file_extension>
<rest_history_varname>unset</rest_history_varname>
<rpointer>
<rpointer_file>rpointer.ice$NINST_STRING</rpointer_file>
<rpointer_content>./$CASE.cice$NINST_STRING.r.$DATENAME.nc</rpointer_content>
</rpointer>
<test_file_names>
<tfile disposition="copy">rpointer.ice</tfile>
<tfile disposition="copy">casename.cice.r.1976-01-01-00000.nc</tfile>
<tfile disposition="move">casename.cice.h.1976-01-01-00000.nc</tfile>
</test_file_names>
</comp_archive_spec>
<!-- mpassi/ice: MPAS-Seaice archive spec.
     MPAS components use "rst"/"hist" tokens and $MPAS_DATENAME
     (underscore-separated date_time) rather than the CESM-style $DATENAME. -->
<comp_archive_spec compname="mpassi" compclass="ice">
<rest_file_extension>rst</rest_file_extension>
<rest_file_extension>rst.am.timeSeriesStatsMonthly</rest_file_extension>
<hist_file_extension>hist</hist_file_extension>
<rest_history_varname>unset</rest_history_varname>
<rpointer>
<rpointer_file>rpointer.ice$NINST_STRING</rpointer_file>
<rpointer_content>$MPAS_DATENAME</rpointer_content>
</rpointer>
<test_file_names>
<tfile disposition="copy">rpointer.ice</tfile>
<tfile disposition="copy">mpassi.rst.1976-01-01_00000.nc</tfile>
<tfile disposition="copy">mpassi.rst.am.timeSeriesStatsMonthly.1976-01-01_00000.nc</tfile>
<tfile disposition="move">mpassi.hist.1976-01-01_00000.nc</tfile>
<tfile disposition="move">mpassi.hist.am.regionalStatistics.0001.01.nc</tfile>
</test_file_names>
</comp_archive_spec>
<!-- mpaso/ocn: MPAS-Ocean archive spec (same rst/hist conventions as mpassi) -->
<comp_archive_spec compname="mpaso" compclass="ocn">
<rest_file_extension>rst</rest_file_extension>
<rest_file_extension>rst.am.timeSeriesStatsMonthly</rest_file_extension>
<hist_file_extension>hist</hist_file_extension>
<rest_history_varname>unset</rest_history_varname>
<rpointer>
<rpointer_file>rpointer.ocn$NINST_STRING</rpointer_file>
<rpointer_content>$MPAS_DATENAME</rpointer_content>
</rpointer>
<test_file_names>
<tfile disposition="copy">rpointer.ocn</tfile>
<tfile disposition="copy">mpaso.rst.1976-01-01_00000.nc</tfile>
<tfile disposition="copy">mpaso.rst.am.timeSeriesStatsMonthly.1976-01-01_00000.nc</tfile>
<tfile disposition="move">mpaso.hist.am.globalStats.1976-01-01.nc</tfile>
<tfile disposition="move">mpaso.hist.am.highFrequencyOutput.1976-01-01_00.00.00.nc</tfile>
</test_file_names>
</comp_archive_spec>
<!-- mali/glc: MPAS-Albany Land Ice archive spec.
     Unlike mpaso/mpassi, the rpointer content here is a full relative
     filename built from $MPAS_DATENAME, not the bare date string. -->
<comp_archive_spec compname="mali" compclass="glc">
<rest_file_extension>rst</rest_file_extension>
<rest_file_extension>rst.am.timeSeriesStatsMonthly</rest_file_extension>
<hist_file_extension>hist</hist_file_extension>
<rest_history_varname>unset</rest_history_varname>
<rpointer>
<rpointer_file>rpointer.glc$NINST_STRING</rpointer_file>
<rpointer_content>./mali$NINST_STRING.rst.$MPAS_DATENAME.nc</rpointer_content>
</rpointer>
<test_file_names>
<tfile disposition="copy">rpointer.glc</tfile>
<tfile disposition="copy">mali.rst.1976-01-01_00000.nc</tfile>
<tfile disposition="copy">mali.rst.am.timeSeriesStatsMonthly.1976-01-01_00000.nc</tfile>
<tfile disposition="move">mali.hist.am.globalStats.1976-01-01.nc</tfile>
<tfile disposition="move">mali.hist.am.highFrequencyOutput.1976-01-01_00.00.00.nc</tfile>
</test_file_names>
</comp_archive_spec>
<!-- dart/esp: data-assimilation (DART) archive spec.
     Restart regex matches e.<...>inf<...> inflation files; history files are
     the .e (ensemble) and .i (initial) outputs. No rpointer file is used. -->
<comp_archive_spec compclass="esp" compname="dart">
<rest_file_extension>e\.\w+inf\w+</rest_file_extension>
<hist_file_extension>[ei]</hist_file_extension>
<rest_history_varname>unset</rest_history_varname>
<rpointer>
<rpointer_file>rpointer.unset</rpointer_file>
<rpointer_content>unset</rpointer_content>
</rpointer>
<test_file_names>
<tfile disposition="move">casename.dart.e.pop_preassim_priorinf_mean.1976-01-01-00000.nc</tfile>
</test_file_names>
</comp_archive_spec>
</components>
E3SM XML settings for defining CASEROOT env_*.xml file entries.
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="definitions_variables.xsl" ?>
<entry_id version="2.0">
<!-- MODEL: model-system identifier; interpolated into most paths below as $MODEL -->
<entry id="MODEL">
<type>char</type>
<default_value>e3sm</default_value>
<group>case_der</group>
<file>env_case.xml</file>
<desc>model system name</desc>
</entry>
<!-- ============================================================ -->
<!-- Filenames for case config, grids, machines and pio -->
<!-- ============================================================ -->
<!-- Locations of the machine/batch/pio/grids specification files.
     Where present, <schema> names the XSD used to validate the target file. -->
<entry id="CASEFILE_HEADERS">
<type>char</type>
<default_value>$CIMEROOT/config/config_headers.xml</default_value>
<group>case_der</group>
<file>env_case.xml</file>
<desc>contains both header and group information for all the case env_*.xml files </desc>
</entry>
<entry id="BATCH_SPEC_FILE">
<type>char</type>
<default_value>$CIMEROOT/config/$MODEL/machines/config_batch.xml</default_value>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing batch system details for target system (for documentation only - DO NOT EDIT)</desc>
<schema>$CIMEROOT/config/xml_schemas/config_batch.xsd</schema>
</entry>
<entry id="WORKFLOW_SPEC_FILE">
<type>char</type>
<default_value>$CIMEROOT/config/$MODEL/machines/config_workflow.xml</default_value>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing workflow (for documentation only - DO NOT EDIT)</desc>
<schema>$CIMEROOT/config/xml_schemas/config_workflow.xsd</schema>
</entry>
<entry id="INPUTDATA_SPEC_FILE">
<type>char</type>
<default_value>$CIMEROOT/config/$MODEL/config_inputdata.xml</default_value>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing inputdata server descriptions (for documentation only - DO NOT EDIT)</desc>
<schema>$CIMEROOT/config/xml_schemas/config_inputdata.xsd</schema>
</entry>
<entry id="GRIDS_SPEC_FILE">
<type>char</type>
<default_value>$CIMEROOT/config/$MODEL/config_grids.xml</default_value>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of all supported model grids, domains and mapping files (for documentation only - DO NOT EDIT)</desc>
</entry>
<entry id="COMPILERS_SPEC_FILE">
<type>char</type>
<default_value>$CIMEROOT/config/$MODEL/machines/config_compilers.xml</default_value>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing compiler specifications for target model primary component (for documentation only - DO NOT EDIT)</desc>
<schema>$CIMEROOT/config/xml_schemas/config_compilers_v2.xsd</schema>
</entry>
<entry id="MACHINES_SPEC_FILE">
<type>char</type>
<default_value>$CIMEROOT/config/$MODEL/machines/config_machines.xml</default_value>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing machine specifications for target model primary component (for documentation only - DO NOT EDIT)</desc>
<schema>$CIMEROOT/config/xml_schemas/config_machines.xsd</schema>
</entry>
<entry id="PIO_SPEC_FILE">
<type>char</type>
<default_value>$CIMEROOT/config/$MODEL/machines/config_pio.xml</default_value>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of pio settings for target model possible machine, compiler, mpilib, compset and/or grid attributes (for documentation only - DO NOT EDIT)</desc>
</entry>
<!-- CONFIG_TESTS_FILE goes to env_test.xml (group "test"), not env_case.xml -->
<entry id="CONFIG_TESTS_FILE">
<type>char</type>
<values>
<value>$CIMEROOT/config/config_tests.xml</value>
</values>
<group>test</group>
<file>env_test.xml</file>
<desc>file containing system test descriptions </desc>
</entry>
<!-- ============================================================ -->
<!-- Filenames for determining compsets and tests file -->
<!-- Depends on component attribute value -->
<!-- ============================================================ -->
<!-- COMPSETS_SPEC_FILE: per-primary-component compset definition file;
     the "component" attribute selects the value by primary component -->
<entry id="COMPSETS_SPEC_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="allactive">$CIMEROOT/config/$MODEL/allactive/config_compsets.xml</value>
<value component="drv" >$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/config_compsets.xml</value>
<value component="cam" >$SRCROOT/components/cam/cime_config/config_compsets.xml</value>
<value component="clm" >$SRCROOT/components/clm/cime_config/config_compsets.xml</value>
<value component="cice" >$SRCROOT/components/cice/cime_config/config_compsets.xml</value>
<value component="mpaso" >$SRCROOT/components/mpas-ocean/cime_config/config_compsets.xml</value>
<value component="mali" >$SRCROOT/components/mpas-albany-landice/cime_config/config_compsets.xml</value>
<value component="mpassi" >$SRCROOT/components/mpas-seaice/cime_config/config_compsets.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of all compsets for primary component (for documentation only - DO NOT EDIT)</desc>
<schema>$CIMEROOT/config/xml_schemas/config_compsets.xsd</schema>
</entry>
<!-- PES_SPEC_FILE: pe-layout file; all active components share the
     allactive layout file, only drv supplies its own -->
<entry id="PES_SPEC_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="allactive">$CIMEROOT/config/$MODEL/allactive/config_pesall.xml</value>
<value component="drv" >$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/config_pes.xml</value>
<value component="cam" >$CIMEROOT/config/$MODEL/allactive/config_pesall.xml</value>
<value component="clm" >$CIMEROOT/config/$MODEL/allactive/config_pesall.xml</value>
<value component="cice" >$CIMEROOT/config/$MODEL/allactive/config_pesall.xml</value>
<value component="mpaso" >$CIMEROOT/config/$MODEL/allactive/config_pesall.xml</value>
<value component="mali" >$CIMEROOT/config/$MODEL/allactive/config_pesall.xml</value>
<value component="mpassi" >$CIMEROOT/config/$MODEL/allactive/config_pesall.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of all pe-layouts for primary component (for documentation only - DO NOT EDIT)</desc>
<schema>$CIMEROOT/config/xml_schemas/config_pes.xsd</schema>
</entry>
<!-- ARCHIVE_SPEC_FILE: per-component short-term archiver spec files.
     The first, attribute-less value is the fallback for components
     without their own config_archive.xml.
     NOTE(review): fallback hard-codes "e3sm" where siblings use $MODEL —
     presumably intentional, but confirm against upstream. -->
<entry id="ARCHIVE_SPEC_FILE">
<type>char</type>
<values>
<value>$CIMEROOT/config/e3sm/config_archive.xml</value>
<value component="drv" >$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/config_archive.xml</value>
<!-- data model components -->
<value component="drof">$CIMEROOT/src/components/data_comps/drof/cime_config/config_archive.xml</value>
<value component="datm">$CIMEROOT/src/components/data_comps/datm/cime_config/config_archive.xml</value>
<value component="dice">$CIMEROOT/src/components/data_comps/dice/cime_config/config_archive.xml</value>
<value component="dlnd">$CIMEROOT/src/components/data_comps/dlnd/cime_config/config_archive.xml</value>
<value component="docn">$CIMEROOT/src/components/data_comps/docn/cime_config/config_archive.xml</value>
<value component="dwav">$CIMEROOT/src/components/data_comps/dwav/cime_config/config_archive.xml</value>
<!-- external model components -->
<value component="cam" >$SRCROOT/components/cam/cime_config/config_archive.xml</value>
<value component="clm" >$SRCROOT/components/clm/cime_config/config_archive.xml</value>
<value component="cice" >$SRCROOT/components/cice/cime_config/config_archive.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of archive files for each component (for documentation only - DO NOT EDIT)</desc>
<schema>$CIMEROOT/config/xml_schemas/config_archive.xsd</schema>
</entry>
<!-- SYSTEM_TESTS_DIR: directories of system-test modules;
     component="any" is the CIME-wide default location -->
<entry id="SYSTEM_TESTS_DIR">
<type>char</type>
<values>
<value component="any">$CIMEROOT/scripts/lib/CIME/SystemTests</value>
<value component="clm">$SRCROOT/components/clm/cime_config/SystemTests</value>
<value component="cam">$SRCROOT/components/cam/cime_config/SystemTests</value>
<value component="cice">$SRCROOT/components/cice/cime_config/SystemTests</value>
</values>
<group>test</group>
<file>env_test.xml</file>
<desc>directories containing cime compatible system test modules</desc>
</entry>
<!-- Test lists and test/user modification directories, selected per
     primary component (same component set for all three entries) -->
<entry id="TESTS_SPEC_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="allactive">$CIMEROOT/config/$MODEL/allactive/testlist_allactive.xml</value>
<value component="drv" >$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/testdefs/testlist_drv.xml</value>
<value component="cam" >$SRCROOT/components/cam/cime_config/testdefs/testlist_cam.xml</value>
<value component="clm" >$SRCROOT/components/clm/cime_config/testdefs/testlist_clm.xml</value>
<value component="cice" >$SRCROOT/components/cice/cime_config/testdefs/testlist_cice.xml</value>
<value component="mosart" >$SRCROOT/components/mosart/cime_config/testdefs/testlist_mosart.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of all system tests for primary component (for documentation only - DO NOT EDIT)</desc>
</entry>
<entry id="TESTS_MODS_DIR">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="allactive">$CIMEROOT/config/$MODEL/testmods_dirs</value>
<value component="drv" >$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/testdefs/testmods_dirs</value>
<value component="cam" >$SRCROOT/components/cam/cime_config/testdefs/testmods_dirs</value>
<value component="clm" >$SRCROOT/components/clm/cime_config/testdefs/testmods_dirs</value>
<value component="cice" >$SRCROOT/components/cice/cime_config/testdefs/testmods_dirs</value>
<value component="mosart" >$SRCROOT/components/mosart/cime_config/testdefs/testmods_dirs</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>directory containing test modifications for primary component tests (for documentation only - DO NOT EDIT)</desc>
</entry>
<entry id="USER_MODS_DIR">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="allactive">$CIMEROOT/config/$MODEL/usermods_dirs</value>
<value component="drv" >$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/usermods_dirs</value>
<value component="cam" >$SRCROOT/components/cam/cime_config/usermods_dirs</value>
<value component="clm" >$SRCROOT/components/clm/cime_config/usermods_dirs</value>
<value component="cice" >$SRCROOT/components/cice/cime_config/usermods_dirs</value>
<value component="mosart" >$SRCROOT/components/mosart/cime_config/usermods_dirs</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>directory containing user modifications for primary components (for documentation only - DO NOT EDIT)</desc>
</entry>
<!-- NAMELIST_DEFINITION_FILE: namelist definition files; external model
     components are still TODO (commented out below) -->
<entry id="NAMELIST_DEFINITION_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="drv" >$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/namelist_definition_drv.xml</value>
<!-- data model components -->
<value component="drof">$CIMEROOT/src/components/data_comps/drof/cime_config/namelist_definition_drof.xml</value>
<value component="datm">$CIMEROOT/src/components/data_comps/datm/cime_config/namelist_definition_datm.xml</value>
<value component="dice">$CIMEROOT/src/components/data_comps/dice/cime_config/namelist_definition_dice.xml</value>
<value component="dlnd">$CIMEROOT/src/components/data_comps/dlnd/cime_config/namelist_definition_dlnd.xml</value>
<value component="docn">$CIMEROOT/src/components/data_comps/docn/cime_config/namelist_definition_docn.xml</value>
<value component="dwav">$CIMEROOT/src/components/data_comps/dwav/cime_config/namelist_definition_dwav.xml</value>
<!-- external model components -->
<!-- TODO
<value component="cam" >$SRCROOT/components/cam/bld/namelist_files/namelist_definition.xml</value>
<value component="cice" >$SRCROOT/components/cice/cime_config/namelist_definition_cice.xml</value>
<value component="clm" >$SRCROOT/components/clm/bld/namelist_files/namelist_definition_clm4_5.xml</value>
<value component="clm" >$SRCROOT/components/clm/bld/namelist_files/namelist_definition_clm4_0.xml</value>
-->
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing namelist_definitions for all components </desc>
<schema>$CIMEROOT/config/xml_schemas/entry_id_namelist.xsd</schema>
</entry>
<!-- =============================================================== -->
<!-- File names for all component specific configuration variables -->
<!-- =============================================================== -->
<!-- Driver/coupler config_component files; two schema versions are
     supported, selected by the target file's own version attribute -->
<entry id="CONFIG_CPL_FILE">
<type>char</type>
<values>
<value>$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing all non-component specific case configuration variables (for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<entry id="CONFIG_CPL_FILE_MODEL_SPECIFIC">
<type>char</type>
<values>
<value>$CIMEROOT/src/drivers/$COMP_INTERFACE/cime_config/config_component_$MODEL.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing all component specific driver configuration variables (for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<!-- CONFIG_ATM_FILE: atmosphere config_component.xml per component
     (active cam; data datm; stub satm; dead/xcpl xatm) -->
<entry id="CONFIG_ATM_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="cam" >$SRCROOT/components/cam/cime_config/config_component.xml</value>
<value component="datm">$CIMEROOT/src/components/data_comps/datm/cime_config/config_component.xml</value>
<value component="satm">$CIMEROOT/src/components/stub_comps/satm/cime_config/config_component.xml</value>
<value component="xatm">$CIMEROOT/src/components/xcpl_comps/xatm/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<!-- CONFIG_LND_FILE: land config_component.xml per component.
     NOTE(review): fixed doubled slash in the vic path ("$MODEL//vic");
     harmless on POSIX but inconsistent with every sibling path. -->
<entry id="CONFIG_LND_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="vic" >$CIMEROOT/config/$MODEL/vic/config_component.xml</value>
<value component="clm" >$SRCROOT/components/clm/cime_config/config_component.xml</value>
<value component="dlnd">$CIMEROOT/src/components/data_comps/dlnd/cime_config/config_component.xml</value>
<value component="slnd">$CIMEROOT/src/components/stub_comps/slnd/cime_config/config_component.xml</value>
<value component="xlnd">$CIMEROOT/src/components/xcpl_comps/xlnd/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<!-- CONFIG_ROF_FILE / CONFIG_ICE_FILE: runoff and sea-ice
     config_component.xml locations per component -->
<entry id="CONFIG_ROF_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="mosart" >$SRCROOT/components/mosart/cime_config/config_component.xml</value>
<value component="drof" >$CIMEROOT/src/components/data_comps/drof/cime_config/config_component.xml</value>
<value component="srof" >$CIMEROOT/src/components/stub_comps/srof/cime_config/config_component.xml</value>
<value component="xrof" >$CIMEROOT/src/components/xcpl_comps/xrof/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<entry id="CONFIG_ICE_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="mpassi">$SRCROOT/components/mpas-seaice/cime_config/config_component.xml</value>
<value component="cice">$SRCROOT/components/cice/cime_config/config_component.xml</value>
<value component="dice">$CIMEROOT/src/components/data_comps/dice/cime_config/config_component.xml</value>
<value component="sice">$CIMEROOT/src/components/stub_comps/sice/cime_config/config_component.xml</value>
<value component="xice">$CIMEROOT/src/components/xcpl_comps/xice/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<!-- CONFIG_OCN_FILE / CONFIG_GLC_FILE / CONFIG_IAC_FILE: ocean,
     land-ice and integrated-assessment config_component.xml locations -->
<entry id="CONFIG_OCN_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="mpaso" >$SRCROOT/components/mpas-ocean/cime_config/config_component.xml</value>
<value component="docn" >$CIMEROOT/src/components/data_comps/docn/cime_config/config_component.xml</value>
<value component="socn" >$CIMEROOT/src/components/stub_comps/socn/cime_config/config_component.xml</value>
<value component="xocn" >$CIMEROOT/src/components/xcpl_comps/xocn/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<entry id="CONFIG_GLC_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="mali">$SRCROOT/components/mpas-albany-landice/cime_config/config_component.xml</value>
<value component="dglc">$CIMEROOT/src/components/data_comps/dglc/cime_config/config_component.xml</value>
<value component="sglc">$CIMEROOT/src/components/stub_comps/sglc/cime_config/config_component.xml</value>
<value component="xglc">$CIMEROOT/src/components/xcpl_comps/xglc/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<!-- iac: only the stub component exists -->
<entry id="CONFIG_IAC_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="siac">$CIMEROOT/src/components/stub_comps/siac/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<!-- CONFIG_WAV_FILE / CONFIG_ESP_FILE: wave and external-system-processing
     config_component.xml locations. CONFIG_ESP_FILE also carries a
     default_value (sesp) so it resolves without a component match. -->
<entry id="CONFIG_WAV_FILE">
<type>char</type>
<default_value>unset</default_value>
<values>
<value component="ww" >$SRCROOT/components/ww3/cime_config/config_component.xml</value>
<value component="dwav">$CIMEROOT/src/components/data_comps/dwav/cime_config/config_component.xml</value>
<value component="swav">$CIMEROOT/src/components/stub_comps/swav/cime_config/config_component.xml</value>
<value component="xwav">$CIMEROOT/src/components/xcpl_comps/xwav/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
<entry id="CONFIG_ESP_FILE">
<type>char</type>
<default_value>$CIMEROOT/src/components/stub_comps/sesp/cime_config/config_component.xml</default_value>
<values>
<value component="sesp">$CIMEROOT/src/components/stub_comps/sesp/cime_config/config_component.xml</value>
<value component="desp">$CIMEROOT/src/components/data_comps/desp/cime_config/config_component.xml</value>
</values>
<group>case_last</group>
<file>env_case.xml</file>
<desc>file containing specification of component specific definitions and values(for documentation only - DO NOT EDIT)</desc>
<schema version="2.0">$CIMEROOT/config/xml_schemas/entry_id.xsd</schema>
<schema version="3.0">$CIMEROOT/config/xml_schemas/entry_id_version3.xsd</schema>
</entry>
</entry_id>
E3SM XML settings for defining supported grids.
<?xml version="1.0"?>
<grid_data version="2.0">
<help>
=========================================
GRID naming convention
=========================================
The notation for the grid longname is
a%name_l%name_oi%name_r%name_m%mask_g%name_w%name
where
a% => atm, l% => lnd, oi% => ocn/ice, r% => river, m% => mask, g% => glc, w% => wav
Supported out of the box grid configurations are given via alias specification in
the file "config_grids.xml". Each grid alias can also be associated with the
following optional attributes
compset (Regular expression for compset matches that are required for this grid)
not_compset (Regular expression for compset matches that are not permitted for this grid)
Using the alias and the optional "compset" and "not_compset" attributes a grid longname is created
Note that the mask is for information only - and is not an attribute of the grid
By default, if the mask is not specified below, it will be set to the ocnice grid
And if there is no ocnice grid (such as for single column), the mask is null since it does not mean anything
</help>
<grids>
<!-- Default component grids chosen by compset match when an alias does not
     specify them; stub compsets (S*) get the null grid -->
<model_grid_defaults>
<grid name="atm" compset="SATM">null</grid>
<grid name="lnd" compset="SLND">null</grid>
<grid name="ocnice" compset="SOCN">null</grid>
<grid name="rof" compset="SROF">null</grid>
<!-- NOTE(review): a rof default keyed on the wave compset DWAV looks
     out of place next to the wav DWAV entry below - confirm upstream -->
<grid name="rof" compset="DWAV">rx1</grid>
<grid name="rof" compset="RTM">r05</grid>
<grid name="rof" compset="MOSART">r05</grid>
<grid name="rof" compset="DROF">rx1</grid>
<grid name="rof" compset="DROF%CPLHIST">r05</grid>
<grid name="rof" compset="XROF">r05</grid>
<grid name="glc" compset="SGLC">null</grid>
<grid name="glc" compset="CISM1">gland5UM</grid>
<grid name="glc" compset="CISM2">gland4</grid>
<grid name="glc" compset="XGLC">gland4</grid>
<grid name="wav" compset="SWAV">null</grid>
<grid name="wav" compset="DWAV">ww3a</grid>
<grid name="wav" compset="WW3">ww3a</grid>
<grid name="wav" compset="XWAV">ww3a</grid>
<grid name="iac" compset="SIAC">null</grid>
</model_grid_defaults>
<!-- Data-atmosphere and single-point/regional CLM grid aliases.
     The optional compset attribute restricts the alias to matching
     compset longnames; mask "reg" marks regional (non-ocean) grids. -->
<model_grid alias="g16_g16" compset="DATM.+DROF">
<grid name="atm">gx1v6</grid>
<grid name="lnd">gx1v6</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="CLM_USRDAT" compset="(DATM|SATM).+CLM">
<grid name="atm">CLM_USRDAT</grid>
<grid name="lnd">CLM_USRDAT</grid>
<grid name="ocnice">CLM_USRDAT</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="1x1_numaIA" compset="DATM.+CLM">
<grid name="atm">1x1_numaIA</grid>
<grid name="lnd">1x1_numaIA</grid>
<grid name="ocnice">1x1_numaIA</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="1x1_brazil" compset="DATM.+CLM">
<grid name="atm">1x1_brazil</grid>
<grid name="lnd">1x1_brazil</grid>
<grid name="ocnice">1x1_brazil</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="1x1_smallvilleIA" compset="(DATM|SATM).+CLM">
<grid name="atm">1x1_smallvilleIA</grid>
<grid name="lnd">1x1_smallvilleIA</grid>
<grid name="ocnice">1x1_smallvilleIA</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="1x1_camdenNJ" compset="DATM.+CLM">
<grid name="atm">1x1_camdenNJ</grid>
<grid name="lnd">1x1_camdenNJ</grid>
<grid name="ocnice">1x1_camdenNJ</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="1x1_mexicocityMEX" compset="DATM.+CLM">
<grid name="atm">1x1_mexicocityMEX</grid>
<grid name="lnd">1x1_mexicocityMEX</grid>
<grid name="ocnice">1x1_mexicocityMEX</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="1x1_vancouverCAN" compset="DATM.+CLM">
<grid name="atm">1x1_vancouverCAN</grid>
<grid name="lnd">1x1_vancouverCAN</grid>
<grid name="ocnice">1x1_vancouverCAN</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="1x1_tropicAtl" compset="DATM.+CLM">
<grid name="atm">1x1_tropicAtl</grid>
<grid name="lnd">1x1_tropicAtl</grid>
<grid name="ocnice">1x1_tropicAtl</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="1x1_urbanc_alpha" compset="DATM.+CLM">
<grid name="atm">1x1_urbanc_alpha</grid>
<grid name="lnd">1x1_urbanc_alpha</grid>
<grid name="ocnice">1x1_urbanc_alpha</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="5amazon" compset="DATM.+CLM">
<grid name="atm">5x5_amazon</grid>
<grid name="lnd">5x5_amazon</grid>
<grid name="ocnice">5x5_amazon</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>reg</mask>
</model_grid>
<model_grid alias="hcru_hcru" compset="(DATM|SATM).+CLM">
<grid name="atm">360x720cru</grid>
<grid name="lnd">360x720cru</grid>
<grid name="ocnice">360x720cru</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>360x720cru</mask>
</model_grid>
<model_grid alias="NLDAS_NLDAS" compset="(DATM|SATM).+CLM">
<grid name="atm">NLDAS</grid>
<grid name="lnd">NLDAS</grid>
<grid name="ocnice">NLDAS</grid>
<grid name="rof">NLDAS</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>NLDAS</mask>
</model_grid>
<!-- eulerian grids -->
<model_grid alias="T31_g37">
<grid name="atm">T31</grid>
<grid name="lnd">T31</grid>
<grid name="ocnice">gx3v7</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="T31_T31" compset="(DOCN|XOCN|SOCN)">
<grid name="atm">T31</grid>
<grid name="lnd">T31</grid>
<grid name="ocnice">T31</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="T42_T42" compset="(DOCN|XOCN|SOCN)">
<grid name="atm">T42</grid>
<grid name="lnd">T42</grid>
<grid name="ocnice">T42</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>usgs</mask>
</model_grid>
<model_grid alias="T62_g37" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">gx3v7</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="T62_g16" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="T62_m120" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">mpas120</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>mpas120</mask>
</model_grid>
<model_grid alias="T62_mgx1" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">mpasgx1</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>mpasgx1</mask>
</model_grid>
<model_grid alias="T62_oEC60to30" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oEC60to30</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30</mask>
</model_grid>
<model_grid alias="T62_oEC60to30v3" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oEC60to30v3</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3</mask>
</model_grid>
<model_grid alias="T62_oEC60to30wLI" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oEC60to30wLI</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30wLI</mask>
</model_grid>
<model_grid alias="T62_oEC60to30v3wLI" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oEC60to30v3wLI</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3wLI</mask>
</model_grid>
<model_grid alias="T62_oRRS30to10" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oRRS30to10</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS30to10</mask>
</model_grid>
<model_grid alias="T62_oRRS30to10wLI" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oRRS30to10wLI</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS30to10wLI</mask>
</model_grid>
<model_grid alias="T62_oRRS30to10v3" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oRRS30to10v3</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS30to10v3</mask>
</model_grid>
<model_grid alias="T62_oRRS30to10v3wLI" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oRRS30to10v3wLI</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS30to10v3wLI</mask>
</model_grid>
<model_grid alias="T62_oRRS18to6" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oRRS18to6</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS18to6</mask>
</model_grid>
<model_grid alias="T62_oRRS18to6v3" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oRRS18to6v3</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS18to6v3</mask>
</model_grid>
<model_grid alias="T62_oRRS15to5" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oRRS15to5</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS15to5</mask>
</model_grid>
<model_grid alias="T62_oARRM60to10" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oARRM60to10</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oARRM60to10</mask>
</model_grid>
<model_grid alias="T62_oARRM60to6" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oARRM60to6</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oARRM60to6</mask>
</model_grid>
<model_grid alias="TL319_oEC60to30v3" compset="(DATM|XATM|SATM)">
<grid name="atm">TL319</grid>
<grid name="lnd">TL319</grid>
<grid name="ocnice">oEC60to30v3</grid>
<grid name="rof">JRA025</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3</mask>
</model_grid>
<model_grid alias="TL319_oARRM60to10" compset="(DATM|XATM|SATM)">
<grid name="atm">TL319</grid>
<grid name="lnd">TL319</grid>
<grid name="ocnice">oARRM60to10</grid>
<grid name="rof">JRA025</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oARRM60to10</mask>
</model_grid>
<model_grid alias="TL319_oARRM60to6" compset="(DATM|XATM|SATM)">
<grid name="atm">TL319</grid>
<grid name="lnd">TL319</grid>
<grid name="ocnice">oARRM60to6</grid>
<grid name="rof">JRA025</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oARRM60to6</mask>
</model_grid>
<!-- finite volume grids -->
<model_grid alias="f02_g16">
<grid name="atm">0.23x0.31</grid>
<grid name="lnd">0.23x0.31</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f05_g16">
<grid name="atm">0.47x0.63</grid>
<grid name="lnd">0.47x0.63</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f09_g16">
<grid name="atm">0.9x1.25</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f09_m120">
<grid name="atm">0.9x1.25</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">mpas120</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>mpas120</mask>
</model_grid>
<model_grid alias="f09_g16" compset="SGLC.+DWAV">
<grid name="atm">0.9x1.25</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">ww3a</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f19_g16">
<grid name="atm">1.9x2.5</grid>
<grid name="lnd">1.9x2.5</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f19_g16" compset="SGLC.+DWAV">
<grid name="atm">1.9x2.5</grid>
<grid name="lnd">1.9x2.5</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">ww3a</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f45_g37">
<grid name="atm">4x5</grid>
<grid name="lnd">4x5</grid>
<grid name="ocnice">gx3v7</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="f19_g16_r01">
<grid name="atm">1.9x2.5</grid>
<grid name="lnd">1.9x2.5</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r01</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f02_f02" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">0.23x0.31</grid>
<grid name="lnd">0.23x0.31</grid>
<grid name="ocnice">0.23x0.31</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f09_f09" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">0.9x1.25</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">0.9x1.25</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f19_f19" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">1.9x2.5</grid>
<grid name="lnd">1.9x2.5</grid>
<grid name="ocnice">1.9x2.5</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f25_f25" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">2.5x3.33</grid>
<grid name="lnd">2.5x3.33</grid>
<grid name="ocnice">2.5x3.33</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f45_f45" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">4x5</grid>
<grid name="lnd">4x5</grid>
<grid name="ocnice">4x5</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="f10_f10" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">10x15</grid>
<grid name="lnd">10x15</grid>
<grid name="ocnice">10x15</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>usgs</mask>
</model_grid>
<!-- spectral element grids -->
<model_grid alias="ne4_oQU480">
<grid name="atm">ne4np4</grid>
<grid name="lnd">ne4np4</grid>
<grid name="ocnice">oQU480</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU480</mask>
</model_grid>
<model_grid alias="ne4_oQU240">
<grid name="atm">ne4np4</grid>
<grid name="lnd">ne4np4</grid>
<grid name="ocnice">oQU240</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne4_oQU240wLI">
<grid name="atm">ne4np4</grid>
<grid name="lnd">ne4np4</grid>
<grid name="ocnice">oQU240wLI</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240wLI</mask>
</model_grid>
<model_grid alias="ne11_oQU240">
<grid name="atm">ne11np4</grid>
<grid name="lnd">ne11np4</grid>
<grid name="ocnice">oQU240</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne16_g37">
<grid name="atm">ne16np4</grid>
<grid name="lnd">ne16np4</grid>
<grid name="ocnice">gx3v7</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="ne30_g16">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne30_m120">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">mpas120</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>mpas120</mask>
</model_grid>
<model_grid alias="ne30_m120_g">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">mpas120</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.gis20km</grid>
<grid name="wav">null</grid>
<mask>mpas120</mask>
</model_grid>
<model_grid alias="ne30_f19_g16">
<grid name="atm">ne30np4</grid>
<grid name="lnd">1.9x2.5</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne30_f09_g16">
<grid name="atm">ne30np4</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne60_g16">
<grid name="atm">ne60np4</grid>
<grid name="lnd">ne60np4</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne120_g16">
<grid name="atm">ne120np4</grid>
<grid name="lnd">ne120np4</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne120_oRRS18">
<grid name="atm">ne120np4</grid>
<grid name="lnd">ne120np4</grid>
<grid name="ocnice">oRRS18to6</grid>
<grid name="rof">r0125</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS18to6</mask>
</model_grid>
<model_grid alias="ne120_oRRS18v3">
<grid name="atm">ne120np4</grid>
<grid name="lnd">ne120np4</grid>
<grid name="ocnice">oRRS18to6v3</grid>
<grid name="rof">r0125</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS18to6v3</mask>
</model_grid>
<model_grid alias="ne120_oRRS15">
<grid name="atm">ne120np4</grid>
<grid name="lnd">ne120np4</grid>
<grid name="ocnice">oRRS15to5</grid>
<grid name="rof">r0125</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS15to5</mask>
</model_grid>
<model_grid alias="ne240_f02_g16">
<grid name="atm">ne240np4</grid>
<grid name="lnd">0.23x0.31</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne4_ne4" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne4np4</grid>
<grid name="lnd">ne4np4</grid>
<grid name="ocnice">ne4np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne4pg1_ne4pg1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne4np4.pg1</grid>
<grid name="lnd">ne4np4.pg1</grid>
<grid name="ocnice">ne4np4.pg1</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne4pg2_ne4pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne4np4.pg2</grid>
<grid name="lnd">ne4np4.pg2</grid>
<grid name="ocnice">ne4np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne8_ne8" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne8np4</grid>
<grid name="lnd">ne8np4</grid>
<grid name="ocnice">ne8np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne8pg1_ne8pg1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne8np4.pg1</grid>
<grid name="lnd">ne8np4.pg1</grid>
<grid name="ocnice">ne8np4.pg1</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne8pg2_ne8pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne8np4.pg2</grid>
<grid name="lnd">ne8np4.pg2</grid>
<grid name="ocnice">ne8np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne11_ne11" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne11np4</grid>
<grid name="lnd">ne11np4</grid>
<grid name="ocnice">ne11np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="armx8v3_armx8v3" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne0np4_arm_x8v3_lowcon</grid>
<grid name="lnd">ne0np4_arm_x8v3_lowcon</grid>
<grid name="ocnice">ne0np4_arm_x8v3_lowcon</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="conusx4v1_conusx4v1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne0np4_conus_x4v1_lowcon</grid>
<grid name="lnd">ne0np4_conus_x4v1_lowcon</grid>
<grid name="ocnice">ne0np4_conus_x4v1_lowcon</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>tx0.1v2</mask>
</model_grid>
<model_grid alias="svalbardx8v1_svalbardx8v1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne0np4_svalbard_x8v1_lowcon</grid>
<grid name="lnd">ne0np4_svalbard_x8v1_lowcon</grid>
<grid name="ocnice">ne0np4_svalbard_x8v1_lowcon</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>tx0.1v2</mask>
</model_grid>
<model_grid alias="sooberingoax4x8v1_sooberingoax4x8v1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne0np4_sooberingoa_x4x8v1_lowcon</grid>
<grid name="lnd">ne0np4_sooberingoa_x4x8v1_lowcon</grid>
<grid name="ocnice">ne0np4_sooberingoa_x4x8v1_lowcon</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>tx0.1v2</mask>
</model_grid>
<model_grid alias="enax4v1_enax4v1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne0np4_enax4v1</grid>
<grid name="lnd">ne0np4_enax4v1</grid>
<grid name="ocnice">ne0np4_enax4v1</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS18to6</mask>
</model_grid>
<model_grid alias="enax4v1_ne30_enax4v1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne0np4_enax4v1</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">ne0np4_enax4v1</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS18to6</mask>
</model_grid>
<model_grid alias="ne16_ne16" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne16np4</grid>
<grid name="lnd">ne16np4</grid>
<grid name="ocnice">ne16np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="ne16pg1_ne16pg1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne16np4.pg1</grid>
<grid name="lnd">ne16np4.pg1</grid>
<grid name="ocnice">ne16np4.pg1</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="ne16pg2_ne16pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne16np4.pg2</grid>
<grid name="lnd">ne16np4.pg2</grid>
<grid name="ocnice">ne16np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="ne30_ne30" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">ne30np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne30pg1_ne30pg1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne30np4.pg1</grid>
<grid name="lnd">ne30np4.pg1</grid>
<grid name="ocnice">ne30np4.pg1</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne30pg2_ne30pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne30np4.pg2</grid>
<grid name="lnd">ne30np4.pg2</grid>
<grid name="ocnice">ne30np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne32_ne32" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne32np4</grid>
<grid name="lnd">ne32np4</grid>
<grid name="ocnice">ne32np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne32pg2_ne32pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne32np4.pg2</grid>
<grid name="lnd">ne32np4.pg2</grid>
<grid name="ocnice">ne32np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne30_oEC_ICG" compset="(CAM5.+CLM.+MPASO%SPUNUP)">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30_ICG</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30</mask>
</model_grid>
<model_grid alias="ne30_oECv3_ICG" compset="(CAM5.+CLM.+MPASO%SPUNUP)">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30v3_ICG</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3</mask>
</model_grid>
<model_grid alias="ne30pg2_oECv3_ICG" compset="(CAM5.+CLM.+MPASO%SPUNUP)">
<grid name="atm">ne30np4.pg2</grid>
<grid name="lnd">ne30np4.pg2</grid>
<grid name="ocnice">oEC60to30v3_ICG</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3</mask>
</model_grid>
<model_grid alias="ne30_oECv3wLI_ICG" compset="(CAM5.+CLM.+MPASO%SPUNUP)">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30v3wLI_ICG</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3wLI</mask>
</model_grid>
<model_grid alias="ne25_ne25" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne25np4</grid>
<grid name="lnd">ne25np4</grid>
<grid name="ocnice">ne25np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne25pg2_ne25pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne25np4.pg2</grid>
<grid name="lnd">ne25np4.pg2</grid>
<grid name="ocnice">ne25np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne35_ne35" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne35np4</grid>
<grid name="lnd">ne35np4</grid>
<grid name="ocnice">ne35np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne35pg2_ne35pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne35np4.pg2</grid>
<grid name="lnd">ne35np4.pg2</grid>
<grid name="ocnice">ne35np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne40_ne40" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne40np4</grid>
<grid name="lnd">ne40np4</grid>
<grid name="ocnice">ne40np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne40pg2_ne40pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne40np4.pg2</grid>
<grid name="lnd">ne40np4.pg2</grid>
<grid name="ocnice">ne40np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne45_ne45" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne45np4</grid>
<grid name="lnd">ne45np4</grid>
<grid name="ocnice">ne45np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne45pg2_ne45pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne45np4.pg2</grid>
<grid name="lnd">ne45np4.pg2</grid>
<grid name="ocnice">ne45np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne120_oRRS18v3_ICG" compset="(CAM5.+CLM.+MPASO%SPUNUP)">
<grid name="atm">ne120np4</grid>
<grid name="lnd">ne120np4</grid>
<grid name="ocnice">oRRS18to6v3_ICG</grid>
<grid name="rof">r0125</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS18to6v3</mask>
</model_grid>
<model_grid alias="ne60_ne60" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne60np4</grid>
<grid name="lnd">ne60np4</grid>
<grid name="ocnice">ne60np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne60pg1_ne60pg1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne60np4.pg1</grid>
<grid name="lnd">ne60np4.pg1</grid>
<grid name="ocnice">ne60np4.pg1</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne60pg2_ne60pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne60np4.pg2</grid>
<grid name="lnd">ne60np4.pg2</grid>
<grid name="ocnice">ne60np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne64_ne64" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne64np4</grid>
<grid name="lnd">ne64np4</grid>
<grid name="ocnice">ne64np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne64pg2_ne64pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne64np4.pg2</grid>
<grid name="lnd">ne64np4.pg2</grid>
<grid name="ocnice">ne64np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne120_ne120" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne120np4</grid>
<grid name="lnd">ne120np4</grid>
<grid name="ocnice">ne120np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne120pg2_ne120pg2" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne120np4.pg2</grid>
<grid name="lnd">ne120np4.pg2</grid>
<grid name="ocnice">ne120np4.pg2</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne240_ne240" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne240np4</grid>
<grid name="lnd">ne240np4</grid>
<grid name="ocnice">ne240np4</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>tx0.1v2</mask>
</model_grid>
<model_grid alias="ne512np4_360x720cru_ne512np4">
<grid name="atm">ne512np4</grid>
<grid name="lnd">360x720cru</grid>
<grid name="ocnice">ne512np4</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS15to5</mask>
</model_grid>
<model_grid alias="ne512np4_360x720cru_oRRS15to5">
<grid name="atm">ne512np4</grid>
<grid name="lnd">360x720cru</grid>
<grid name="ocnice">oRRS15to5</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS15to5</mask>
</model_grid>
<model_grid alias="ne1024np4_360x720cru_ne1024np4" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne1024np4</grid>
<grid name="lnd">360x720cru</grid>
<grid name="ocnice">ne1024np4</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS15to5</mask>
</model_grid>
<model_grid alias="ne1024np4_360x720cru_oRRS15to5" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne1024np4</grid>
<grid name="lnd">360x720cru</grid>
<grid name="ocnice">oRRS15to5</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS15to5</mask>
</model_grid>
<!-- mali grids -->
<model_grid alias="ne30_oECv3_aisgis" compset="_MALI">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30v3</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.aisgis20km</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3</mask>
</model_grid>
<model_grid alias="ne30_oECv3wLI_aisgis" compset="_MALI">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30v3wLI</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.aisgis20km</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3wLI</mask>
</model_grid>
<model_grid alias="ne30_oECv3_gis" compset="_MALI">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30v3</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.gis20km</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3</mask>
</model_grid>
<model_grid alias="f09_g16_g" compset="_MALI">
<grid name="atm">0.9x1.25</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.gis20km</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f09_g16_a" compset="_MALI">
<grid name="atm">0.9x1.25</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.ais20km</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<!-- mali + mpaso grids -->
<model_grid alias="T62_m120_g" compset="MPASO.*_MALI">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">mpas120</grid>
<grid name="rof">rx1</grid>
<grid name="glc">mpas.gis20km</grid>
<grid name="wav">null</grid>
<mask>mpas120</mask>
</model_grid>
<model_grid alias="f09_oEC_a" compset="MPASO.*_MALI">
<grid name="atm">0.9x1.25</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">oEC60to30</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.ais20km</grid>
<grid name="wav">null</grid>
<mask>oEC60to30</mask>
</model_grid>
<!-- new runoff grids for data runoff model DROF -->
<model_grid alias="T31_g37_rx1" compset="_DROF">
<grid name="atm">T31</grid>
<grid name="lnd">T31</grid>
<grid name="ocnice">gx3v7</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="f45_g37_rx1" compset="_DROF">
<grid name="atm">4x5</grid>
<grid name="lnd">4x5</grid>
<grid name="ocnice">gx3v7</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="f19_g16_rx1" compset="_DROF">
<grid name="atm">1.9x2.5</grid>
<grid name="lnd">1.9x2.5</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne30_g16_rx1" compset="_DROF">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="ne30_f19_g16_rx1" compset="_DROF">
<grid name="atm">ne30np4</grid>
<grid name="lnd">1.9x2.5</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>gx1v6</mask>
</model_grid>
<!-- ww3 grids -->
<model_grid alias="ww3a_ww3a" compset="_WW3">
<grid name="atm">ww3a</grid>
<grid name="lnd">ww3a</grid>
<grid name="ocnice">ww3a</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">ww3a</grid>
<mask>ww3a</mask>
</model_grid>
<model_grid alias="f19_g16_r05_ww3" compset="(_DWAV|_XWAV|_WW3)">
<grid name="atm">1.9x2.5</grid>
<grid name="lnd">1.9x2.5</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">ww3a</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="f09_g16_r05_ww3" compset="(_DWAV|_XWAV|_WW3)">
<grid name="atm">0.9x1.25</grid>
<grid name="lnd">0.9x1.25</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">ww3a</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="T31_g37_r05_ww3" compset="(_DWAV|_XWAV|_WW3)">
<grid name="atm">T31</grid>
<grid name="lnd">T31</grid>
<grid name="ocnice">gx3v7</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">ww3a</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="T31_g37_rx1_ww3" compset="(_DROF.*_DWAV|_DROF.*_WW3)">
<grid name="atm">T31</grid>
<grid name="lnd">T31</grid>
<grid name="ocnice">gx3v7</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">ww3a</grid>
<mask>gx3v7</mask>
</model_grid>
<model_grid alias="T62_g16_rx1_ww3" compset="(DATM.+DWAV|XATM.+XWAV|SATM.+SWAV)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">gx1v6</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">ww3a</grid>
<mask>gx1v6</mask>
</model_grid>
<model_grid alias="T62_oQU480" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oQU480</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU480</mask>
</model_grid>
<model_grid alias="T62_oQU240" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oQU240</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="T62_oQU240wLI" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oQU240wLI</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240wLI</mask>
</model_grid>
<model_grid alias="T62_oQU120" compset="(DATM|XATM|SATM)">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oQU120</grid>
<grid name="rof">rx1</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU120</mask>
</model_grid>
<model_grid alias="ne16_oQU240">
<grid name="atm">ne16np4</grid>
<grid name="lnd">ne16np4</grid>
<grid name="ocnice">oQU240</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne16_oQU240_a">
<grid name="atm">ne16np4</grid>
<grid name="lnd">ne16np4</grid>
<grid name="ocnice">oQU240</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.ais20km</grid>
<grid name="wav">null</grid>
<mask>oQU240</mask>
</model_grid>
<model_grid alias="ne30_oQU120">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oQU120</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oQU120</mask>
</model_grid>
<model_grid alias="ne30_oQU120_a">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oQU120</grid>
<grid name="rof">r05</grid>
<grid name="glc">mpas.ais20km</grid>
<grid name="wav">null</grid>
<mask>oQU120</mask>
</model_grid>
<model_grid alias="ne30_oEC">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30</mask>
</model_grid>
<model_grid alias="ne30_oECv3">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30v3</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3</mask>
</model_grid>
<model_grid alias="ne30_oECwLI">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30wLI</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30wLI</mask>
</model_grid>
<model_grid alias="ne30_oECv3wLI">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oEC60to30v3wLI</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3wLI</mask>
</model_grid>
<model_grid alias="ne30_oRRS30">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oRRS30to10</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS30to10</mask>
</model_grid>
<model_grid alias="ne30_oRRS30wLI">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oRRS30to10wLI</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS30to10wLI</mask>
</model_grid>
<model_grid alias="ne30_oRRS30v3">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oRRS30to10v3</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS30to10v3</mask>
</model_grid>
<model_grid alias="ne30_oRRS30v3wLI">
<grid name="atm">ne30np4</grid>
<grid name="lnd">ne30np4</grid>
<grid name="ocnice">oRRS30to10v3wLI</grid>
<grid name="rof">r05</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS30to10v3wLI</mask>
</model_grid>
<model_grid alias="twpx4v1_twpx4v1" compset="(DOCN|XOCN|SOCN|AQP1)">
<grid name="atm">ne0np4_twpx4v1</grid>
<grid name="lnd">ne0np4_twpx4v1</grid>
<grid name="ocnice">ne0np4_twpx4v1</grid>
<grid name="rof">null</grid>
<grid name="glc">null</grid>
<grid name="wav">null</grid>
<mask>oRRS18to6v3</mask>
</model_grid>
<model_grid alias="T62_oQU120_ais20" compset="MPASO.*_MALI">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oQU120</grid>
<grid name="rof">rx1</grid>
<grid name="glc">mpas.ais20km</grid>
<grid name="wav">null</grid>
<mask>oQU120</mask>
</model_grid>
<model_grid alias="T62_oEC60to30v3wLI_ais20" compset="MPASO.*_MALI">
<grid name="atm">T62</grid>
<grid name="lnd">T62</grid>
<grid name="ocnice">oEC60to30v3wLI</grid>
<grid name="rof">rx1</grid>
<grid name="glc">mpas.ais20km</grid>
<grid name="wav">null</grid>
<mask>oEC60to30v3wLI</mask>
</model_grid>
<!-- The following grid is only used for ADWAV testing -->
<model_grid alias="ww3a" compset="_WW3|DWAV">
<grid name="wav">ww3a</grid>
</model_grid>
</grids>
<domains>
<!-- ======================================================== -->
<!-- Component grid domain specifications -->
<!-- ======================================================== -->
<domain name="reg">
<nx/>
<ny/>
<desc>regional grid mask: </desc>
</domain>
<domain name="usgs">
<nx/>
<ny/>
<desc>USGS mask</desc>
</domain>
<domain name="null">
<nx>0</nx>
<ny>0</ny>
<desc>null is no grid: </desc>
</domain>
<domain name="CLM_USRDAT">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.${CLM_USRDAT_NAME}_navy.nc</file>
<desc>user specified domain - only valid for DATM/CLM compset</desc>
</domain>
<domain name="1x1_numaIA">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.1x1pt-numaIA_navy.110106.nc</file>
<desc>1x1 Numa Iowa -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="1x1_brazil">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.1x1pt-brazil_navy.090715.nc</file>
<desc>1x1 Brazil -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="1x1_smallvilleIA">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.1x1pt-smallvilleIA_test.110106.nc</file>
<desc>1x1 Smallville Iowa Crop Test Case -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="1x1_camdenNJ">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.1x1pt-camdenNJ_navy.111004.nc</file>
<desc>1x1 Camden New Jersey -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="1x1_mexicocityMEX">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.1x1pt-mexicocityMEX_navy.090715.nc</file>
<desc>1x1 Mexico City Mexico -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="1x1_vancouverCAN">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.1x1pt-vancouverCAN_navy.090715.nc</file>
<desc>1x1 Vancouver Canada -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="1x1_tropicAtl">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.1x1pt-tropicAtl_test.111004.nc</file>
<desc>1x1 Tropical Atlantic Test Case -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="1x1_urbanc_alpha">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.1x1pt-urbanc_alpha_test.110201.nc</file>
<desc>1x1 Urban C Alpha Test Case -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="5x5_amazon">
<nx>1</nx>
<ny>1</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.5x5pt-amazon_navy.090715.nc</file>
<desc>5x5 Amazon regional case -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="360x720cru">
<nx>720</nx>
<ny>360</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.360x720_cruncep.100429.nc</file>
<file grid="atm|lnd" mask="oRRS15to5">$DIN_LOC_ROOT/share/domains/domain.lnd.360x720cru_oRRS15to5.190417.nc</file>
<desc>Exact half-degree CRUNCEP datm forcing grid with CRUNCEP land-mask -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="NLDAS">
<nx>464</nx>
<ny>224</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.nldas2_0224x0464_c110415.nc</file>
<desc>NLDAS US one eighth degree grid -- only valid for DATM/CLM compset</desc>
</domain>
<domain name="0.23x0.31">
<nx>1152</nx>
<ny>768</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.fv0.23x0.31_gx1v6.100517.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.0.23x0.31_gx1v6_101108.nc</file>
<desc>0.23x0.31 is FV 1/4-deg grid:</desc>
</domain>
<domain name="0.47x0.63">
<nx>576</nx>
<ny>384</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.fv0.47x0.63_gx1v6.090407.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.0.47x0.63_gx1v6_090408.nc</file>
<desc>0.47x0.63 is FV 1/2-deg grid:</desc>
</domain>
<domain name="0.9x1.25">
<nx>288</nx>
<ny>192</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.fv0.9x1.25_gx1v6.090309.nc</file>
<file grid="atm|lnd" mask="mp120v1">$DIN_LOC_ROOT/share/domains/domain.lnd.fv0.9x1.25_mp120v1.111018.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.0.9x1.25_gx1v6_090403.nc</file>
<desc>0.9x1.25 is FV 1-deg grid:</desc>
</domain>
<domain name="1.9x2.5">
<nx>144</nx>
<ny>96</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.fv1.9x2.5_gx1v6.090206.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.1.9x2.5_gx1v6_090403.nc</file>
<desc>1.9x2.5 is FV 2-deg grid:</desc>
</domain>
<domain name="4x5">
<nx>72</nx>
<ny>46</ny>
<file grid="atm|lnd" mask="gx3v7">$DIN_LOC_ROOT/share/domains/domain.lnd.fv4x5_gx3v7.091218.nc</file>
<file grid="ice|ocn" mask="gx3v7">$DIN_LOC_ROOT/share/domains/domain.ocn.4x5_gx3v7_100120.nc</file>
<desc>4x5 is FV 4-deg grid:</desc>
</domain>
<domain name="2.5x3.33">
<nx>108</nx>
<ny>72</ny>
<file grid="atm|lnd" mask="gx3v7">$DIN_LOC_ROOT/share/domains/domain.lnd.fv2.5x3.33_gx3v7.110223.nc</file>
<file grid="ice|ocn" mask="gx3v7">$DIN_LOC_ROOT/share/domains/domain.ocn.fv2.5x3.33_gx3v7_110223.nc</file>
<desc>2.5x3.33 is FV 3-deg grid:</desc>
</domain>
<domain name="10x15">
<nx>24</nx>
<ny>19</ny>
<file grid="atm|lnd" mask="usgs">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.fv10x15_USGS.110713.nc</file>
<file grid="ice|ocn" mask="usgs">$DIN_LOC_ROOT/share/domains/domain.clm/domain.camocn.10x15_USGS_070807.nc</file>
<desc>10x15 is FV 10-deg grid:</desc>
</domain>
<!--=====================================================================-->
<!--=====================================================================-->
<!-- Eulerian grid domains -->
<!--=====================================================================-->
<!--=====================================================================-->
<domain name="T341">
<nx>1024</nx>
<ny>512</ny>
<desc>T341 is Gaussian grid:</desc>
</domain>
<domain name="T85">
<nx>256</nx>
<ny>128</ny>
<desc>T85 is Gaussian grid:</desc>
</domain>
<domain name="T62">
<nx>192</nx>
<ny>96</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_gx1v6.090320.nc</file>
<file grid="atm|lnd" mask="gx3v7">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_gx3v7.090911.nc</file>
<file grid="atm|lnd" mask="mpasgx1">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_mpasgx1.150903.nc</file>
<file grid="atm|lnd" mask="oQU480">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU480.151209.nc</file>
<file grid="atm|lnd" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240.151209.nc</file>
<file grid="atm|lnd" mask="oQU240wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240wLI_mask.160929.nc</file>
<file grid="atm|lnd" mask="oQU120">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU120.151209.nc</file>
<file grid="atm|lnd" mask="oEC60to30">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30.150616.nc</file>
<file grid="atm|lnd" mask="oEC60to30v3">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3.161222.nc</file>
<file grid="atm|lnd" mask="oEC60to30wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30wLI_mask.160830.nc</file>
<file grid="atm|lnd" mask="oEC60to30v3wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3wLI_mask.170328.nc</file>
<file grid="atm|lnd" mask="oRRS30to10">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10.150722.nc</file>
<file grid="atm|lnd" mask="oRRS30to10wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10wLI_mask.171109.nc</file>
<file grid="atm|lnd" mask="oRRS30to10v3">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3.171129.nc</file>
<file grid="atm|lnd" mask="oRRS30to10v3wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3wLI_mask.171109.nc</file>
<file grid="atm|lnd" mask="oRRS18to6">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS18to6.160831.nc</file>
<file grid="atm|lnd" mask="oRRS18to6v3">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS18to6v3.170111.nc</file>
<file grid="atm|lnd" mask="oRRS15to5">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS15to5.150722.nc</file>
<file grid="atm|lnd" mask="oARRM60to10">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oARRM60to10.180716.nc</file>
<file grid="atm|lnd" mask="oARRM60to6">$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oARRM60to6.180803.nc</file>
<desc>T62 is Gaussian grid:</desc>
</domain>
<domain name="T42">
<nx>128</nx>
<ny>64</ny>
<file grid="atm|lnd" mask="usgs">$DIN_LOC_ROOT/share/domains/domain.clm/domain.lnd.T42_USGS.111004.nc</file>
<file grid="ice|ocn" mask="usgs">$DIN_LOC_ROOT/atm/cam/ocnfrac/domain.camocn.64x128_USGS_070807.nc</file>
<desc>T42 is Gaussian grid:</desc>
</domain>
<domain name="T31">
<nx>96</nx>
<ny>48</ny>
<desc>T31 is Gaussian grid:</desc>
</domain>
<domain name="TL319">
<nx>640</nx>
<ny>320</ny>
<file grid="atm|lnd" mask="oEC60to30v3">$DIN_LOC_ROOT/share/domains/domain.lnd.TL319_oEC60to30v3.181203.nc</file>
<file grid="ice|ocn" mask="oEC60to30v3">$DIN_LOC_ROOT/share/domains/domain.ocn.TL319_oEC60to30v3.181203.nc</file>
<file grid="atm|lnd" mask="oARRM60to10">$DIN_LOC_ROOT/share/domains/domain.lnd.TL319_oARRM60to10.180905.nc</file>
<file grid="ice|ocn" mask="oARRM60to10">$DIN_LOC_ROOT/share/domains/domain.ocn.TL319_oARRM60to10.180905.nc</file>
<file grid="atm|lnd" mask="oARRM60to6">$DIN_LOC_ROOT/share/domains/domain.lnd.TL319_oARRM60to6.180905.nc</file>
<file grid="ice|ocn" mask="oARRM60to6">$DIN_LOC_ROOT/share/domains/domain.ocn.TL319_oARRM60to6.180905.nc</file>
<desc>TL319 is JRA lat/lon grid:</desc>
</domain>
<!--=====================================================================-->
<!--=====================================================================-->
<!-- SE grid domains -->
<!--=====================================================================-->
<!--=====================================================================-->
<!-- ne4 -->
<domain name="ne4np4">
<nx>866</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="oQU480">$DIN_LOC_ROOT/share/domains/domain.lnd.ne4np4_oQU480.180702.nc</file>
<file grid="atm|lnd" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.lnd.ne4np4_oQU240.160614.nc</file>
<file grid="ice|ocn" mask="oQU480">$DIN_LOC_ROOT/share/domains/domain.ocn.ne4np4_oQU480.180702.nc</file>
<file grid="ice|ocn" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.ocn.ne4np4_oQU240.160614.nc</file>
<desc>ne4np4 is Spectral Elem 7.5-deg grid:</desc>
</domain>
<domain name="ne4np4.pg1">
<nx>96</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="oQU240">DUMMY</file>
<file grid="ice|ocn" mask="oQU240">DUMMY</file>
<desc>ne4np4.pg1 is Spectral Elem 7.5-deg grid w/ 1x1 FV physics grid per element:</desc>
</domain>
<domain name="ne4np4.pg2">
<nx>384</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.lnd.ne4pg2_oQU240.190321.nc</file>
<file grid="ice|ocn" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.ocn.ne4pg2_oQU240.190321.nc</file>
<desc>ne4np4.pg2 is Spectral Elem 7.5-deg grid w/ 2x2 FV physics grid per element:</desc>
</domain>
<!--=====================================================================-->
<!-- ne11 -->
<domain name="ne11np4">
<nx>6536</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.lnd.ne11np4_oQU240.160614.nc</file>
<file grid="ice|ocn" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.ocn.ne11np4_oQU240.160614.nc</file>
<desc>ne11np4 is Spectral Elem 2.7-deg grid:</desc>
</domain>
<!--=====================================================================-->
<!-- ne16 -->
<domain name="ne16np4">
<nx>13826</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx3v7">$DIN_LOC_ROOT/share/domains/domain.lnd.ne16np4_gx3v7.120406.nc</file>
<file grid="atm|lnd" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.lnd.ne16np4_oQU240.151211.nc</file>
<file grid="ice|ocn" mask="gx3v7">$DIN_LOC_ROOT/share/domains/domain.ocn.ne16np4_gx3v7.121113.nc</file>
<file grid="ice|ocn" mask="oQU240">$DIN_LOC_ROOT/share/domains/domain.ocn.ne16np4_oQU240.151211.nc</file>
<desc>ne16np4 is Spectral Elem 2-deg grid:</desc>
</domain>
<domain name="ne16np4.pg1">
<nx>1536</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">DUMMY</file>
<file grid="ice|ocn" mask="gx1v6">DUMMY</file>
<desc>ne16np4.pg1 is Spectral Elem 2-deg grid w/ 1x1 FV physics grid per element:</desc>
</domain>
<domain name="ne16np4.pg2">
<nx>6144</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">DUMMY</file>
<file grid="ice|ocn" mask="gx1v6">DUMMY</file>
<desc>ne16np4.pg2 is Spectral Elem 2-deg grid w/ 2x2 FV physics grid per element:</desc>
</domain>
<!--=====================================================================-->
<!-- ne30 -->
<domain name="ne30np4">
<nx>48602</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_gx1v6.110905.nc</file>
<file grid="atm|lnd" mask="oQU120">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oQU120.160401.nc</file>
<file grid="atm|lnd" mask="oEC60to30">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oEC60to30.20151214.nc</file>
<file grid="atm|lnd" mask="oEC60to30v3">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oEC60to30v3.161222.nc</file>
<file grid="atm|lnd" mask="oEC60to30wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oEC60to30wLI_mask.160915.nc</file>
<file grid="atm" mask="oEC60to30v3wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oEC60to30v3wLI_mask.170802.nc</file>
<file grid="atm|lnd" mask="oRRS30to10">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oRRS30to10.160419.nc</file>
<file grid="atm|lnd" mask="oRRS30to10wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oRRS30to10wLI.160930.nc</file>
<file grid="atm|lnd" mask="oRRS30to10v3">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oRRS30to10v3.171101.nc</file>
<file grid="atm|lnd" mask="oRRS30to10v3wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oRRS30to10v3wLI_mask.171109.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_gx1v6_110217.nc</file>
<file grid="ice|ocn" mask="oQU120">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oQU120.160401.nc</file>
<file grid="ice|ocn" mask="oEC60to30">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oEC60to30.20151214.nc</file>
<file grid="ice|ocn" mask="oEC60to30v3">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oEC60to30v3.161222.nc</file>
<file grid="ice|ocn" mask="oEC60to30wLI">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oEC60to30wLI_mask.160915.nc</file>
<file grid="ice|ocn" mask="oEC60to30v3wLI">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oEC60to30v3wLI_mask.160915.nc</file>
<file grid="ice|ocn" mask="oRRS30to10">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oRRS30to10.160419.nc</file>
<file grid="ice|ocn" mask="oRRS30to10wLI">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oRRS30to10wLI.160930.nc</file>
<file grid="ice|ocn" mask="oRRS30to10v3">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oRRS30to10v3.171101.nc</file>
<file grid="ice|ocn" mask="oRRS30to10v3wLI">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oRRS30to10v3wLI_mask.171109.nc</file>
<file grid="lnd" mask="oEC60to30v3wLI">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30np4_oEC60to30v3wLI_mask.170802.nc</file>
<desc>ne30np4 is Spectral Elem 1-deg grid:</desc>
</domain>
<domain name="ne30np4.pg1">
<nx>5400</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">DUMMY</file>
<file grid="ice|ocn" mask="gx1v6">DUMMY</file>
<desc>ne30np4.pg1 is Spectral Elem 1-deg grid w/ 1x1 FV physics grid per element:</desc>
</domain>
<domain name="ne30np4.pg2">
<nx>21600</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="oEC60to30v3">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30pg2_oECv3.190806.nc</file>
<file grid="ice|ocn" mask="oEC60to30v3">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_oECv3.190806.nc</file>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.ne30pg2_gx1v6.190806.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_gx1v6.190806.nc</file>
<desc>ne30np4.pg2 is Spectral Elem 1-deg grid w/ 2x2 FV physics grid per element:</desc>
</domain>
<!--=====================================================================-->
<!-- ne60 -->
<domain name="ne60np4">
<nx>194402</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.ne60np4_gx1v6.120406.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.ne60np4_gx1v6.121113.nc</file>
<desc>ne60np4 is Spectral Elem 1/2-deg grid:</desc>
</domain>
<domain name="ne60np4.pg1">
<nx>21600</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">DUMMY</file>
<file grid="ice|ocn" mask="gx1v6">DUMMY</file>
<desc>ne60np4.pg1 is Spectral Elem 1/2-deg grid w/ 1x1 FV physics grid per element:</desc>
</domain>
<domain name="ne60np4.pg2">
<nx>86400</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">DUMMY</file>
<file grid="ice|ocn" mask="gx1v6">DUMMY</file>
<desc>ne60np4.pg2 is Spectral Elem 1/2-deg grid w/ 2x2 FV physics grid per element:</desc>
</domain>
<!--=====================================================================-->
<!-- ne120 -->
<domain name="ne120np4">
<nx>777602</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.ne120np4_gx1v6.110502.nc</file>
<file grid="atm|lnd" mask="oRRS18to6">$DIN_LOC_ROOT/share/domains/domain.lnd.ne120np4_oRRS18to6.160831.nc</file>
<file grid="atm|lnd" mask="oRRS18to6v3">$DIN_LOC_ROOT/share/domains/domain.lnd.ne120np4_oRRS18to6v3.170111.nc</file>
<file grid="atm|lnd" mask="oRRS15to5">$DIN_LOC_ROOT/share/domains/domain.lnd.ne120np4_oRRS15to5.160207.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.ne120np4_gx1v6.121113.nc</file>
<file grid="ice|ocn" mask="oRRS18to6">$DIN_LOC_ROOT/share/domains/domain.ocn.ne120np4_oRRS18to6.160831.nc</file>
<file grid="ice|ocn" mask="oRRS18to6v3">$DIN_LOC_ROOT/share/domains/domain.ocn.ne120np4_oRRS18to6v3.170111.nc</file>
<file grid="ice|ocn" mask="oRRS15to5">$DIN_LOC_ROOT/share/domains/domain.ocn.ne120np4_oRRS15to5.160207.nc</file>
<desc>ne120np4 is Spectral Elem 1/4-deg grid:</desc>
</domain>
<domain name="ne120np4.pg2">
<nx>345600</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">DUMMY</file>
<file grid="ice|ocn" mask="gx1v6">DUMMY</file>
<desc>ne120np4.pg2 is Spectral Elem 1/4-deg grid w/ 2x2 FV physics grid per element:</desc>
</domain>
<!-- ne240 -->
<domain name="ne240np4">
<nx>3110402</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.ne240np4_gx1v6.111226.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.ne240np4_gx1v6.111226.nc</file>
<file grid="atm|lnd" mask="tx0.1v2">$DIN_LOC_ROOT/share/domains/domain.lnd.ne240np4_tx0.1v2.170822.nc</file>
<file grid="ice|ocn" mask="tx0.1v2">$DIN_LOC_ROOT/share/domains/domain.ocn.ne240np4_tx0.1v2.170822.nc</file>
<desc>ne240np4 is Spectral Elem 1/8-deg grid:</desc>
</domain>
<domain name="ne512np4">
<nx>14155778</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="oRRS15to5">$DIN_LOC_ROOT/share/domains/domain.lnd.ne512np4_oRRS15to5.190417.nc</file>
<file grid="ice|ocn" mask="oRRS15to5">$DIN_LOC_ROOT/share/domains/domain.ocn.ne512np4_oRRS15to5.190417.nc</file>
<desc>ne512np4 is Spectral Elem 6km grid:</desc>
</domain>
<domain name="ne1024np4">
<nx>56623106</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="oRRS15to5">$DIN_LOC_ROOT/share/domains/domain.lnd.ne1024np4_oRRS15to5.190514.nc</file>
<file grid="ice|ocn" mask="oRRS15to5">$DIN_LOC_ROOT/share/domains/domain.ocn.ne1024np4_oRRS15to5.190514.nc</file>
<desc>ne1024np4 is Spectral Elem 3km grid:</desc>
</domain>
<!--=====================================================================-->
<!--=====================================================================-->
<domain name="gx1v6">
<nx>320</nx>
<ny>384</ny>
<file grid="atm|ice|lnd|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.gx1v6.090206.nc</file>
<desc>gx1v6 is displaced Greenland pole v6 1-deg grid:</desc>
</domain>
<domain name="gx3v7">
<nx>100</nx>
<ny>116</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.gx3v7.120323.nc</file>
<desc>gx3v7 is displaced Greenland pole v7 3-deg grid:</desc>
</domain>
<!-- MPAS grids -->
<domain name="mpasgx1">
<nx>86354</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.mpasgx1.150903.nc</file>
<desc>mpasgx1 is a MPAS seaice grid that is roughly 1 degree resolution:</desc>
</domain>
<domain name="mpas120">
<nx>28574</nx>
<ny>1</ny>
<file grid="ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.mpas120.121116.nc</file>
<desc>mpas120 is a MPAS ocean grid that is roughly 1 degree resolution:</desc>
</domain>
<domain name="oEC60to30">
<nx>234988</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oEC60to30.150616.nc</file>
<desc>oEC60to30 is a MPAS ocean grid generated with the eddy closure density function that is roughly comparable to the pop 1 degree resolution:</desc>
</domain>
<domain name="oEC60to30v3">
<nx>235160</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oEC60to30v3.161222.nc</file>
<desc>oEC60to30v3 is a MPAS ocean grid generated with the eddy closure density function that is roughly comparable to the pop 1 degree resolution:</desc>
</domain>
<domain name="oEC60to30wLI">
<nx>236689</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oEC60to30wLI.160830.nc</file>
<desc>oEC60to30wLI is a MPAS ocean grid generated with the eddy closure density function with 30 km gridcells at the equator, 60 km at mid-latitudes, and 35 km at high latitudes. It is roughly comparable to the POP 1 degree resolution. Additionally, it has ocean under landice cavities:</desc>
</domain>
<domain name="oEC60to30v3wLI">
<nx>236358</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oEC60to30v3wLI-nomask.180906.nc</file>
<desc>oEC60to30v3wLI is a MPAS ocean grid generated with the eddy closure density function with 30 km gridcells at the equator, 60 km at mid-latitudes, and 35 km at high latitudes. It is roughly comparable to the POP 1 degree resolution. Additionally, it has ocean under landice cavities:</desc>
</domain>
<domain name="oRRS30to10">
<nx>1444565</nx>
<ny>1</ny>
<file grid="ocn|ice">$DIN_LOC_ROOT/share/domains/domain.ocn.oRRS30to10.150722.nc</file>
<desc>oRRS30to10 is an MPAS ocean grid with a mesh density function that is roughly proportional to the Rossby radius of deformation, with 30 km gridcells at low and 10 km gridcells at high latitudes:</desc>
</domain>
<!-- MPAS ocean domains: Rossby-radius (RRS) 30to10 km meshes and eddy-closure (EC)
     60to30 km meshes. Suffixes: wLI = with ocean under land-ice cavities;
     ICG = initial conditions spun up from a G compset run. nx is the unstructured
     cell count, so ny is always 1. -->
<domain name="oRRS30to10v3">
<nx>1445361</nx>
<ny>1</ny>
<file grid="ocn|ice">$DIN_LOC_ROOT/share/domains/domain.ocn.oRRS30to10v3.171129.nc</file>
<desc>oRRS30to10v3 is an MPAS ocean grid with a mesh density function that is roughly proportional to the Rossby radius of deformation, with 30 km gridcells at low and 10 km gridcells at high latitudes:</desc>
</domain>
<domain name="oRRS30to10wLI">
<nx>1462411</nx>
<ny>1</ny>
<file grid="ocn|ice">$DIN_LOC_ROOT/share/domains/domain.ocn.oRRS30to10wLI.160930.nc</file>
<desc>oRRS30to10wLI is an MPAS ocean grid with a mesh density function that is roughly proportional to the Rossby radius of deformation, with 30 km gridcells at low and 10 km gridcells at high latitudes: Additionally, it has ocean under landice cavities:</desc>
</domain>
<domain name="oRRS30to10v3wLI">
<nx>1460217</nx>
<ny>1</ny>
<file grid="ocn|ice">$DIN_LOC_ROOT/share/domains/domain.ocn.oRRS30to10v3wLI.171109.nc</file>
<desc>oRRS30to10v3wLI is an MPAS ocean grid with a mesh density function that is roughly proportional to the Rossby radius of deformation, with 30 km gridcells at low and 10 km gridcells at high latitudes: Additionally, it has ocean under landice cavities:</desc>
</domain>
<!-- NOTE(review): ICG entries reuse the domain file of the corresponding non-ICG grid
     (only the initial-condition provenance differs). -->
<domain name="oEC60to30_ICG">
<nx>234988</nx>
<ny>1</ny>
<file grid="ice|ocn" mask="oEC60to30">$DIN_LOC_ROOT/share/domains/domain.ocn.oEC60to30.150616.nc</file>
<desc>oEC60to30_ICG is a MPAS ocean grid with initial conditions, generated with the eddy closure density function with 30 km gridcells at the equator, 60 km at mid-latitudes, and 35 km at high latitudes. It is roughly comparable to the POP 1 degree resolution. This version of initial conditions is spun-up from a G compset run:</desc>
</domain>
<domain name="oEC60to30v3_ICG">
<nx>235160</nx>
<ny>1</ny>
<file grid="ice|ocn" mask="oEC60to30v3">$DIN_LOC_ROOT/share/domains/domain.ocn.oEC60to30v3.161222.nc</file>
<desc>oEC60to30v3_ICG is a MPAS ocean grid with initial conditions, generated with the eddy closure density function with 30 km gridcells at the equator, 60 km at mid-latitudes, and 35 km at high latitudes. It is roughly comparable to the POP 1 degree resolution. This version of initial conditions is spun-up from a G compset run:</desc>
</domain>
<!-- oEC60to30v3wLI with spun-up initial conditions. Fixed desc: it previously named
     the non-ICG grid (oEC60to30v3wLI) and omitted the G-compset spin-up sentence that
     all sibling _ICG entries carry. -->
<domain name="oEC60to30v3wLI_ICG">
<nx>236358</nx>
<ny>1</ny>
<!-- NOTE(review): domain file name embeds "ne30np4_...-nomask", unlike the other
     ocean domain files here; presumably intentional for this wLI grid - confirm. -->
<file grid="ice|ocn" mask="oEC60to30v3wLI">$DIN_LOC_ROOT/share/domains/domain.ocn.ne30np4_oEC60to30v3wLI-nomask.180906.nc</file>
<desc>oEC60to30v3wLI_ICG is a MPAS ocean grid with initial conditions, generated with the eddy closure density function with 30 km gridcells at the equator, 60 km at mid-latitudes, and 35 km at high latitudes. It is roughly comparable to the POP 1 degree resolution. Additionally, it has ocean under landice cavities. This version of initial conditions is spun-up from a G compset run:</desc>
</domain>
<!-- MPAS ocean domains: RRS 18to6 km meshes and Arctic-Region-Refined (ARRM) meshes. -->
<domain name="oRRS18to6">
<nx>3697425</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oRRS18to6.160831.nc</file>
<desc>oRRS18to6 is an MPAS ocean grid with a mesh density function that is roughly proportional to the Rossby radius of deformation, with 18 km gridcells at low and 6 km gridcells at high latitudes:</desc>
</domain>
<domain name="oRRS18to6v3">
<nx>3693225</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oRRS18to6v3.170111.nc</file>
<desc>oRRS18to6v3 is an MPAS ocean grid with a mesh density function that is roughly proportional to the Rossby radius of deformation, with 18 km gridcells at low and 6 km gridcells at high latitudes:</desc>
</domain>
<!-- Same mesh/domain file as oRRS18to6v3; only the initial conditions differ. -->
<domain name="oRRS18to6v3_ICG">
<nx>3693225</nx>
<ny>1</ny>
<file grid="ice|ocn" mask="oRRS18to6v3">$DIN_LOC_ROOT/share/domains/domain.ocn.oRRS18to6v3.170111.nc</file>
<desc>oRRS18to6v3_ICG is an MPAS ocean grid with a mesh density function that is roughly proportional to the Rossby radius of deformation, with 18 km gridcells at low and 6 km gridcells at high latitudes. This version of initial conditions is spun-up from a G compset run:</desc>
</domain>
<domain name="oARRM60to10">
<nx>619264</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oARRM60to10.180716.nc</file>
<desc>oARRM60to10 is an Arctic-Region-Refined MPAS ocean grid with 30 km gridcells at -90 deg latitude, 60 km gridcells at -40 deg latitude, 30 km gridcells at the equator, and 10 km gridcells at 90 deg latitude; North Atlantic and North Pacific have different resolution between 0 and 60 deg latitudes:</desc>
</domain>
<domain name="oARRM60to6">
<nx>1208625</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oARRM60to6.180803.nc</file>
<desc>oARRM60to6 is an Arctic-Region-Refined MPAS ocean grid with 30 km gridcells at -90 deg latitude, 60 km gridcells at -40 deg latitude, 30 km gridcells at the equator, and 6 km gridcells at 90 deg latitude; North Atlantic and North Pacific have different resolution between 0 and 60 deg latitudes:</desc>
</domain>
<!-- ROF (river) grids-->
<!-- Regular lat/lon river grids; nx = 360/resolution, ny = 180/resolution.
     River grids carry no domain <file>. -->
<domain name="rx1">
<nx>360</nx>
<ny>180</ny>
<desc>rx1 is 1 degree river routing grid (only valid for DROF):</desc>
</domain>
<domain name="r05">
<nx>720</nx>
<ny>360</ny>
<desc>r05 is 1/2 degree river routing grid:</desc>
</domain>
<domain name="r01">
<nx>3600</nx>
<ny>1800</ny>
<desc>r01 is 1/10 degree river routing grid:</desc>
</domain>
<!-- 2880 x 1440 = 1/8 degree (360*8 = 2880). -->
<domain name="r0125">
<nx>2880</nx>
<ny>1440</ny>
<desc>r0125 is 1/8 degree river routing grid:</desc>
</domain>
<!-- Legacy MPAS ocean meshes (mp120 family). -->
<domain name="mp120v1">
<nx>28993</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.mp120v1.111018.nc</file>
<desc>mp120v1 is a MPAS ocean grid that is roughly 1 degree resolution:</desc>
</domain>
<!-- NOTE(review): mp120r10v1 declares no domain <file>, unlike mp120v1 - confirm
     whether one is required for its intended use. -->
<domain name="mp120r10v1">
<nx>139734</nx>
<ny>1</ny>
<desc>mp120r10v1 is a MPAS grid:</desc>
</domain>
<!-- GLC:MALI domains -->
<!-- Uniform 20 km land-ice (MALI) meshes; per their descs, intended for testing. -->
<domain name="mpas.aisgis20km">
<nx>53100</nx>
<ny>1</ny>
<desc>mpas.aisgis20km is a uniform-resolution 20km MALI grid of the Antarctic and Greenland Ice Sheets. It is primarily intended for testing.</desc>
</domain>
<domain name="mpas.gis20km">
<nx>7425</nx>
<ny>1</ny>
<desc>mpas.gis20km is a uniform-resolution 20km MALI grid of the Greenland Ice Sheet. It is primarily intended for testing.</desc>
</domain>
<domain name="mpas.ais20km">
<nx>45675</nx>
<ny>1</ny>
<desc>mpas.ais20km is a uniform-resolution 20km MALI grid of the Antarctic Ice Sheet. It is primarily intended for testing.</desc>
</domain>
<!-- WW3 domains-->
<!-- WaveWatch III global wave grid; provides both lnd- and ocn-side domain files. -->
<domain name="ww3a">
<nx>90</nx>
<ny>50</ny>
<file grid="atm|lnd">$DIN_LOC_ROOT/share/domains/domain.lnd.ww3a_ww3a.120222.nc</file>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.ww3a_ww3a.120222.nc</file>
<desc>WW3 90 x 50 global grid</desc>
</domain>
<!-- RRM grids -->
<!-- Regionally-refined (RRM) atmosphere grids. Each entry may carry multiple
     domain files keyed by the ocean mask it was generated against. -->
<domain name="ne0np4_arm_x8v3_lowcon">
<nx>92558</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.armx8v3_gx1v6.140517.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.armx8v3_gx1v6.140517.nc</file>
<desc>1-deg with 1/8-deg over U.S. (version 3):</desc>
</domain>
<domain name="ne0np4_enax4v1">
<nx>78788</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.lnd.enax4v1_gx1v6.170523.nc</file>
<file grid="atm|lnd" mask="oRRS18to6">$DIN_LOC_ROOT/share/domains/domain.lnd.enax4v1_oRRS18to6.170621.nc</file>
<file grid="ice|ocn" mask="gx1v6">$DIN_LOC_ROOT/share/domains/domain.ocn.enax4v1_gx1v6.170523.nc</file>
<file grid="ice|ocn" mask="oRRS18to6">$DIN_LOC_ROOT/share/domains/domain.ocn.enax4v1_oRRS18to6.170621.nc</file>
<desc>1-deg with 1/4-deg over Eastern North Atlantic (version 1):</desc>
</domain>
<domain name="ne0np4_twpx4v1">
<nx>81434</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="oRRS18to6v3">$DIN_LOC_ROOT/share/domains/domain.lnd.twpx4v1_oRRS18to6v3.170629.nc</file>
<file grid="ice|ocn" mask="oRRS18to6v3">$DIN_LOC_ROOT/share/domains/domain.ocn.twpx4v1_oRRS18to6v3.170629.nc</file>
<desc>1-deg with 1/4-deg over Tropical West Pacific (version 1):</desc>
</domain>
<domain name="ne0np4_conus_x4v1_lowcon">
<nx>89147</nx>
<ny>1</ny>
<file grid="atm|lnd" mask="tx0.1v2">$DIN_LOC_ROOT/share/domains/domain.lnd.conusx4v1_tx0.1v2.161129.nc</file>
<file grid="ice|ocn" mask="tx0.1v2">$DIN_LOC_ROOT/share/domains/domain.ocn.conusx4v1_tx0.1v2.161129.nc</file>
<desc>1-deg with 1/4-deg over CONUS (version 1):</desc>
</domain>
<!-- 0.1-degree tripole ocean grid used as the mask for the CONUS RRM above. -->
<domain name="tx0.1v2">
<nx>3600</nx>
<ny>2400</ny>
<file grid="atm|ice|lnd|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.tx0.1v2.090218.nc</file>
<desc>tx0.1v2 is an old mask used for CONUS:</desc>
</domain>
<!-- MPAS ocean quasi-uniform (QU) meshes, coarse to fine, plus the RRS 15to5 mesh. -->
<domain name="oQU480">
<nx>1791</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oQU480.151209.nc</file>
<desc>oQU480 is an MPAS ocean mesh with quasi-uniform 480 km grid cells, nominally 4 degree resolution:</desc>
</domain>
<domain name="oQU240">
<nx>7153</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oQU240.151209.nc</file>
<desc>oQU240 is an MPAS ocean mesh with quasi-uniform 240 km grid cells, nominally 2 degree resolution:</desc>
</domain>
<domain name="oQU240wLI">
<nx>7268</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oQU240wLI.160929.nc</file>
<desc>oQU240wLI is an MPAS ocean mesh with quasi-uniform 240 km grid cells, nominally 2 degree resolution. Additionally, it has ocean under landice cavities.:</desc>
</domain>
<domain name="oQU120">
<nx>28571</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oQU120.160401.nc</file>
<desc>oQU120 is an MPAS ocean mesh with quasi-uniform 120 km grid cells, nominally 1 degree resolution:</desc>
</domain>
<domain name="oRRS15to5">
<nx>5778136</nx>
<ny>1</ny>
<file grid="ice|ocn">$DIN_LOC_ROOT/share/domains/domain.ocn.oRRS15to5.160207.nc</file>
<desc>oRRS15to5 is an MPAS ocean grid with a mesh density function that is roughly proportional to the Rossby radius of deformation, with 15 km gridcells at low and 5 km gridcells at high latitudes:</desc>
</domain>
</domains>
<!-- The following are the required grid maps that must not be idmap if the -->
<!-- attributes grid1 and grid2 are not equal -->
<!-- Map variables that must point to real mapping files (not idmap) whenever the
     two named grids differ. The <gridmap> entries below supply the file paths. -->
<required_gridmaps>
<required_gridmap grid1="atm_grid" grid2="ocn_grid">ATM2OCN_FMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="ocn_grid">ATM2OCN_SMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="ocn_grid">ATM2OCN_VMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="ocn_grid">OCN2ATM_FMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="ocn_grid">OCN2ATM_SMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="lnd_grid">ATM2LND_FMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="lnd_grid">ATM2LND_SMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="lnd_grid">LND2ATM_FMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="lnd_grid">LND2ATM_SMAPNAME</required_gridmap>
<required_gridmap grid1="atm_grid" grid2="wav_grid">ATM2WAV_SMAPNAME</required_gridmap>
<required_gridmap grid1="ocn_grid" grid2="wav_grid">OCN2WAV_SMAPNAME</required_gridmap>
<!-- NOTE(review): ICE2WAV is keyed on ocn_grid, not ice_grid (see the inline ???);
     presumably the ice grid always equals the ocn grid here - confirm. -->
<required_gridmap grid1="ocn_grid" grid2="wav_grid">ICE2WAV_SMAPNAME</required_gridmap> <!-- ??? -->
<!-- <required_gridmap grid1="ocn_grid" grid2="rof_grid" not_compset="_POP">ROF2OCN_FMAPNAME</required_gridmap> ?? -->
<required_gridmap grid1="ocn_grid" grid2="rof_grid" >ROF2OCN_LIQ_RMAPNAME</required_gridmap>
<required_gridmap grid1="ocn_grid" grid2="rof_grid" >ROF2OCN_ICE_RMAPNAME</required_gridmap>
<required_gridmap grid1="lnd_grid" grid2="rof_grid">LND2ROF_FMAPNAME</required_gridmap>
<required_gridmap grid1="lnd_grid" grid2="rof_grid">ROF2LND_FMAPNAME</required_gridmap>
</required_gridmaps>
<gridmaps>
<!-- ======================================================== -->
<!-- Mapping -->
<!-- ======================================================== -->
<!--- atm to ocean and ocean to atm mapping files -->
<!-- Finite-volume (fv) atmosphere grids to ocean grids. Naming convention in the
     files: aave = area-average (conservative), blin/bilin = bilinear,
     patc/patch = patch interpolation. -->
<gridmap atm_grid="0.23x0.31" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_fv0.23x0.31_to_gx1v6_aave_da_100423.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_fv0.23x0.31_to_gx1v6_bilin_da_100423.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_fv0.23x0.31_to_gx1v6_bilin_da_100423.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_gx1v6_to_fv0.23x0.31_aave_da_100423.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_gx1v6_to_fv0.23x0.31_aave_da_100423.nc</map>
</gridmap>
<gridmap atm_grid="0.47x0.63" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_fv0.47x0.63_to_gx1v6_aave_da_090407.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_fv0.47x0.63_to_gx1v6_patch_090401.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_fv0.47x0.63_to_gx1v6_patch_090401.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_gx1v6_to_fv0.47x0.63_aave_da_090407.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_gx1v6_to_fv0.47x0.63_aave_da_090407.nc</map>
</gridmap>
<gridmap atm_grid="0.9x1.25" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_gx1v6_aave.130322.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_gx1v6_blin.130322.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_gx1v6_patc.130322.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_fv0.9x1.25_aave.130322.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_fv0.9x1.25_aave.130322.nc</map>
</gridmap>
<gridmap atm_grid="0.9x1.25" ocn_grid="mpas120">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_0.9x1.25_TO_mpas120_aave.151109.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_0.9x1.25_TO_mpas120_bilin.151109.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/fv0.9x1.25/map_0.9x1.25_TO_mpas120_patc.151109.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/mpas120/map_mpas120_TO_0.9x1.25_aave.151109.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/mpas120/map_mpas120_TO_0.9x1.25_aave.151109.nc</map>
</gridmap>
<gridmap atm_grid="0.9x1.25" ocn_grid="mp120v1">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_fv0.9x1.25_to_mp120v1_aave_da_111004.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_fv0.9x1.25_to_mp120v1_aave_da_111004.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_fv0.9x1.25_to_mp120v1_aave_da_111004.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_mp120v1_to_fv0.9x1.25_aave_da_111004.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_mp120v1_to_fv0.9x1.25_aave_da_111004.nc</map>
</gridmap>
<gridmap atm_grid="1.9x2.5" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/fv1.9x2.5/map_fv1.9x2.5_TO_gx1v6_aave.130322.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/fv1.9x2.5/map_fv1.9x2.5_TO_gx1v6_blin.130322.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/fv1.9x2.5/map_fv1.9x2.5_TO_gx1v6_patc.130322.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_fv1.9x2.5_aave.130322.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_fv1.9x2.5_aave.130322.nc</map>
</gridmap>
<gridmap atm_grid="4x5" ocn_grid="gx3v7">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_fv4x5_to_gx3v7_aave_da_091218.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_fv4x5_to_gx3v7_bilin_da_091218.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_fv4x5_to_gx3v7_bilin_da_091218.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_gx3v7_to_fv4x5_aave_da_091218.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_gx3v7_to_fv4x5_aave_da_091218.nc</map>
</gridmap>
<!-- Spectral-element (neXnp4) low-resolution atmosphere grids to ocean grids. -->
<!-- NOTE(review): this entry lists only directory paths, no map files; it appears
     to be a placeholder - confirm before using this grid combination. -->
<gridmap atm_grid="ne4np4" ocn_grid="gx3v7">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne4np4/</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne4np4/</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne4np4/</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx3v7/</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx3v7/</map>
</gridmap>
<gridmap atm_grid="ne4np4" ocn_grid="oQU480">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne4np4/map_ne4np4_to_oQU480_aave.180702.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne4np4/map_ne4np4_to_oQU480_conserve.180702.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne4np4/map_ne4np4_to_oQU480_conserve.180702.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU480/map_oQU480_to_ne4np4_aave.180702.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU480/map_oQU480_to_ne4np4_aave.180702.nc</map>
</gridmap>
<gridmap atm_grid="ne4np4" ocn_grid="oQU240">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne4np4/map_ne4np4_to_oQU240_aave.160614.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne4np4/map_ne4np4_to_oQU240_aave.160614.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne4np4/map_ne4np4_to_oQU240_aave.160614.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU240/map_oQU240_to_ne4np4_aave.160614.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU240/map_oQU240_to_ne4np4_aave.160614.nc</map>
</gridmap>
<!-- NOTE(review): placeholder entry (directory paths only), as for ne4np4/gx3v7. -->
<gridmap atm_grid="ne11np4" ocn_grid="gx3v7">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne11np4/</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne11np4/</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne11np4/</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx3v7/</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx3v7/</map>
</gridmap>
<gridmap atm_grid="ne16np4" ocn_grid="gx3v7">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne16np4/map_ne16np4_TO_gx3v7_aave.120406.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne16np4/map_ne16np4_TO_gx3v7_aave.120406.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne16np4/map_ne16np4_TO_gx3v7_aave.120406.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx3v7/map_gx3v7_TO_ne16np4_aave.120406.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx3v7/map_gx3v7_TO_ne16np4_aave.120406.nc</map>
</gridmap>
<gridmap atm_grid="ne11np4" ocn_grid="oQU240">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne11np4/map_ne11np4_to_oQU240_aave.160614.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne11np4/map_ne11np4_to_oQU240_aave.160614.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne11np4/map_ne11np4_to_oQU240_aave.160614.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU240/map_oQU240_to_ne11np4_aave.160614.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU240/map_oQU240_to_ne11np4_aave.160614.nc</map>
</gridmap>
<!-- ne30np4 (nominal 1-degree SE) atmosphere to ocean and land grids. -->
<gridmap atm_grid="ne30np4" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_ne30np4_to_gx1v6_aave_110121.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_ne30np4_to_gx1v6_native_110328.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_ne30np4_to_gx1v6_native_110328.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_gx1v6_to_ne30np4_aave_110121.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_gx1v6_to_ne30np4_aave_110121.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oEC60to30_ICG">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_TO_oEC60to30_aave.151207.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30_conserve_151207.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30_conserve_151207.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30/map_oEC60to30_TO_ne30np4_aave.151207.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30/map_oEC60to30_TO_ne30np4_aave.151207.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oEC60to30v3_ICG">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_aave.161222.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_conserve.161222.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_conserve.161222.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_ne30np4_aave.161222.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_ne30np4_aave.161222.nc</map>
</gridmap>
<!-- NOTE(review): the pg2 entry reuses the ne30np4 (np4) map files verbatim;
     presumably intentional pending pg2-specific maps - confirm. -->
<gridmap atm_grid="ne30np4.pg2" ocn_grid="oEC60to30v3_ICG">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_aave.161222.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_conserve.161222.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_conserve.161222.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_ne30np4_aave.161222.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_ne30np4_aave.161222.nc</map>
</gridmap>
<!-- For wLI grids, flux maps use the masked mesh and state maps the unmasked mesh. -->
<gridmap atm_grid="ne30np4" ocn_grid="oEC60to30wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30wLI_mask_aave.160915.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30wLI_mask_aave.160915.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30wLI_nomask_aave.160915.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30wLI/map_oEC60to30wLI_mask_to_ne30np4_aave.160915.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30wLI/map_oEC60to30wLI_mask_to_ne30np4_aave.160915.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oEC60to30v3wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3wLI_mask_aave.170802.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3wLI_mask_conserve.170802.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3wLI_nomask_bilin.170802.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_nomask_to_ne30np4_aave.180906.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_nomask_to_ne30np4_aave.180906.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oEC60to30v3wLI_ICG">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3wLI_mask_aave.170802.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3wLI_mask_conserve.170802.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3wLI_nomask_bilin.170802.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_nomask_to_ne30np4_aave.180906.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_nomask_to_ne30np4_aave.180906.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oRRS30to10wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10wLI_mask_aave.160930.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10wLI_mask_aave.160930.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10wLI_nomask_aave.160930.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10wLI/map_oRRS30to10wLI_mask_to_ne30np4_aave.160930.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10wLI/map_oRRS30to10wLI_mask_to_ne30np4_aave.160930.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="mpas120">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_ne30np4_TO_MPASO_QU120km_aave.151110.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_ne30np4_TO_MPASO_QU120km_bilin.151110.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_ne30np4_TO_MPASO_QU120km_bilin.151110.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_MPASO_QU120km_TO_ne30np4_aave.151110.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_MPASO_QU120km_TO_ne30np4_aave.151110.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" lnd_grid="0.9x1.25">
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_TO_fv0.9x1.25_aave.120712.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_TO_fv0.9x1.25_aave.120712.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_ne30np4_aave.120712.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_ne30np4_aave.120712.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" lnd_grid="1.9x2.5">
<map name="ATM2LND_FMAPNAME">cpl/cpl6/map_ne30np4_to_fv1.9x2.5_aave_da_091230.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/cpl6/map_ne30np4_to_fv1.9x2.5_aave_da_091230.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/cpl6/map_fv1.9x2.5_to_ne30np4_aave_da_091230.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/cpl6/map_fv1.9x2.5_to_ne30np4_aave_da_091230.nc</map>
</gridmap>
<!-- ne60np4 to gx1v6. Added ATM2OCN_VMAPNAME: required_gridmaps declares it
     required for every atm/ocn pair, and it was missing here. Following the
     convention of sibling entries (e.g. ne120np4/gx1v6, 4x5/gx3v7) the vector
     map reuses the bilinear file - confirm against available map files. -->
<gridmap atm_grid="ne60np4" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne60np4/map_ne60np4_TO_gx1v6_aave.120406.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne60np4/map_ne60np4_TO_gx1v6_blin.120406.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne60np4/map_ne60np4_TO_gx1v6_blin.120406.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_ne60np4_aave.120406.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_ne60np4_aave.120406.nc</map>
</gridmap>
<!-- High-resolution SE atmosphere grids (ne120, ne240, ne512, ne1024) to ocean
     and land grids. "mono" maps are monotone remaps used for the ultra-high
     resolution grids. -->
<gridmap atm_grid="ne120np4" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_gx1v6_aave_110428.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_gx1v6_bilin_110428.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_gx1v6_bilin_110428.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_to_ne120np4_aave_110428.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_to_ne120np4_aave_110428.nc</map>
</gridmap>
<gridmap atm_grid="ne120np4" lnd_grid="0.9x1.25">
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_TO_fv0.9x1.25_aave.120712.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_TO_fv0.9x1.25_aave.120712.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_ne120np4_aave.120712.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_ne120np4_aave.120712.nc</map>
</gridmap>
<gridmap atm_grid="ne120np4" lnd_grid="0.23x0.31">
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_fv0.23x0.31_aave_110331.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_fv0.23x0.31_aave_110331.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/fv0.23x0.31/map_fv0.23x0.31_to_ne120np4_aave_110331.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/fv0.23x0.31/map_fv0.23x0.31_to_ne120np4_aave_110331.nc</map>
</gridmap>
<gridmap atm_grid="ne120np4" ocn_grid="oRRS18to6v3_ICG">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6v3_aave.170111.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6v3_conserve.170111.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6v3_conserve.170111.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_ne120np4_aave.170111.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_ne120np4_aave.170111.nc</map>
</gridmap>
<gridmap atm_grid="ne240np4" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne240np4/map_ne240np4_to_gx1v6_aave_110428.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne240np4/map_ne240np4_to_gx1v6_aave_110428.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne240np4/map_ne240np4_to_gx1v6_aave_110428.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_to_ne240np4_aave_110428.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_to_ne240np4_aave_110428.nc</map>
</gridmap>
<gridmap atm_grid="ne240np4" ocn_grid="tx0.1v2">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne240np4/map_ne240np4_to_tx0.1v2_aave_110419.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne240np4/map_ne240np4_to_tx0.1v2_native_120327.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne240np4/map_ne240np4_to_tx0.1v2_native_120327.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/tx0.1v2/map_tx0.1v2_to_ne240np4_aave_110419.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/tx0.1v2/map_tx0.1v2_to_ne240np4_aave_110419.nc</map>
</gridmap>
<gridmap atm_grid="ne240np4" lnd_grid="0.23x0.31">
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/ne240np4/map_ne240np4_to_fv0.23x0.31_aave_110428.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/ne240np4/map_ne240np4_to_fv0.23x0.31_aave_110428.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/fv0.23x0.31/map_fv0.23x0.31_to_ne240np4_aave_110428.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/fv0.23x0.31/map_fv0.23x0.31_to_ne240np4_aave_110428.nc</map>
</gridmap>
<gridmap atm_grid="ne512np4" ocn_grid="oRRS15to5">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne512np4/map_ne512np4_to_oRRS15to5_mono.190402.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne512np4/map_ne512np4_to_oRRS15to5_mono.190402.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne512np4/map_ne512np4_to_oRRS15to5_mono.190402.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS15to5/map_oRRS15to5_to_ne512np4_mono.190402.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS15to5/map_oRRS15to5_to_ne512np4_mono.190402.nc</map>
</gridmap>
<gridmap atm_grid="ne512np4" lnd_grid="360x720cru">
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/ne512np4/map_ne512np4_to_360x720cru_mono.190409.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/ne512np4/map_ne512np4_to_360x720cru_mono.190409.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/360x720cru/map_360x720cru_to_ne512np4_mono.190409.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/360x720cru/map_360x720cru_to_ne512np4_mono.190409.nc</map>
</gridmap>
<gridmap atm_grid="ne1024np4" ocn_grid="oRRS15to5">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne1024np4/map_ne1024np4_to_oRRS15to5_mono.190509.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne1024np4/map_ne1024np4_to_oRRS15to5_mono.190509.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne1024np4/map_ne1024np4_to_oRRS15to5_mono.190509.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS15to5/map_oRRS15to5_to_ne1024np4_mono.190509.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS15to5/map_oRRS15to5_to_ne1024np4_mono.190509.nc</map>
</gridmap>
<gridmap atm_grid="ne1024np4" lnd_grid="360x720cru">
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/ne1024np4/map_ne1024np4_to_360x720cru_mono.190508.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/ne1024np4/map_ne1024np4_to_360x720cru_mono.190508.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/360x720cru/map_360x720cru_to_ne1024np4_mono.190508.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/360x720cru/map_360x720cru_to_ne1024np4_mono.190508.nc</map>
</gridmap>
<!-- Regionally-refined ENA x4 atmosphere grid to its land and ocean partners. -->
<gridmap atm_grid="ne0np4_enax4v1" lnd_grid="ne30np4">
<map name="ATM2LND_FMAPNAME">cpl/cpl6/map_enax4v1_TO_ne30np4_aave.170517.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/cpl6/map_enax4v1_TO_ne30np4_aave.170517.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/cpl6/map_ne30np4_TO_enax4v1_aave.170517.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/cpl6/map_ne30np4_TO_enax4v1_aave.170517.nc</map>
</gridmap>
<gridmap atm_grid="ne0np4_enax4v1" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_enax4v1_TO_gx1v6_aave.170523.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_enax4v1_TO_gx1v6_blin.170523.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_enax4v1_TO_gx1v6_patc.170523.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_gx1v6_TO_enax4v1_aave.170523.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_gx1v6_TO_enax4v1_aave.170523.nc</map>
</gridmap>
<gridmap atm_grid="ne0np4_enax4v1" ocn_grid="oRRS18to6">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_enax4v1_TO_oRRS18to6_aave.170620.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_enax4v1_TO_oRRS18to6_blin.170620.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_enax4v1_TO_oRRS18to6_patc.170620.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_oRRS18to6_TO_enax4v1_aave.170620.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_oRRS18to6_TO_enax4v1_aave.170620.nc</map>
</gridmap>
<!-- T62 (data-atmosphere reanalysis grid) to ocean grids; used by D and G compsets. -->
<gridmap atm_grid="T62" ocn_grid="gx3v7">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_gx3v7_aave.130322.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_gx3v7_blin.130322.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_gx3v7_patc.130322.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx3v7/map_gx3v7_TO_T62_aave.130322.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx3v7/map_gx3v7_TO_T62_aave.130322.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_gx1v6_aave.130322.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_gx1v6_blin.130322.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_gx1v6_patc.130322.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_T62_aave.130322.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_T62_aave.130322.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="mpas120">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_mpas120_aave.121116.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_mpas120_aave.121116.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_mpas120_aave.121116.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/mpas120/map_mpas120_TO_T62_aave.121116.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/mpas120/map_mpas120_TO_T62_aave.121116.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="mpasgx1">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_mpasgx1_aave.150827.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_mpasgx1_blin.150827.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_mpasgx1_blin.150827.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/mpasgx1/map_mpasgx1_TO_T62_aave.150827.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/mpasgx1/map_mpasgx1_TO_T62_aave.150827.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oQU480">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU480_aave.151209.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU480_patc.151209.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU480_blin.151209.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU480/map_oQU480_TO_T62_aave.151209.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU480/map_oQU480_TO_T62_aave.151209.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oQU240">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU240_aave.151209.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU240_patc.151209.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU240_blin.151209.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU240/map_oQU240_TO_T62_aave.151209.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU240/map_oQU240_TO_T62_aave.151209.nc</map>
</gridmap>
<!-- wLI grid: flux map uses the masked mesh, state map the unmasked mesh. -->
<gridmap atm_grid="T62" ocn_grid="oQU240wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU240wLI_mask_aave.160929.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU240wLI_nomask_aave.160929.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU240wLI_mask_patc.160929.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU240wLI/map_oQU240wLI_mask_TO_T62_aave.160929.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU240wLI/map_oQU240wLI_mask_TO_T62_aave.160929.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oQU120">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU120_aave.151209.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU120_patc.151209.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oQU120_blin.151209.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU120/map_oQU120_TO_T62_aave.151209.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU120/map_oQU120_TO_T62_aave.151209.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oEC60to30">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30_aave.150615.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30_bilin.150804.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30_patc.150804.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30/map_oEC60to30_TO_T62_aave.150615.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30/map_oEC60to30_TO_T62_aave.150615.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oEC60to30v3">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30v3_aave.161222.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30v3_blin.161222.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30v3_patc.161222.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_TO_T62_aave.161222.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_TO_T62_aave.161222.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oEC60to30wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30wLI_aave.160830.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30wLI_nm_aave.160830.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30wLI_blin.160830.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30wLI/map_oEC60to30wLI_TO_T62_aave.160830.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30wLI/map_oEC60to30wLI_TO_T62_aave.160830.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oEC60to30v3wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30v3wLI_mask_aave.170328.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30v3wLI_nomask_blin.170328.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oEC60to30v3wLI_mask_patc.170328.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_mask_TO_T62_aave.170328.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_mask_TO_T62_aave.170328.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oRRS30to10">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10_aave.150722.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10_blin.150722.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10_patc.150722.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10/map_oRRS30to10_TO_T62_aave.150722.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10/map_oRRS30to10_TO_T62_aave.150722.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oRRS30to10v3">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3_aave.171128.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3_blin.171128.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3_patc.171128.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10v3/map_oRRS30to10v3_TO_T62_aave.171128.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10v3/map_oRRS30to10v3_TO_T62_aave.171128.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oRRS30to10wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10wLI_mask_aave.160930.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10wLI_nomask_aave.160930.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10wLI_mask_blin.160930.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10wLI/map_oRRS30to10wLI_mask_TO_T62_aave.160930.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10wLI/map_oRRS30to10wLI_mask_TO_T62_aave.160930.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oRRS30to10v3wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3wLI_mask_aave.171109.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3wLI_nomask_blin.171109.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3wLI_mask_patc.171109.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10v3wLI/map_oRRS30to10v3wLI_mask_TO_T62_aave.171109.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10v3wLI/map_oRRS30to10v3wLI_mask_TO_T62_aave.171109.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oRRS18to6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_to_oRRS18to6_aave.160831.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_to_oRRS18to6_patch.160831.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_to_oRRS18to6_bilin.160831.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS18to6/map_oRRS18to6_to_T62_aave.160831.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS18to6/map_oRRS18to6_to_T62_aave.160831.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oRRS18to6v3">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_aave.170111.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_patc.170111.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_blin.170111.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_T62_aave.170111.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_T62_aave.170111.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oRRS15to5">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS15to5_aave.150722.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS15to5_patc.150722.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_oRRS15to5_blin.150722.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS15to5/map_oRRS15to5_TO_T62_aave.150722.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS15to5/map_oRRS15to5_TO_T62_aave.150722.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oARRM60to10">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_to_oARRM60to10_aave.180715.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_to_oARRM60to10_bilin.180715.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_to_oARRM60to10_patch.180715.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oARRM60to10/map_oARRM60to10_to_T62_aave.180715.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oARRM60to10/map_oARRM60to10_to_T62_aave.180715.nc</map>
</gridmap>
<gridmap atm_grid="T62" ocn_grid="oARRM60to6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T62/map_T62_to_oARRM60to6_aave.180803.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T62/map_T62_to_oARRM60to6_bilin.180803.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T62/map_T62_to_oARRM60to6_patch.180803.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oARRM60to6/map_oARRM60to6_to_T62_aave.180803.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oARRM60to6/map_oARRM60to6_to_T62_aave.180803.nc</map>
</gridmap>
<gridmap atm_grid="TL319" ocn_grid="oEC60to30v3">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oEC60to30v3_aave.181203.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oEC60to30v3_bilin.181203.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oEC60to30v3_patch.181203.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_TL319_aave.181203.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_TL319_aave.181203.nc</map>
</gridmap>
<gridmap atm_grid="TL319" ocn_grid="oARRM60to10">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oARRM60to10_aave.180904.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oARRM60to10_bilin.180904.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oARRM60to10_patch.180904.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oARRM60to10/map_oARRM60to10_to_TL319_aave.180904.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oARRM60to10/map_oARRM60to10_to_TL319_aave.180904.nc</map>
</gridmap>
<gridmap atm_grid="TL319" ocn_grid="oARRM60to6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oARRM60to6_aave.180904.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oARRM60to6_bilin.180904.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/TL319/map_TL319_to_oARRM60to6_patch.180904.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oARRM60to6/map_oARRM60to6_to_TL319_aave.180904.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oARRM60to6/map_oARRM60to6_to_TL319_aave.180904.nc</map>
</gridmap>
<gridmap atm_grid="T31" ocn_grid="gx3v7">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_T31_to_gx3v7_aave_da_090903.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_T31_to_gx3v7_patch_090903.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_T31_to_gx3v7_patch_090903.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_gx3v7_to_T31_aave_da_090903.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_gx3v7_to_T31_aave_da_090903.nc</map>
</gridmap>
<gridmap atm_grid="T85" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T85/map_T85_to_gx1v6_aave_110411.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T85/map_T85_to_gx1v6_bilin_110411.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T85/map_T85_to_gx1v6_bilin_110411.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_to_T85_aave_110411.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_to_T85_aave_110411.nc</map>
</gridmap>
<gridmap atm_grid="T85" lnd_grid="0.9x1.25">
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/T85/map_T85_to_fv0.9x1.25_aave_110411.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/T85/map_T85_to_fv0.9x1.25_bilin_110411.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_to_T85_aave_110411.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_to_T85_bilin_110411.nc</map>
</gridmap>
<gridmap atm_grid="128x256" lnd_grid="0.9x1.25">
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/T85/map_T85_to_fv0.9x1.25_bilin_110411.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_to_T85_aave_110411.nc</map>
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/T85/map_T85_to_fv0.9x1.25_aave_110411.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_to_T85_bilin_110411.nc</map>
</gridmap>
<!-- NOTE(review): duplicate of the preceding gridmap (atm_grid="128x256" lnd_grid="0.9x1.25").
     LND2ATM_SMAPNAME disagrees between the two entries (aave here, bilin above);
     confirm which map is intended and remove the redundant stanza. -->
<gridmap atm_grid="128x256" lnd_grid="0.9x1.25">
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/T85/map_T85_to_fv0.9x1.25_bilin_110411.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_to_T85_aave_110411.nc</map>
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/T85/map_T85_to_fv0.9x1.25_aave_110411.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_to_T85_aave_110411.nc</map>
</gridmap>
<gridmap atm_grid="512x1024" lnd_grid="0.23x0.31">
<map name="ATM2LND_FMAPNAME">cpl/gridmaps/T341/map_T341_to_fv0.23x0.31_aave_110413.nc</map>
<map name="ATM2LND_SMAPNAME">cpl/gridmaps/T341/map_T341_to_fv0.23x0.31_aave_110413.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/gridmaps/fv0.23x0.31/map_fv0.23x0.31_to_T341_aave_110413.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/gridmaps/fv0.23x0.31/map_fv0.23x0.31_to_T341_aave_110413.nc</map>
</gridmap>
<!-- QL, 2015-05-25: wave-to-ocean, wave-to-atmosphere, and ice-to-wave mapping files -->
<gridmap ocn_grid="gx3v7" wav_grid="ww3a">
<map name="WAV2OCN_SMAPNAME">cpl/gridmaps/ww3a/map_ww3a_TO_gx3v7_splice_150428.nc</map>
<map name="OCN2WAV_SMAPNAME">cpl/gridmaps/gx3v7/map_gx3v7_TO_ww3a_splice_150428.nc</map>
<map name="ICE2WAV_SMAPNAME">cpl/gridmaps/gx3v7/map_gx3v7_TO_ww3a_splice_150428.nc</map>
</gridmap>
<gridmap ocn_grid="gx1v6" wav_grid="ww3a">
<map name="WAV2OCN_SMAPNAME">cpl/gridmaps/ww3a/map_ww3a_TO_gx1v6_splice_150428.nc</map>
<map name="OCN2WAV_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_ww3a_splice_150428.nc</map>
<map name="ICE2WAV_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_TO_ww3a_splice_150428.nc</map>
</gridmap>
<gridmap atm_grid="48x96" wav_grid="ww3a">
<map name="ATM2WAV_SMAPNAME">cpl/gridmaps/T31/map_T31_TO_ww3a_bilin_131104.nc</map>
</gridmap>
<gridmap atm_grid="T62" wav_grid="ww3a">
<map name="ATM2WAV_SMAPNAME">cpl/gridmaps/T62/map_T62_TO_ww3a_bilin.150617.nc</map>
</gridmap>
<gridmap atm_grid="1.9x2.5" wav_grid="ww3a">
<map name="ATM2WAV_SMAPNAME">cpl/gridmaps/fv1.9x2.5/map_fv1.9x2.5_TO_ww3a_bilin_140702.nc</map>
</gridmap>
<!-- river to land and land to river mapping files -->
<gridmap lnd_grid="360x720cru" rof_grid="r01">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/0.1x0.1/map_360x720_nomask_to_0.1x0.1_nomask_aave_da_c130107.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/360x720/map_0.1x0.1_nomask_to_360x720_nomask_aave_da_c130104.nc</map>
</gridmap>
<gridmap lnd_grid="1.9x2.5" rof_grid="r01">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/0.1x0.1/map_1.9x2.5_nomask_to_0.1x0.1_nomask_aave_da_c120709.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/1.9x2.5/map_0.1x0.1_nomask_to_1.9x2.5_nomask_aave_da_c120709.nc</map>
</gridmap>
<gridmap lnd_grid="ne120np4" rof_grid="r01">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/0.1x0.1/map_ne120np4_nomask_to_0.1x0.1_nomask_aave_da_c120711.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne120np4/map_0.1x0.1_nomask_to_ne120np4_nomask_aave_da_c120706.nc</map>
</gridmap>
<gridmap lnd_grid="ne120np4" rof_grid="r0125">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/ne120np4/map_ne120np4_to_0.125_nomask_aave.160613.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne120np4/map_0.125_to_ne120np4_nomask_aave.160613.nc</map>
</gridmap>
<gridmap lnd_grid="ne240np4" rof_grid="r01">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/0.1x0.1/map_ne240np4_nomask_to_0.1x0.1_nomask_aave_da_c120711.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne240np4/map_0.1x0.1_nomask_to_ne240np4_nomask_aave_da_c120706.nc</map>
</gridmap>
<gridmap lnd_grid="360x720cru" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/0.5x0.5/map_360x720_nomask_to_0.5x0.5_nomask_aave_da_c130103.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/360x720/map_0.5x0.5_nomask_to_360x720_nomask_aave_da_c120830.nc</map>
</gridmap>
<gridmap lnd_grid="ne4np4" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/ne4np4/map_ne4np4_TO_r05_aave.160621.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne4np4/map_r05_TO_ne4np4_aave.160621.nc</map>
</gridmap>
<gridmap lnd_grid="ne11np4" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/ne11np4/map_ne11np4_TO_r05_aave.160621.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne11np4/map_r05_TO_ne11np4_aave.160621.nc</map>
</gridmap>
<gridmap lnd_grid="ne16np4" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/ne16np4/map_ne16np4_nomask_to_0.5x0.5_nomask_aave_da_c110922.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne16np4/map_0.5x0.5_nomask_to_ne16np4_nomask_aave_da_c110922.nc</map>
</gridmap>
<gridmap lnd_grid="ne30np4" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/ne30np4/map_ne30np4_to_0.5x0.5rtm_aave_da_110320.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne30np4/map_0.5x0.5_nomask_to_ne30np4_nomask_aave_da_c121019.nc</map>
</gridmap>
<gridmap lnd_grid="ne60np4" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/ne60np4/map_ne60np4_nomask_to_0.5x0.5_nomask_aave_da_c110922.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne60np4/map_0.5x0.5_nomask_to_ne60np4_nomask_aave_da_c110922.nc</map>
</gridmap>
<gridmap lnd_grid="ne120np4" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/ne120np4/map_ne120np4_to_0.5x0.5rtm_aave_da_110320.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne120np4/map_0.5x0.5_nomask_to_ne120np4_nomask_aave_da_c121019.nc</map>
</gridmap>
<gridmap lnd_grid="ne240np4" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/ne240np4/map_ne240np4_nomask_to_0.5x0.5_nomask_aave_da_c110922.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/ne240np4/map_0.5x0.5_nomask_to_ne240np4_nomask_aave_da_c121019.nc</map>
</gridmap>
<gridmap lnd_grid="0.23x0.31" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/0.23x0.31/map_0.23x0.31_nomask_to_0.5x0.5_nomask_aave_da_c110920.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/0.23x0.31/map_0.5x0.5_nomask_to_0.23x0.31_nomask_aave_da_c110920.nc</map>
</gridmap>
<gridmap lnd_grid="0.47x0.63" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/0.47x0.63/map_0.47x0.63_nomask_to_0.5x0.5_nomask_aave_da_c120306.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/0.47x0.63/map_0.5x0.5_nomask_to_0.47x0.63_nomask_aave_da_c120306.nc</map>
</gridmap>
<gridmap lnd_grid="0.9x1.25" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/0.9x1.25/map_0.9x1.25_nomask_to_0.5x0.5_nomask_aave_da_c120522.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/0.9x1.25/map_0.5x0.5_nomask_to_0.9x1.25_nomask_aave_da_c121019.nc</map>
</gridmap>
<gridmap lnd_grid="1.9x2.5" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/1.9x2.5/map_1.9x2.5_nomask_to_0.5x0.5_nomask_aave_da_c120522.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/1.9x2.5/map_0.5x0.5_nomask_to_1.9x2.5_nomask_aave_da_c120709.nc</map>
</gridmap>
<gridmap lnd_grid="2.5x3.33" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/2.5x3.33/map_2.5x3.33_nomask_to_0.5x0.5_nomask_aave_da_c110823.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/2.5x3.33/map_0.5x0.5_nomask_to_2.5x3.33_nomask_aave_da_c110823.nc</map>
</gridmap>
<gridmap lnd_grid="10x15" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/10x15/map_10x15_to_0.5x0.5rtm_aave_da_110307.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/10x15/map_0.5x0.5_nomask_to_10x15_nomask_aave_da_c121019.nc</map>
</gridmap>
<gridmap lnd_grid="4x5" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/4x5/map_4x5_nomask_to_0.5x0.5_nomask_aave_da_c110822.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/4x5/map_0.5x0.5_nomask_to_4x5_nomask_aave_da_c110822.nc</map>
</gridmap>
<gridmap lnd_grid="T341" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/512x1024/map_512x1024_nomask_to_0.5x0.5_nomask_aave_da_c110920.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/512x1024/map_0.5x0.5_nomask_to_512x1024_nomask_aave_da_c110920.nc</map>
</gridmap>
<gridmap lnd_grid="T85" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/128x256/map_128x256_nomask_to_0.5x0.5_nomask_aave_da_c110920.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/128x256/map_0.5x0.5_nomask_to_128x256_nomask_aave_da_c110920.nc</map>
</gridmap>
<gridmap lnd_grid="T31" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/48x96/map_48x96_nomask_to_0.5x0.5_nomask_aave_da_c110822.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/48x96/map_0.5x0.5_nomask_to_48x96_nomask_aave_da_c110822.nc</map>
</gridmap>
<!-- river to ocn area overlap -->
<gridmap ocn_grid="gx1v6" rof_grid="r05">
<map name="ROF2OCN_FMAPNAME">cpl/cpl6/map_r05_TO_g16_aave.120920.nc</map>
</gridmap>
<gridmap ocn_grid="gx3v7" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_gx3v7_e1000r500_090903.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_gx3v7_e1000r500_090903.nc</map>
</gridmap>
<gridmap ocn_grid="gx1v6" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_gx1v6_e1000r300_090318.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_gx1v6_e1000r300_090318.nc</map>
</gridmap>
<gridmap ocn_grid="gx3v7" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_gx3v7_e1000r500_090903.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_gx3v7_e1000r500_090903.nc</map>
</gridmap>
<!-- NOTE(review): ocn_grid="gx1v6" rof_grid="r05" also appears above with ROF2OCN_FMAPNAME;
     confirm the consumer merges duplicate gridmap stanzas, otherwise combine them into one. -->
<gridmap ocn_grid="gx1v6" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_gx1v6_e1000r300_090226.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_gx1v6_e1000r300_090226.nc</map>
</gridmap>
<gridmap ocn_grid="gx1v6" rof_grid="r01">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r01_to_gx1v6_120711.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r01_to_gx1v6_120711.nc</map>
</gridmap>
<!-- deprecated MPAS grids -->
<gridmap ocn_grid="mpasgx1" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_mpasgx1_nn_150910.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_mpasgx1_nn_150910.nc</map>
</gridmap>
<gridmap ocn_grid="mpas120" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_mpas120_nn_131217.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_mpas120_nn_131217.nc</map>
</gridmap>
<gridmap ocn_grid="mpas120" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_QU120km_nn_151110.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_QU120km_nn_151110.nc</map>
</gridmap>
<!-- current MPAS grids -->
<gridmap ocn_grid="oQU480" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oQU480_nn_151209.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oQU480_nn_151209.nc</map>
</gridmap>
<gridmap ocn_grid="oQU240" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oQU240_nn.160527.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oQU240_nn.160527.nc</map>
</gridmap>
<gridmap ocn_grid="oQU240wLI" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oQU240wLI_nn.160929.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oQU240wLI_nn.160929.nc</map>
</gridmap>
<gridmap ocn_grid="oQU120" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oQU120_nn.160527.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oQU120_nn.160527.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oEC60to30_nn.160527.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oEC60to30_nn.160527.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30v3" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oEC60to30v3_smoothed.r300e600.161222.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oEC60to30v3_smoothed.r300e600.161222.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30wLI" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oEC60to30wLI_nn.160830.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oEC60to30wLI_nn.160830.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30v3wLI" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oEC60to30v3wLI_smoothed.r300e600.180601.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oEC60to30v3wLI_smoothed.r300e600.180601.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS30to10" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS30to10_nn.160527.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS30to10_nn.160527.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS30to10v3" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS30to10v3_smoothed.r150e300.171129.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS30to10v3_smoothed.r150e300.171129.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS30to10wLI" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS30to10wLI_smoothed.r150e300.160930.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS30to10wLI_smoothed.r150e300.160930.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS30to10v3wLI" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS30to10v3wLI-masked_smoothed.r150e300.180611.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS30to10v3wLI-masked_smoothed.r150e300.180611.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS18to6" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS18to6_nn.160830.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS18to6_nn.160830.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS18to6v3" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS18to6v3_smoothed.r100e200.170111.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS18to6v3_smoothed.r100e200.170111.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS15to5" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS15to5_nn.160527.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oRRS15to5_nn.160527.nc</map>
</gridmap>
<gridmap ocn_grid="oARRM60to10" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oARRM60to10_smoothed.r150e300.180718.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oARRM60to10_smoothed.r150e300.180718.nc</map>
</gridmap>
<gridmap ocn_grid="oARRM60to6" rof_grid="rx1">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_rx1_to_oARRM60to6_smoothed.r150e300.180816.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_rx1_to_oARRM60to6_smoothed.r150e300.180816.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30v3" rof_grid="JRA025">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_JRA025_to_oEC60to30v3_smoothed.r150e300.181204.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_JRA025_to_oEC60to30v3_smoothed.r150e300.181204.nc</map>
</gridmap>
<gridmap ocn_grid="oARRM60to10" rof_grid="JRA025">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_JRA025_to_oARRM60to10_smoothed.r150e300.181219.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_JRA025_to_oARRM60to10_smoothed.r150e300.181219.nc</map>
</gridmap>
<gridmap ocn_grid="oARRM60to6" rof_grid="JRA025">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_JRA025_to_oARRM60to6_smoothed.r150e300.181220.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_JRA025_to_oARRM60to6_smoothed.r150e300.181220.nc</map>
</gridmap>
<gridmap ocn_grid="oQU480" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oQU480_nn.180702.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oQU480_nn.180702.nc</map>
</gridmap>
<gridmap ocn_grid="oQU240" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oQU240_nn.160714.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oQU240_nn.160714.nc</map>
</gridmap>
<gridmap ocn_grid="oQU240wLI" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oQU240wLI_nn.160929.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oQU240wLI_nn.160929.nc</map>
</gridmap>
<gridmap ocn_grid="oQU120" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oQU120_nn.160718.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oQU120_nn.160718.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30_smoothed.r175e350.160718.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30_smoothed.r175e350.160718.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30_ICG" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30_smoothed.r175e350.160718.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30_smoothed.r175e350.160718.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30v3" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30v3_smoothed.r300e600.161222.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30v3_smoothed.r300e600.161222.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30v3_ICG" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30v3_smoothed.r300e600.161222.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30v3_smoothed.r300e600.161222.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30wLI" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30wLI_smoothed.r300e600.160926.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30wLI_smoothed.r300e600.160926.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30v3wLI" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30v3wLI_smoothed.r300e600.180611.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30v3wLI_smoothed.r300e600.180611.nc</map>
</gridmap>
<gridmap ocn_grid="oEC60to30v3wLI_ICG" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30v3wLI_smoothed.r300e600.180611.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oEC60to30v3wLI_smoothed.r300e600.180611.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS30to10" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oRRS30to10_nn.160718.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oRRS30to10_nn.160718.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS30to10wLI" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oRRS30to10wLI_nn.160930.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oRRS30to10wLI_nn.160930.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS30to10v3" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oRRS30to10v3_smoothed.r150e300.171109.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oRRS30to10v3_smoothed.r150e300.171109.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS30to10v3wLI" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oRRS30to10v3wLI_smoothed.r150e300.171109.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oRRS30to10v3wLI_smoothed.r150e300.171109.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS15to5" rof_grid="r05">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r05_to_oRRS15to5_nn.160203.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r05_to_oRRS15to5_nn.160203.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS18to6" rof_grid="r0125">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r0125_to_oRRS18to6_nn.160831.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r0125_to_oRRS18to6_nn.160831.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS18to6v3" rof_grid="r0125">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r0125_to_oRRS18to6v3_smoothed.r50e100.170111.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r0125_to_oRRS18to6v3_smoothed.r50e100.170111.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS18to6v3_ICG" rof_grid="r0125">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r0125_to_oRRS18to6v3_smoothed.r50e100.170111.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r0125_to_oRRS18to6v3_smoothed.r50e100.170111.nc</map>
</gridmap>
<gridmap ocn_grid="oRRS15to5" rof_grid="r0125">
<map name="ROF2OCN_ICE_RMAPNAME">cpl/cpl6/map_r0125_to_oRRS15to5_nn.160720.nc</map>
<map name="ROF2OCN_LIQ_RMAPNAME">cpl/cpl6/map_r0125_to_oRRS15to5_nn.160720.nc</map>
</gridmap>
<!-- river flooding variables -->
<XROF_FLOOD_MODE lnd_grid="1.9x2.5" rof_grid="r05" ocn_grid="gx1v6" >ACTIVE</XROF_FLOOD_MODE>
<!-- ==================== -->
<!-- GLC: MALI grids -->
<!-- ==================== -->
<gridmap glc_grid="mpas.gis20km" lnd_grid="0.9x1.25">
<map name="LND2GLC_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_mpas.gis20km_aave.150922.nc</map>
<map name="LND2GLC_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_fv0.9x1.25_TO_mpas.gis20km_bilin.150922.nc</map>
<map name="GLC2LND_FMAPNAME">cpl/gridmaps/mpas.gis20km/map_mpas.gis20km_TO_fv0.9x1.25_aave.150922.nc</map>
<map name="GLC2LND_SMAPNAME">cpl/gridmaps/mpas.gis20km/map_mpas.gis20km_TO_fv0.9x1.25_aave.150922.nc</map>
</gridmap>
<!-- ==================== -->
<!-- GLC: mpas.aisgis20km -->
<!-- ==================== -->
<gridmap glc_grid="mpas.aisgis20km" lnd_grid="ne30np4">
<map name="LND2GLC_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_mpas.aisgis20km_mono.20190805.nc</map>
<map name="LND2GLC_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_mpas.aisgis20km_intbilin.20190805.nc</map>
<map name="GLC2LND_FMAPNAME">cpl/gridmaps/mpas.aisgis20km/map_mpas.aisgis20km_to_ne30np4_monotr.20190805.nc</map>
<map name="GLC2LND_SMAPNAME">cpl/gridmaps/mpas.aisgis20km/map_mpas.aisgis20km_to_ne30np4_mono.20190805.nc</map>
</gridmap>
<gridmap glc_grid="mpas.aisgis20km" ocn_grid="oEC60to30v3">
<map name="OCN2GLC_FMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_mpas.aisgis20km_aave.190403.nc</map>
<map name="OCN2GLC_SMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_mpas.aisgis20km_bilin.190403.nc</map>
<map name="GLC2OCN_FMAPNAME">cpl/gridmaps/mpas.aisgis20km/map_mpas.aisgis20km_to_oEC60to30v3_aave.190403.nc</map>
<map name="GLC2OCN_SMAPNAME">cpl/gridmaps/mpas.aisgis20km/map_mpas.aisgis20km_to_oEC60to30v3_bilin.190403.nc</map>
</gridmap>
<gridmap glc_grid="mpas.aisgis20km" ocn_grid="oEC60to30v3wLI">
<map name="OCN2GLC_FMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_to_mpas.aisgis20km_aave.190713.nc</map>
<map name="OCN2GLC_SMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_to_mpas.aisgis20km_bilin.190713.nc</map>
<map name="GLC2OCN_FMAPNAME">cpl/gridmaps/mpas.aisgis20km/map_mpas.aisgis20km_to_oEC60to30v3wLI_aave.190713.nc</map>
<map name="GLC2OCN_SMAPNAME">cpl/gridmaps/mpas.aisgis20km/map_mpas.aisgis20km_to_oEC60to30v3wLI_bilin.190713.nc</map>
</gridmap>
<!-- ==================== -->
<!-- GLC: mpas.gis20km -->
<!-- ==================== -->
<gridmap glc_grid="mpas.gis20km" lnd_grid="ne30np4">
<map name="LND2GLC_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_mpas.gis20km_mono.171002.nc</map>
<map name="LND2GLC_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_mpas.gis20km_intbilin.171002.nc</map>
<map name="GLC2LND_FMAPNAME">cpl/gridmaps/mpas.gis20km/map_mpas.gis20km_to_ne30np4_mono.171002.nc</map>
<map name="GLC2LND_SMAPNAME">cpl/gridmaps/mpas.gis20km/map_mpas.gis20km_to_ne30np4_mono.171002.nc</map>
</gridmap>
<gridmap glc_grid="mpas.gis20km" ocn_grid="oEC60to30v3">
<!-- Fixed duplicated map names: this entry previously declared OCN2GLC_FMAPNAME
     twice (aave + bilin) and GLC2OCN_SMAPNAME twice, leaving OCN2GLC_SMAPNAME
     and GLC2OCN_FMAPNAME undefined. Following the convention of the sibling
     GLC/OCN gridmaps (e.g. mpas.aisgis20km above): bilin file -> S-map,
     aave file -> F-map. The aave file serves both GLC2OCN maps, matching the
     precedent of the mpas.gis20km/ne30np4 entry which reuses one file. -->
<map name="OCN2GLC_FMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_mpas.gis20km_aave.181115.nc</map>
<map name="OCN2GLC_SMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_mpas.gis20km_bilin.181115.nc</map>
<map name="GLC2OCN_FMAPNAME">cpl/gridmaps/mpas.gis20km/map_mpas.gis20km_to_oEC60to30v3_aave.181115.nc</map>
<map name="GLC2OCN_SMAPNAME">cpl/gridmaps/mpas.gis20km/map_mpas.gis20km_to_oEC60to30v3_aave.181115.nc</map>
</gridmap>
<!-- ==================== -->
<!-- GLC: mpas.ais20km -->
<!-- ==================== -->
<gridmap glc_grid="mpas.ais20km" lnd_grid="0.9x1.25">
<map name="LND2GLC_FMAPNAME">cpl/gridmaps/fv0.9x1.25/map_f09_TO_ais20km_aave.151005.nc</map>
<map name="LND2GLC_SMAPNAME">cpl/gridmaps/fv0.9x1.25/map_f09_TO_ais20km_bilin.151005.nc</map>
<map name="GLC2LND_FMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_TO_f09_aave.151005.nc</map>
<map name="GLC2LND_SMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_TO_f09_aave.151005.nc</map>
</gridmap>
<!-- ======================================================== -->
<!-- GRIDS: glc to ocn mapping -->
<!-- -->
<!-- Eventually we will also want glc to ice mapping files. -->
<!-- These will likely differ from the glc to ocn mapping -->
<!-- files in their degree of smoothing (or they may not be -->
<!-- smoothed at all). But for now we are only specifying -->
<!-- glc to ocn mapping files. (By default, glc to ice -->
<!-- mapping is turned off in the system.) -->
<!-- ======================================================== -->
<!-- MPASO / MALI -->
<gridmap glc_grid="mpas.gis20km" ocn_grid="mpas120">
<map name="GLC2OCN_LIQ_RMAPNAME">cpl/gridmaps/mpas.gis20km/map_mpas.gis20km_to_mpas120_nnsmooth_150924.nc</map>
<map name="GLC2OCN_ICE_RMAPNAME">cpl/gridmaps/mpas.gis20km/map_mpas.gis20km_to_mpas120_nnsmooth_150924.nc</map>
</gridmap>
<gridmap glc_grid="mpas.ais20km" ocn_grid="oEC60to30">
<map name="GLC2OCN_FMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oEC60to30_nearestdtos.151005.nc</map>
<map name="GLC2OCN_SMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oEC60to30_nearestdtos.151005.nc</map>
<map name="OCN2GLC_FMAPNAME">cpl/gridmaps/oEC60to30/map_oEC60to30_to_ais20km_nearestdtos.151005.nc</map>
<map name="OCN2GLC_SMAPNAME">cpl/gridmaps/oEC60to30/map_oEC60to30_to_ais20km_nearestdtos.151005.nc</map>
</gridmap>
<gridmap atm_grid="ne16np4" ocn_grid="oQU240">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne16np4/map_ne16np4_to_oQU240_aave.151209.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne16np4/map_ne16np4_to_oQU240_conserve.151209.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne16np4/map_ne16np4_to_oQU240_conserve.151209.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU240/map_oQU240_to_ne16np4_aave.151209.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU240/map_oQU240_to_ne16np4_aave.151209.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oQU120">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oQU120_aave.160322.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oQU120_conserve.160322.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oQU120_conserve.160322.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oQU120/map_oQU120_to_ne30np4_aave.160322.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oQU120/map_oQU120_to_ne30np4_aave.160322.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oEC60to30">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_TO_oEC60to30_aave.151207.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30_conserve_151207.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30_conserve_151207.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30/map_oEC60to30_TO_ne30np4_aave.151207.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30/map_oEC60to30_TO_ne30np4_aave.151207.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oEC60to30v3">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_aave.161222.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_conserve_161222.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oEC60to30v3_conserve_161222.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_ne30np4_aave.161222.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_ne30np4_aave.161222.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oRRS30to10">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10_aave.160419.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10_aave.160419.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10_aave.160419.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10/map_oRRS30to10_to_ne30np4_aave.160419.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10/map_oRRS30to10_to_ne30np4_aave.160419.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oRRS30to10wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10wLI_mask_aave.160930.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10wLI_mask_aave.160930.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10wLI_nomask_aave.160930.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10wLI/map_oRRS30to10wLI_mask_to_ne30np4_aave.160930.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10wLI/map_oRRS30to10wLI_mask_to_ne30np4_aave.160930.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oRRS30to10v3">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10v3_aave.171218.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10v3_conserve.171218.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10v3_conserve.171218.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10v3/map_oRRS30to10v3_to_ne30np4_aave.171218.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10v3/map_oRRS30to10v3_to_ne30np4_aave.171218.nc</map>
</gridmap>
<gridmap atm_grid="ne30np4" ocn_grid="oRRS30to10v3wLI">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10v3wLI_mask_aave.171218.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10v3wLI_mask_conserve.171218.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne30np4/map_ne30np4_to_oRRS30to10v3wLI_nomask_conserve.171218.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS30to10v3wLI/map_oRRS30to10v3wLI_mask_to_ne30np4_aave.171218.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS30to10v3wLI/map_oRRS30to10v3wLI_mask_to_ne30np4_aave.171218.nc</map>
</gridmap>
<!-- NOTE(review): lnd_grid was "1.9x1.25", a grid referenced nowhere else in
     this file; all four map files below are fv1.9x2.5 <-> ne30np4, so the land
     grid name is corrected to 1.9x2.5 to match. -->
<gridmap atm_grid="ne30np4" lnd_grid="1.9x2.5">
<map name="ATM2LND_SMAPNAME">cpl/cpl6/map_ne30np4_to_fv1.9x2.5_aave_da_091230.nc</map>
<map name="LND2ATM_FMAPNAME">cpl/cpl6/map_fv1.9x2.5_to_ne30np4_aave_da_091230.nc</map>
<map name="ATM2LND_FMAPNAME">cpl/cpl6/map_ne30np4_to_fv1.9x2.5_aave_da_091230.nc</map>
<map name="LND2ATM_SMAPNAME">cpl/cpl6/map_fv1.9x2.5_to_ne30np4_aave_da_091230.nc</map>
</gridmap>
<gridmap atm_grid="ne120np4" ocn_grid="oRRS18to6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6_aave.160831.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6_bilin.160831.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6_bilin.160831.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS18to6/map_oRRS18to6_to_ne120np4_aave.160831.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS18to6/map_oRRS18to6_to_ne120np4_aave.160831.nc</map>
</gridmap>
<gridmap atm_grid="ne120np4" ocn_grid="oRRS18to6v3">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6v3_aave.170111.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6v3_conserve.170111.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6v3_conserve.170111.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_ne120np4_aave.170111.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_ne120np4_aave.170111.nc</map>
</gridmap>
<gridmap atm_grid="ne120np4" ocn_grid="oRRS15to5">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS15to5_aave.160203.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS15to5_bilin.160428.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS15to5_bilin.160428.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/oRRS15to5/map_oRRS15to5_to_ne120np4_aave.160203.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/oRRS15to5/map_oRRS15to5_to_ne120np4_aave.160203.nc</map>
</gridmap>
<gridmap atm_grid="48x96" ocn_grid="gx3v7">
<map name="ATM2OCN_FMAPNAME">cpl/cpl6/map_T31_to_gx3v7_aave_da_090903.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/cpl6/map_T31_to_gx3v7_patch_090903.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/cpl6/map_T31_to_gx3v7_patch_090903.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/cpl6/map_gx3v7_to_T31_aave_da_090903.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/cpl6/map_gx3v7_to_T31_aave_da_090903.nc</map>
</gridmap>
<gridmap atm_grid="128x256" ocn_grid="gx1v6">
<map name="ATM2OCN_FMAPNAME">cpl/gridmaps/T85/map_T85_to_gx1v6_aave_110411.nc</map>
<map name="ATM2OCN_VMAPNAME">cpl/gridmaps/T85/map_T85_to_gx1v6_bilin_110411.nc</map>
<map name="ATM2OCN_SMAPNAME">cpl/gridmaps/T85/map_T85_to_gx1v6_bilin_110411.nc</map>
<map name="OCN2ATM_FMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_to_T85_aave_110411.nc</map>
<map name="OCN2ATM_SMAPNAME">cpl/gridmaps/gx1v6/map_gx1v6_to_T85_aave_110411.nc</map>
</gridmap>
<gridmap lnd_grid="512x1024" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/512x1024/map_512x1024_nomask_to_0.5x0.5_nomask_aave_da_c110920.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/512x1024/map_0.5x0.5_nomask_to_512x1024_nomask_aave_da_c110920.nc</map>
</gridmap>
<gridmap lnd_grid="128x256" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/128x256/map_128x256_nomask_to_0.5x0.5_nomask_aave_da_c110920.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/128x256/map_0.5x0.5_nomask_to_128x256_nomask_aave_da_c110920.nc</map>
</gridmap>
<gridmap lnd_grid="48x96" rof_grid="r05">
<map name="LND2ROF_FMAPNAME">lnd/clm2/mappingdata/maps/48x96/map_48x96_nomask_to_0.5x0.5_nomask_aave_da_c110822.nc</map>
<map name="ROF2LND_FMAPNAME">lnd/clm2/mappingdata/maps/48x96/map_0.5x0.5_nomask_to_48x96_nomask_aave_da_c110822.nc</map>
</gridmap>
<gridmap glc_grid="mpas.ais20km" ocn_grid="oQU240">
<map name="GLC2OCN_FMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU240_aave.151209.nc</map>
<map name="OCN2GLC_FMAPNAME">cpl/gridmaps/oQU240/map_oQU240_to_ais20km_aave.151209.nc</map>
<map name="GLC2OCN_SMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU240_nearestdtos.151209.nc</map>
<map name="GLC2OCN_LIQ_RMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU240_nearestdtos.151209.nc</map>
<map name="GLC2OCN_ICE_RMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU240_nearestdtos.151209.nc</map>
<map name="OCN2GLC_SMAPNAME">cpl/gridmaps/oQU240/map_oQU240_to_ais20km_nearestdtos.151209.nc</map>
</gridmap>
<gridmap glc_grid="mpas.ais20km" ocn_grid="oQU120">
<map name="GLC2ICE_FMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU120_aave.160331.nc</map>
<map name="GLC2ICE_SMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU120_nearestdtos.160331.nc</map>
<map name="GLC2OCN_FMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU120_aave.160331.nc</map>
<map name="GLC2OCN_SMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU120_nearestdtos.160331.nc</map>
<map name="GLC2OCN_LIQ_RMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU120_nearestdtos.160331.nc</map>
<map name="GLC2OCN_ICE_RMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oQU120_nearestdtos.160331.nc</map>
<map name="OCN2GLC_FMAPNAME">cpl/gridmaps/oQU120/map_oQU120_to_ais20km_aave.160331.nc</map>
<map name="OCN2GLC_SMAPNAME">cpl/gridmaps/oQU120/map_oQU120_to_ais20km_neareststod.160331.nc</map>
</gridmap>
<gridmap glc_grid="mpas.ais20km" ocn_grid="oEC60to30v3wLI">
<map name="GLC2ICE_FMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oEC60to30v3wLI_nomask_aave.190207.nc</map>
<map name="GLC2ICE_SMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oEC60to30v3wLI_nomask_nearestdtos.190207.nc</map>
<map name="GLC2OCN_FMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oEC60to30v3wLI_nomask_aave.190207.nc</map>
<map name="GLC2OCN_SMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oEC60to30v3wLI_nomask_nearestdtos.190207.nc</map>
<map name="GLC2OCN_LIQ_RMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oEC60to30v3wLI_nomask_nearestdtos.190207.nc</map>
<map name="GLC2OCN_ICE_RMAPNAME">cpl/gridmaps/mpas.ais20km/map_ais20km_to_oEC60to30v3wLI_nomask_nearestdtos.190207.nc</map>
<map name="OCN2GLC_FMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_nomask_to_ais20km_aave.190207.nc</map>
<map name="OCN2GLC_SMAPNAME">cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_nomask_to_ais20km_neareststod.190207.nc</map>
</gridmap>
</gridmaps>
</grid_data>
CIMEROOT/config/e3sm/allactive¶
E3SM XML settings for all-active component set (compset) configurations.
<?xml version="1.0"?>
<compsets>
<help>
=========================================
compset naming convention
=========================================
The compset longname below has the specified order
atm, lnd, ice, ocn, river, glc, wave, model-options
The notation for the compset longname is
TIME_ATM[%phys]_LND[%phys]_ICE[%phys]_OCN[%phys]_ROF[%phys]_GLC[%phys]_WAV[%phys][_ESP%phys][_BGC%phys]
Where for the CAM specific compsets below the following is supported
TIME = Time period (e.g. 2000, HIST, RCP8...)
ATM = [CAM4, CAM5, SATM]
LND = [CLM45, SLND]
ICE = [MPASSI, CICE, DICE, SICE]
OCN = [MPASO, DOCN, SOCN]
ROF = [MOSART, SROF]
GLC = [MALI, SGLC]
WAV = [DWAV, XWAV, SWAV]
ESP = [SESP]
BGC = optional BGC scenario
The OPTIONAL %phys attributes specify submodes of the given system
For example DOCN%DOM is the data ocean model for DOCN
ALL data models must have a %phys option that corresponds to the data model mode
Each compset node is associated with the following elements
- lname
- alias
- support (optional description of the support level for this compset)
Each compset node can also have the following attributes
- grid (optional regular expression match for grid to work with the compset)
</help>
<!-- E3SM CMIP6 COMPSETS science compsets -->
<compset>
<alias>A_WCYCL1850S_CMIP6</alias>
<lname>1850_CAM5%CMIP6_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL20TRS_CMIP6</alias>
<lname>20TR_CAM5%CMIP6_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCLSSP585_CMIP6</alias>
<lname>SSP585_CAM5%CMIP6_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1950S_CMIP6_LR</alias>
<lname>1950_CAM5%CMIP6-LR_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1950S_CMIP6_HR</alias>
<lname>1950_CAM5%CMIP6-HR_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1950S_CMIP6_LRtunedHR</alias>
<lname>1950_CAM5%CMIP6-LRtunedHR_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<!-- E3SM v1 science compsets -->
<compset>
<alias>A_WCYCL2000</alias>
<lname>2000_CAM5%AV1C-L_CLM45%SPBC_MPASSI_MPASO_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL2000S</alias>
<lname>2000_CAM5%AV1C-L_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1850</alias>
<lname>1850_CAM5%AV1C-L_CLM45%SPBC_MPASSI_MPASO_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1850S</alias>
<lname>1850_CAM5%AV1C-L_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL20TR</alias>
<lname>20TR_CAM5%AV1C-L_CLM45%SPBC_MPASSI_MPASO_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL20TRS</alias>
<lname>20TR_CAM5%AV1C-L_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_CRYO</alias>
<lname>2000_CAM5%AV1C-L_CLM45%SPBC_MPASSI_MPASO_MOSART_MALI_SWAV</lname>
</compset>
<!-- E3SM v1 science compsets (hires) -->
<compset>
<alias>A_WCYCL2000_H01A</alias>
<lname>2000_CAM5%AV1C-H01A_CLM45%SPBC_MPASSI_MPASO_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL2000_H01AS</alias>
<lname>2000_CAM5%AV1C-H01A_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1850_H01A</alias>
<lname>1850_CAM5%AV1C-H01A_CLM45%SPBC_MPASSI_MPASO_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1850_H01AS</alias>
<lname>1850_CAM5%AV1C-H01A_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL20TR_H01A</alias>
<lname>20TR_CAM5%AV1C-H01A_CLM45%SPBC_MPASSI_MPASO_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL20TR_H01AS</alias>
<lname>20TR_CAM5%AV1C-H01A_CLM45%SPBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<!-- E3SM V1 BGC experiment compsets -->
<compset>
<alias>BGCEXP_BCRC_CNPRDCTC_1850</alias>
<lname>1850_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BCRC</lname>
</compset>
<compset>
<alias>BGCEXP_BCRC_CNPRDCTC_1850S</alias>
<lname>1850_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV_BGC%BCRC</lname>
</compset>
<compset>
<alias>BGCEXP_BCRC_CNPRDCTC_20TR</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BCRC</lname>
</compset>
<compset>
<alias>BGCEXP_BCRC_CNPRDCTC_20TRS</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV_BGC%BCRC</lname>
</compset>
<compset>
<alias>BGCEXP_BCRD_CNPRDCTC_20TR</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BCRD</lname>
</compset>
<compset>
<alias>BGCEXP_BCRD_CNPRDCTC_20TRS</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV_BGC%BCRD</lname>
</compset>
<compset>
<alias>BGCEXP_BDRC_CNPRDCTC_20TR</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BDRC</lname>
</compset>
<compset>
<alias>BGCEXP_BDRC_CNPRDCTC_20TRS</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV_BGC%BDRC</lname>
</compset>
<compset>
<alias>BGCEXP_BDRD_CNPRDCTC_20TR</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BDRD</lname>
</compset>
<compset>
<alias>BGCEXP_BDRD_CNPRDCTC_20TRS</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPRDCTCBC_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV_BGC%BDRD</lname>
</compset>
<compset>
<alias>BGCEXP_BCRC_CNPECACNT_1850</alias>
<lname>1850_CAM5%CMIP6_CLM45%CNPECACNTBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BCRC</lname>
</compset>
<compset>
<alias>BGCEXP_BCRC_CNPECACNT_20TR</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPECACNTBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BCRC</lname>
</compset>
<compset>
<alias>BGCEXP_BCRD_CNPECACNT_20TR</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPECACNTBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BCRD</lname>
</compset>
<compset>
<alias>BGCEXP_BDRC_CNPECACNT_20TR</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPECACNTBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BDRC</lname>
</compset>
<compset>
<alias>BGCEXP_BDRD_CNPECACNT_20TR</alias>
<lname>20TR_CAM5%CMIP6_CLM45%CNPECACNTBC_MPASSI%BGC_MPASO%OIECOOIDMS_MOSART_SGLC_SWAV_BGC%BDRD</lname>
</compset>
<!-- E3SM v1 cryosphere compsets -->
<compset>
<alias>A_WCYCL1850-DIB</alias>
<lname>1850_CAM5%AV1C-L_CLM45%SPBC_MPASSI%DIB_MPASO%IB_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1850-DIB-ISMF</alias>
<lname>1850_CAM5%AV1C-L_CLM45%SPBC_MPASSI%DIB_MPASO%IBISMF_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1850-DIB_CMIP6</alias>
<lname>1850_CAM5%CMIP6_CLM45%SPBC_MPASSI%DIB_MPASO%IB_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1850-DIB-ISMF_CMIP6</alias>
<lname>1850_CAM5%CMIP6_CLM45%SPBC_MPASSI%DIB_MPASO%IBISMF_MOSART_SGLC_SWAV</lname>
</compset>
<!-- E3SM backup compsets -->
<compset>
<alias>A_WCYCL1850_v0atm</alias>
<lname>1850_CAM5_CLM45%SP_MPASSI_MPASO_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL2000_v0atm</alias>
<!-- Fixed typo: WAV component was "SWAVi", which is not a valid option
     (help block lists WAV = [DWAV, XWAV, SWAV]); an lname with an unknown
     component token fails compset longname matching. -->
<lname>2000_CAM5_CLM45%SP_MPASSI_MPASO_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL1850S_v0atm</alias>
<lname>1850_CAM5_CLM45%SP_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>A_WCYCL2000S_v0atm</alias>
<!-- Fixed typo: WAV component was "SWAVi" (invalid; valid stub wave model
     is SWAV per the help block's WAV = [DWAV, XWAV, SWAV]). -->
<lname>2000_CAM5_CLM45%SP_MPASSI%SPUNUP_MPASO%SPUNUP_MOSART_SGLC_SWAV</lname>
</compset>
<!-- E3SM BGC compsets -->
<compset>
<alias>A_BG1850CN</alias>
<lname>1850_CAM5_CLM45%CN_MPASSI_MPASO_MOSART_MALI%SIA_SWAV</lname>
</compset>
<!-- OCN + CICE + GLC Only Compsets -->
<compset>
<alias>MPAS_LISIO_TEST</alias>
<lname>2000_DATM%NYF_SLND_MPASSI_MPASO_DROF%NYF_MALI%SIA_SWAV</lname>
</compset>
<!-- Slab ocean cases -->
<compset>
<alias>ETEST</alias>
<lname>2000_CAM4_CLM40%SP_CICE_DOCN%SOM_MOSART_SGLC_SWAV_TEST</lname>
</compset>
<compset>
<alias>E1850C5</alias>
<lname>1850_CAM5_CLM40%SP_CICE_DOCN%SOM_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>E1850C5TEST</alias>
<lname>1850_CAM5_CLM40%SP_CICE_DOCN%SOM_MOSART_SGLC_SWAV_TEST</lname>
</compset>
<compset>
<alias>E1850CN</alias>
<lname>1850_CAM4_CLM40%CN_CICE_DOCN%SOM_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>E1850C5CN</alias>
<lname>1850_CAM5_CLM40%CN_CICE_DOCN%SOM_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>E1850C5CNTEST</alias>
<!-- Removed trailing space inside the lname text content: lnames are matched
     as exact strings, and no other lname in this file carries whitespace. -->
<lname>1850_CAM5_CLM40%CN_CICE_DOCN%SOM_MOSART_SGLC_SWAV_TEST</lname>
</compset>
<compset>
<alias>E1850C5CLM45</alias>
<lname>1850_CAM5_CLM45%SP_CICE_DOCN%SOM_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>E1850C5CLM45CN</alias>
<lname>1850_CAM5_CLM45%CN_CICE_DOCN%SOM_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>E1850C5CLM45BGC</alias>
<lname>1850_CAM5_CLM45%BGC_CICE_DOCN%SOM_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>E1850C5CNECACTCBC</alias>
<lname>1850_CAM5_CLM45%CNECACTCBC_CICE_DOCN%SOM_MOSART_SGLC_SWAV</lname>
</compset>
<compset>
<alias>E20TRC5CNECACTCBC</alias>
<lname>20TR_CAM5_CLM45%CNECACTCBC_CICE_DOCN%SOM_MOSART_SGLC_SWAV</lname>
</compset>
<entries>
<entry id="RUN_STARTDATE">
<values>
<value compset="20TR_CAM">1850-01-01</value>
<value compset="SSP585_CAM">2015-01-01</value>
</values>
</entry>
</entries>
</compsets>
E3SM XML settings for all-active test configurations.
<?xml version="1.0"?>
<testlist>
<compset name="B">
<grid name="f19_g16">
<test name="STA_N2">
<machine compiler="intel" testtype="prealpha">yellowstone</machine>
<machine compiler="intel" testtype="prebeta">yellowstone</machine>
</test>
</grid>
<grid name="f45_g37">
<test name="PEA_P1_M">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">hobart</machine>
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">hobart</machine>
</test>
</grid>
</compset>
<compset name="B1850BDRD">
<grid name="f09_g16">
<test name="ERS_Ld11">
<machine compiler="ibm" testtype="prebeta" testmods="allactive/default">mira</machine>
</test>
</grid>
</compset>
<compset name="B1850BPRPC5L45BGC">
<grid name="f19_g16">
<test name="ERS_Ld11">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="B1850BPRPL45BGC">
<grid name="f19_g16">
<test name="ERS_Ld11">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">bluewaters</machine>
</test>
</grid>
</compset>
<compset name="B1850C5">
<grid name="f19_g16">
<test name="PET_PT">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">bluewaters</machine>
</test>
</grid>
</compset>
<compset name="B1850C5CN">
<grid name="f09_g16">
<test name="ERS">
<machine compiler="pgi" testtype="prealpha" testmods="allactive/default">bluewaters</machine>
<machine compiler="ibm" testtype="prealpha" testmods="allactive/default">mira</machine>
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="B1850C5L45BGC">
<!-- NOTE(review): this compset previously contained two sibling
     <grid name="T31_g37"> elements; merged into a single grid so each
     grid name appears once, matching the structure used elsewhere in
     this testlist. Test content is unchanged. -->
<grid name="T31_g37">
<test name="CME_D_Ld5">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
<test name="CME_Ld5">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">edison</machine>
</test>
<test name="ERS_E_Ld7">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
<test name="NCR_P4x1D">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
<grid name="f09_g16">
<test name="ERI">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">bluewaters</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">edison</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">eos</machine>
</test>
<test name="PFS">
<!-- Removed trailing space from compiler="pgi " — compiler attribute values
     are exact-match tokens; every other entry in this file is unpadded. -->
<machine compiler="pgi" testtype="prebeta">bluewaters</machine>
<machine compiler="intel" testtype="prealpha">yellowstone</machine>
<machine compiler="intel" testtype="prebeta">yellowstone</machine>
</test>
</grid>
<grid name="f19_g16">
<test name="ERS_D_Ld7">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">bluewaters</machine>
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
<test name="PET_PT">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">titan</machine>
<machine compiler="gnu" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="gnu" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
<grid name="ne120_g16">
<test name="ERS_Ld9">
<machine compiler="ibm" testtype="prebeta" testmods="allactive/default">mira</machine>
</test>
</grid>
<grid name="ne30_g16">
<test name="ERI">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
<test name="ERS_D_Ld7">
<machine compiler="ibm" testtype="prebeta" testmods="allactive/default">mira</machine>
</test>
<test name="ERS_IOP_Ld7">
<machine compiler="ibm" testtype="prebeta" testmods="allactive/default">mira</machine>
<machine compiler="gnu" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="gnu" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
<test name="ERS_Ld7">
<machine compiler="gnu" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="gnu" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
<test name="ERS_Ld9">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">titan</machine>
</test>
<test name="PET_PT">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">eos</machine>
</test>
<test name="PFS">
<!-- Removed trailing spaces from compiler="ibm " and compiler="intel " —
     compiler attribute values are exact-match tokens elsewhere in this file. -->
<machine compiler="ibm" testtype="prebeta">mira</machine>
<machine compiler="intel" testtype="prealpha">yellowstone</machine>
<machine compiler="intel" testtype="prebeta">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="B1850CN">
<grid name="T31_g37">
<test name="NOC">
<machine compiler="intel" testtype="prealpha">yellowstone</machine>
<machine compiler="intel" testtype="prebeta">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="B1850RMCN">
<grid name="f09_g16">
<test name="ERS_Ld7">
<machine compiler="ibm" testtype="prebeta" testmods="allactive/default">mira</machine>
</test>
</grid>
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">bluewaters</machine>
</test>
</grid>
</compset>
<compset name="B1850W5CN">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="ibm" testtype="prebeta" testmods="allactive/default">mira</machine>
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="B2000CNCHM">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="B2013WBCCN">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">janus</machine>
</test>
</grid>
</compset>
<compset name="BC5">
<grid name="ne16_g37">
<test name="ERS_Ld7">
<machine compiler="intel15" testtype="prebeta" testmods="allactive/default">babbageKnc</machine>
</test>
<test name="SMS_Ld7">
<machine compiler="intel15" testtype="prebeta" testmods="allactive/default">babbageKnc</machine>
</test>
</grid>
</compset>
<compset name="BC5L45BGC">
<grid name="f19_g16">
<test name="NCK_Ld5">
<machine compiler="gnu" testtype="prealpha" testmods="allactive/cism/test_coupling">yellowstone</machine>
<machine compiler="gnu" testtype="prebeta" testmods="allactive/cism/test_coupling">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BG1850C5L45BGCIS2">
<grid name="T31_g37_gl20">
<test name="SMS_D_Ld5">
<machine compiler="intel" testtype="prealpha" testmods="allactive/cism/test_coupling">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/cism/test_coupling">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BGC5L45BGC">
<grid name="T31_g37">
<test name="SMS_D_Ld5">
<machine compiler="nag" testtype="prebeta" testmods="allactive/cism/test_coupling">hobart</machine>
</test>
</grid>
<grid name="f19_g16">
<test name="NCK_Ld5">
<machine compiler="intel" testtype="prealpha" testmods="allactive/cism/test_coupling">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/cism/test_coupling">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BHISTBDRD">
<grid name="f09_g16">
<test name="ERS_Ld11">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">eos</machine>
</test>
</grid>
</compset>
<compset name="BHISTC5CN">
<grid name="f19_g16">
<test name="ERS_N2_Ld7">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">edison</machine>
<machine compiler="gnu" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="gnu" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BHISTC5L45BGC">
<grid name="ne120_g16">
<test name="ERS_Ld9">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">bluewaters</machine>
</test>
</grid>
</compset>
<compset name="BHISTCNCHM">
<grid name="f09_g16">
<test name="ERS_Ld7">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">eos</machine>
</test>
</grid>
</compset>
<compset name="BMOZ">
<grid name="f45_g37">
<test name="ERS_Ld7">
<machine compiler="gnu" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="gnu" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BPIPDC5L45BGC">
<grid name="f09_g16">
<test name="ERS_Ld7">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">bluewaters</machine>
</test>
<test name="SMS_D">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">edison</machine>
</test>
</grid>
<grid name="f19_g16">
<test name="ERS_IOP_Ld7">
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">bluewaters</machine>
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
</test>
<test name="ERS_N2_Ld7">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">hobart</machine>
</test>
</grid>
</compset>
<compset name="BRCP26CN">
<grid name="f09_g16">
<test name="ERS_PT_Ld7">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">janus</machine>
</test>
</grid>
</compset>
<compset name="BRCP26W5CN">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BRCP45BDRD">
<grid name="f09_g16">
<test name="ERS_Ld11">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BRCP45W5CN">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="pgi" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BRCP85BPRP">
<grid name="f09_g16">
<test name="SMS_Ld5">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">edison</machine>
</test>
</grid>
</compset>
<compset name="BRCP85C5L45BGC">
<grid name="f09_g16">
<test name="ERS">
<machine compiler="ibm" testtype="prebeta" testmods="allactive/default">mira</machine>
</test>
<test name="ERS_PT_Ld7">
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">edison</machine>
</test>
</grid>
</compset>
<compset name="BRCP85W5CN">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="gnu" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="gnu" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BTSC4L40CCMIR2">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="pgi" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BTSC4L40CCMIS2R45">
<grid name="f19_g16">
<test name="ERS_D_Ld7">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BWMC4L40CCMIR2">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="gnu" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="gnu" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BWMC4L40CCMIS2R85">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="intel" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="intel" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
<compset name="BWTC4L40CCMIR2">
<grid name="f19_g16">
<test name="ERS_Ld7">
<machine compiler="pgi" testtype="prealpha" testmods="allactive/default">yellowstone</machine>
<machine compiler="pgi" testtype="prebeta" testmods="allactive/default">yellowstone</machine>
</test>
</grid>
</compset>
</testlist>
E3SM XML settings for optimized processor element (PE) layout configurations.
<?xml version="1.0"?>
<config_pes>
<!-- ===================================================================== -->
<!-- Default PE layouts (grid "any" fallback entries)                      -->
<!-- ===================================================================== -->
<grid name="any">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
<mach name="theta">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="lawrencium-lr2|lawrencium-lr3">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="XATM|DATM.+CLM" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>-1</ntasks_atm>
<ntasks_lnd>-1</ntasks_lnd>
<ntasks_rof>-1</ntasks_rof>
<ntasks_ice>-1</ntasks_ice>
<ntasks_ocn>-1</ntasks_ocn>
<ntasks_glc>-1</ntasks_glc>
<ntasks_wav>-1</ntasks_wav>
<ntasks_cpl>-1</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="eos">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>8</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne16np4">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne240np4">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>2048</ntasks_atm>
<ntasks_lnd>2048</ntasks_lnd>
<ntasks_rof>2048</ntasks_rof>
<ntasks_ice>2048</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>2048</ntasks_glc>
<ntasks_wav>2048</ntasks_wav>
<ntasks_cpl>2048</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne240np4">
<mach name="any">
<pes compset="CAM.+CLM.+DOCN." pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>2048</ntasks_atm>
<ntasks_lnd>2048</ntasks_lnd>
<ntasks_rof>2048</ntasks_rof>
<ntasks_ice>2048</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>2048</ntasks_glc>
<ntasks_wav>2048</ntasks_wav>
<ntasks_cpl>2048</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4">
<mach name="titan|stampede|bluewaters|edison|eos|cori-haswell">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>4800</ntasks_atm>
<ntasks_lnd>4800</ntasks_lnd>
<ntasks_rof>4800</ntasks_rof>
<ntasks_ice>4800</ntasks_ice>
<ntasks_ocn>4800</ntasks_ocn>
<ntasks_glc>4800</ntasks_glc>
<ntasks_wav>4800</ntasks_wav>
<ntasks_cpl>4800</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
<mach name="cori-knl">
<pes compset="any" pesize="L">
<comment>cori-knl, generic ne120, 338 nodes, 64x4 </comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>256</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>21600</ntasks_atm>
<ntasks_lnd>21600</ntasks_lnd>
<ntasks_rof>21600</ntasks_rof>
<ntasks_ice>19200</ntasks_ice>
<ntasks_ocn>19200</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>21600</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="any">
<comment>cori-knl, generic ne120, 169 nodes, 64x4 </comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>256</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>10800</ntasks_atm>
<ntasks_lnd>10800</ntasks_lnd>
<ntasks_rof>10800</ntasks_rof>
<ntasks_ice>9600</ntasks_ice>
<ntasks_ocn>9600</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>10800</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="S">
<comment>cori-knl, generic ne120, 85 nodes, 64x4 </comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>256</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>5400</ntasks_lnd>
<ntasks_rof>5400</ntasks_rof>
<ntasks_ice>4800</ntasks_ice>
<ntasks_ocn>4800</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>5400</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4">
<mach name="any">
<pes compset="CAM.+CLM.+DOCN." pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1024</ntasks_atm>
<ntasks_lnd>1024</ntasks_lnd>
<ntasks_rof>1024</ntasks_rof>
<ntasks_ice>1024</ntasks_ice>
<ntasks_ocn>1024</ntasks_ocn>
<ntasks_glc>1024</ntasks_glc>
<ntasks_wav>1024</ntasks_wav>
<ntasks_cpl>1024</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4">
<mach name="titan|stampede|bluewaters|edison|eos|cori-haswell">
<pes compset="CAM.+CLM.+DOCN." pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>4800</ntasks_atm>
<ntasks_lnd>4800</ntasks_lnd>
<ntasks_rof>4800</ntasks_rof>
<ntasks_ice>4800</ntasks_ice>
<ntasks_ocn>4800</ntasks_ocn>
<ntasks_glc>4800</ntasks_glc>
<ntasks_wav>4800</ntasks_wav>
<ntasks_cpl>4800</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4">
<mach name="titan|stampede|bluewaters">
<!-- Fully coupled (CAM+CLM+CICE+POP) ne120 layout: lnd on PEs 0-1599,
     ice stacked on PEs 1600-3199, ocn placed after atm at rootpe 3200
     so the ocean runs concurrently with the atmosphere. -->
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>3200</ntasks_atm>
<ntasks_lnd>1600</ntasks_lnd>
<ntasks_rof>3200</ntasks_rof>
<ntasks_ice>1600</ntasks_ice>
<!-- NOTE(review): 32 ocean tasks for an active POP ocean at ne120 looks
     very small next to the 3200-task atmosphere — confirm this is not a
     typo (a similar ocn=32 value appears in the ne30 gx1v6 entry). -->
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>3200</ntasks_glc>
<ntasks_wav>3200</ntasks_wav>
<ntasks_cpl>3200</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>1600</rootpe_ice>
<rootpe_ocn>3200</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="any">
<pes compset="CAM.+CLM.+DOCN." pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="melvin">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>48</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="edison">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>192</ntasks_atm>
<ntasks_lnd>192</ntasks_lnd>
<ntasks_rof>192</ntasks_rof>
<ntasks_ice>192</ntasks_ice>
<ntasks_ocn>192</ntasks_ocn>
<ntasks_glc>192</ntasks_glc>
<ntasks_wav>192</ntasks_wav>
<ntasks_cpl>192</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="cori-haswell">
<pes compset="any" pesize="L">
<comment>169 nodes, 19 sypd</comment>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>5400</ntasks_lnd>
<ntasks_rof>5400</ntasks_rof>
<ntasks_ice>5400</ntasks_ice>
<ntasks_ocn>5400</ntasks_ocn>
<ntasks_glc>5400</ntasks_glc>
<ntasks_wav>5400</ntasks_wav>
<ntasks_cpl>5400</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="any">
<comment>85 nodes</comment>
<ntasks>
<ntasks_atm>2700</ntasks_atm>
<ntasks_lnd>2700</ntasks_lnd>
<ntasks_rof>2700</ntasks_rof>
<ntasks_ice>2700</ntasks_ice>
<ntasks_ocn>2700</ntasks_ocn>
<ntasks_glc>2700</ntasks_glc>
<ntasks_wav>2700</ntasks_wav>
<ntasks_cpl>2700</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="S">
<comment>9 nodes</comment>
<ntasks>
<ntasks_atm>270</ntasks_atm>
<ntasks_lnd>270</ntasks_lnd>
<ntasks_rof>270</ntasks_rof>
<ntasks_ice>270</ntasks_ice>
<ntasks_ocn>270</ntasks_ocn>
<ntasks_glc>270</ntasks_glc>
<ntasks_wav>270</ntasks_wav>
<ntasks_cpl>270</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="cori-knl">
<!-- NOTE(review): unlike the cori-knl ne120 entries above, none of the
     <pes> blocks below contain a <rootpe> section — presumably all root
     PEs default to 0; confirm against the config_pes schema/consumer. -->
<pes compset="any" pesize="L">
<comment>cori-knl, generic ne30, 85 nodes, 64x1</comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>5440</ntasks_atm>
<ntasks_lnd>5440</ntasks_lnd>
<ntasks_rof>5440</ntasks_rof>
<ntasks_ice>4800</ntasks_ice>
<ntasks_ocn>4800</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>5440</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
</pes>
<pes compset="any" pesize="any">
<comment>cori-knl, generic ne30, 43 nodes, 32x4</comment>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>1350</ntasks_atm>
<ntasks_lnd>1350</ntasks_lnd>
<ntasks_rof>1350</ntasks_rof>
<ntasks_ice>1200</ntasks_ice>
<ntasks_ocn>1200</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>1350</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
<pes compset="any" pesize="S">
<comment>cori-knl, generic ne30, 22 nodes, 32x4</comment>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>704</ntasks_atm>
<ntasks_lnd>704</ntasks_lnd>
<ntasks_rof>704</ntasks_rof>
<ntasks_ice>480</ntasks_ice>
<ntasks_ocn>480</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>704</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
</pes>
<!-- NOTE(review): comment says 34x8 but MAX_TASKS_PER_NODE is 268, not
     34*8=272, and glc/wav get 33 tasks, not 34 — presumably deliberate
     core reservation on the 68-core KNL node; confirm these are not typos. -->
<pes compset="any" pesize="T">
<comment>cori-knl, generic ne30, 4 nodes, 34x8</comment>
<MAX_MPITASKS_PER_NODE>34</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>268</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>136</ntasks_atm>
<ntasks_lnd>136</ntasks_lnd>
<ntasks_rof>136</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
<ntasks_cpl>136</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_lnd>8</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>8</nthrds_cpl>
</nthrds>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="eos">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>960</ntasks_atm>
<ntasks_lnd>960</ntasks_lnd>
<ntasks_rof>960</ntasks_rof>
<ntasks_ice>960</ntasks_ice>
<ntasks_ocn>960</ntasks_ocn>
<ntasks_glc>960</ntasks_glc>
<ntasks_wav>960</ntasks_wav>
<ntasks_cpl>960</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="blues">
<pes compset="XATM|DATM.+CLM" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="titan|stampede|bluewaters">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1280</ntasks_atm>
<ntasks_lnd>1280</ntasks_lnd>
<ntasks_rof>1280</ntasks_rof>
<ntasks_ice>1280</ntasks_ice>
<ntasks_ocn>1280</ntasks_ocn>
<ntasks_glc>1280</ntasks_glc>
<ntasks_wav>1280</ntasks_wav>
<ntasks_cpl>1280</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="titan|stampede|bluewaters">
<pes compset="CAM.+CLM.+DOCN." pesize="any">
<comment>none</comment>
<!-- NOTE(review): this compset-specific entry is currently value-for-value
     identical (ntasks/nthrds/rootpe all match) to the compset="any" entry
     for the same grid/mach directly above. If the two are meant to stay in
     lockstep, consider whether this entry is needed at all; if they are
     meant to diverge, this one has not been specialized yet - confirm. -->
<ntasks>
<ntasks_atm>1280</ntasks_atm>
<ntasks_lnd>1280</ntasks_lnd>
<ntasks_rof>1280</ntasks_rof>
<ntasks_ice>1280</ntasks_ice>
<ntasks_ocn>1280</ntasks_ocn>
<ntasks_glc>1280</ntasks_glc>
<ntasks_wav>1280</ntasks_wav>
<ntasks_cpl>1280</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4_l%ne30np4_oi%gx1v6">
<mach name="titan|stampede|bluewaters">
<pes compset="any" pesize="any">
<comment>none</comment>
<!-- Non-trivial PE layout (the only one in this section with non-zero
     rootpe values): atm/rof/wav span tasks 0-1999; lnd uses the first 960
     of those; ice is stacked after lnd (rootpe 960 + 1040 tasks = 2000,
     i.e. it ends exactly where the atm block ends); ocn gets its own 32
     tasks starting at PE 2000, outside the atm range - presumably so the
     ocean runs concurrently with the atm group (confirm against the
     machine's intended node count). cpl shares the first 960 tasks. -->
<ntasks>
<ntasks_atm>2000</ntasks_atm>
<ntasks_lnd>960</ntasks_lnd>
<ntasks_rof>2000</ntasks_rof>
<ntasks_ice>1040</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>2000</ntasks_wav>
<ntasks_cpl>960</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>960</rootpe_ice>
<rootpe_ocn>2000</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="any">
<pes compset=".+SATM.+SLND.+SICE.+SOCN.+SROF.+MALI%SIA.+SWAV" pesize="any">
<comment>MALI%SIA land-ice compset with all other components stubbed (SATM/SLND/SICE/SOCN/SROF/SWAV)</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>8</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>4</ntasks_atm>
<ntasks_lnd>4</ntasks_lnd>
<ntasks_rof>4</ntasks_rof>
<ntasks_ice>4</ntasks_ice>
<ntasks_ocn>4</ntasks_ocn>
<ntasks_glc>4</ntasks_glc>
<ntasks_wav>4</ntasks_wav>
<ntasks_cpl>4</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="sandiatoss3">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="sandiatoss3">
<pes compset="any" pesize="S">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4_l%ne30np4_oi%oEC60to30v3_r%r05_g%null_w%null_z%null_m%oEC60to30v3">
<mach name="sandiatoss3">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>256</ntasks_atm>
<ntasks_lnd>256</ntasks_lnd>
<ntasks_rof>256</ntasks_rof>
<ntasks_ice>256</ntasks_ice>
<ntasks_ocn>256</ntasks_ocn>
<ntasks_glc>256</ntasks_glc>
<ntasks_wav>256</ntasks_wav>
<ntasks_cpl>256</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="anlworkstation">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T31">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T31">
<mach name="edison">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>12</ntasks_atm>
<ntasks_lnd>12</ntasks_lnd>
<ntasks_rof>12</ntasks_rof>
<ntasks_ice>12</ntasks_ice>
<ntasks_ocn>12</ntasks_ocn>
<ntasks_glc>12</ntasks_glc>
<ntasks_wav>12</ntasks_wav>
<ntasks_cpl>12</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T31">
<mach name="eos">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T31">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>4</ntasks_atm>
<ntasks_lnd>4</ntasks_lnd>
<ntasks_rof>4</ntasks_rof>
<ntasks_ice>4</ntasks_ice>
<ntasks_ocn>4</ntasks_ocn>
<ntasks_glc>4</ntasks_glc>
<ntasks_wav>4</ntasks_wav>
<ntasks_cpl>4</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%4x5">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>8</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%4x5">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>2</ntasks_atm>
<ntasks_lnd>2</ntasks_lnd>
<ntasks_rof>2</ntasks_rof>
<ntasks_ice>2</ntasks_ice>
<ntasks_ocn>2</ntasks_ocn>
<ntasks_glc>2</ntasks_glc>
<ntasks_wav>2</ntasks_wav>
<ntasks_cpl>2</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62">
<mach name="edison|melvin">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>48</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62">
<mach name="eos">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62">
<mach name="cori-haswell">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62">
<mach name="cori-knl">
<pes compset="any" pesize="any">
<comment>2 nodes, 64x2</comment>
<!-- Per-layout node-geometry override for KNL: at most 64 MPI ranks per
     node, 128 total tasks (ranks x threads) per node. Consistent with the
     decomposition below: 128 ranks / 64 per node = 2 nodes, each rank
     running 2 threads - i.e. the "2 nodes, 64x2" in the comment. -->
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T85">
<mach name="titan|stampede|janus">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T85">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T85">
<mach name="edison|eos">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>24</ntasks_atm>
<ntasks_lnd>24</ntasks_lnd>
<ntasks_rof>24</ntasks_rof>
<ntasks_ice>24</ntasks_ice>
<ntasks_ocn>24</ntasks_ocn>
<ntasks_glc>24</ntasks_glc>
<ntasks_wav>24</ntasks_wav>
<ntasks_cpl>24</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5">
<mach name="melvin">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>48</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5">
<mach name="edison|eos">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5">
<mach name="cori-haswell">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5">
<mach name="cori-knl">
<pes compset="any" pesize="any">
<comment>4 nodes, 64x2</comment>
<!-- Per-layout node-geometry override for KNL: at most 64 MPI ranks per
     node, 128 total tasks (ranks x threads) per node. Consistent with the
     decomposition below: 256 ranks / 64 per node = 4 nodes, each rank
     running 2 threads - matching the "4 nodes, 64x2" comment. -->
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>256</ntasks_atm>
<ntasks_lnd>256</ntasks_lnd>
<ntasks_rof>256</ntasks_rof>
<ntasks_ice>256</ntasks_ice>
<ntasks_ocn>256</ntasks_ocn>
<ntasks_glc>256</ntasks_glc>
<ntasks_wav>256</ntasks_wav>
<ntasks_cpl>256</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5">
<mach name="any">
<pes compset="CAM.+CLM.+DOCN." pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5">
<mach name="edison|eos">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>24</ntasks_atm>
<ntasks_lnd>24</ntasks_lnd>
<ntasks_rof>24</ntasks_rof>
<ntasks_ice>24</ntasks_ice>
<ntasks_ocn>24</ntasks_ocn>
<ntasks_glc>24</ntasks_glc>
<ntasks_wav>24</ntasks_wav>
<ntasks_cpl>24</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="melvin">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>48</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="edison|eos">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>192</ntasks_atm>
<ntasks_lnd>192</ntasks_lnd>
<ntasks_rof>192</ntasks_rof>
<ntasks_ice>192</ntasks_ice>
<ntasks_ocn>192</ntasks_ocn>
<ntasks_glc>192</ntasks_glc>
<ntasks_wav>192</ntasks_wav>
<ntasks_cpl>192</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="cori-haswell">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="cori-knl">
<pes compset="any" pesize="any">
<comment>1 node, 64x2</comment>
<!-- Per-layout node-geometry override for KNL: at most 64 MPI ranks per
     node, 128 total tasks (ranks x threads) per node. Consistent with the
     decomposition below: 64 ranks / 64 per node = 1 node, each rank
     running 2 threads - matching the "1 node, 64x2" comment. -->
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="any">
<pes compset="CAM.+CLM.+DOCN." pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25">
<mach name="edison|eos">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>48</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%360x720cru">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%360x720cru">
<mach name="edison|melvin">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>48</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%360x720cru">
<mach name="cori-knl">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne16np4">
<mach name="edison|cori-haswell">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne16np4">
<mach name="cori-knl">
<pes compset="any" pesize="any">
<comment>cori-knl, 6 nodes, 64x4, sypd=2.93 (for F-compset)</comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>256</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>384</ntasks_atm>
<ntasks_lnd>384</ntasks_lnd>
<ntasks_rof>384</ntasks_rof>
<ntasks_ice>256</ntasks_ice>
<ntasks_ocn>256</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>384</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
</mach>
</grid>
<grid name="a%ne11np4">
<mach name="cori-knl">
<pes compset="any" pesize="any">
<comment>6 nodes, 64x2, sypd=11.1 (for F-compset)</comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>363</ntasks_atm>
<ntasks_lnd>363</ntasks_lnd>
<ntasks_rof>363</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>363</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25_l%0.9x1.25_oi%gx1">
<mach name="edison">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>960</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>960</ntasks_rof>
<ntasks_ice>912</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>960</ntasks_wav>
<ntasks_cpl>960</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>48</rootpe_ice>
<rootpe_ocn>960</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>256</ntasks_atm>
<ntasks_lnd>256</ntasks_lnd>
<ntasks_rof>256</ntasks_rof>
<ntasks_ice>256</ntasks_ice>
<ntasks_ocn>256</ntasks_ocn>
<ntasks_glc>256</ntasks_glc>
<ntasks_wav>256</ntasks_wav>
<ntasks_cpl>256</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.23x0.31">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>512</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>512</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>512</ntasks_glc>
<ntasks_wav>512</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.23x0.31">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ar9v">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%wr50a">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>25</ntasks_atm>
<ntasks_lnd>25</ntasks_lnd>
<ntasks_rof>25</ntasks_rof>
<ntasks_ice>25</ntasks_ice>
<ntasks_ocn>25</ntasks_ocn>
<ntasks_glc>25</ntasks_glc>
<ntasks_wav>25</ntasks_wav>
<ntasks_cpl>25</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%wr50a">
<mach name="any">
<pes compset="CAM.+CLM.+DOCN." pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>25</ntasks_atm>
<ntasks_lnd>25</ntasks_lnd>
<ntasks_rof>25</ntasks_rof>
<ntasks_ice>25</ntasks_ice>
<ntasks_ocn>25</ntasks_ocn>
<ntasks_glc>25</ntasks_glc>
<ntasks_wav>25</ntasks_wav>
<ntasks_cpl>25</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ar9v1|a%ar9v3">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ar9v2|a%ar9v4">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>40</ntasks_atm>
<ntasks_lnd>40</ntasks_lnd>
<ntasks_rof>40</ntasks_rof>
<ntasks_ice>40</ntasks_ice>
<ntasks_ocn>40</ntasks_ocn>
<ntasks_glc>40</ntasks_glc>
<ntasks_wav>40</ntasks_wav>
<ntasks_cpl>40</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%wr50a_l%wr50a_l%ar9v"><!-- NOTE(review): 'l%' appears twice in this grid name; a longname has one land component, so the second entry is likely a typo for another component prefix (e.g. r% or oi%) — verify against config_grids.xml -->
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="mira">
<pes compset="any" pesize="any">
<comment>default and minimal 512-node partition</comment>
<ntasks>
<ntasks_atm>2048</ntasks_atm>
<ntasks_lnd>2048</ntasks_lnd>
<ntasks_rof>2048</ntasks_rof>
<ntasks_ice>2048</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>2048</ntasks_glc>
<ntasks_wav>2048</ntasks_wav>
<ntasks_cpl>2048</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>16</nthrds_atm>
<nthrds_lnd>16</nthrds_lnd>
<nthrds_rof>16</nthrds_rof>
<nthrds_ice>16</nthrds_ice>
<nthrds_ocn>16</nthrds_ocn>
<nthrds_glc>16</nthrds_glc>
<nthrds_wav>16</nthrds_wav>
<nthrds_cpl>16</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
<mach name="cetus">
<pes compset="any" pesize="any">
<comment>default 64x16 PEs for acme_developer tests</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>16</nthrds_atm>
<nthrds_lnd>16</nthrds_lnd>
<nthrds_rof>16</nthrds_rof>
<nthrds_ice>16</nthrds_ice>
<nthrds_ocn>16</nthrds_ocn>
<nthrds_glc>16</nthrds_glc>
<nthrds_wav>16</nthrds_wav>
<nthrds_cpl>16</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
<mach name="mira|cetus|bebop">
<pes compset=".+SATM.+SLND.+SICE.+SOCN.+SROF.+MALI.+SWAV" pesize="any">
<comment>-compset MALI</comment>
<ntasks>
<ntasks_atm>1</ntasks_atm>
<ntasks_lnd>1</ntasks_lnd>
<ntasks_rof>1</ntasks_rof>
<ntasks_ice>1</ntasks_ice>
<ntasks_ocn>1</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>16</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>16</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%4x5">
<mach name="cetus|mira">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>8</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>16</nthrds_atm>
<nthrds_lnd>16</nthrds_lnd>
<nthrds_rof>16</nthrds_rof>
<nthrds_ice>16</nthrds_ice>
<nthrds_ocn>16</nthrds_ocn>
<nthrds_glc>16</nthrds_glc>
<nthrds_wav>16</nthrds_wav>
<nthrds_cpl>16</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%gx3">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%gx3">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>5</ntasks_ice>
<ntasks_ocn>4</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%gx1|a%1.9x2.5.+oi%gx1">
<mach name="any">
<pes compset="DATM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1</ntasks_atm>
<ntasks_lnd>1</ntasks_lnd>
<ntasks_rof>1</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>16</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>32</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%gx1">
<mach name="any">
<pes compset="DATM.+CICE.+POP" pesize="L">
<comment>none</comment>
<ntasks>
<ntasks_atm>1</ntasks_atm>
<ntasks_lnd>1</ntasks_lnd>
<ntasks_rof>1</ntasks_rof>
<ntasks_ice>80</ntasks_ice>
<ntasks_ocn>192</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>80</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>80</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%tx0.1v2">
<mach name="any">
<pes compset="DATM.+DICE.+POP" pesize="S">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>8</ntasks_ice>
<ntasks_ocn>480</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>8</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>16</rootpe_ice>
<rootpe_ocn>32</rootpe_ocn>
<rootpe_glc>24</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%tx0.1v2">
<mach name="any">
<pes compset="DATM.+CICE.+DOCN" pesize="S">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>480</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>8</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>32</rootpe_ice>
<rootpe_ocn>16</rootpe_ocn>
<rootpe_glc>24</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%tx0.1v2">
<mach name="any">
<pes compset="DATM.+CICE.+POP" pesize="S">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>480</ntasks_ice>
<ntasks_ocn>480</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>16</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>32</rootpe_ice>
<rootpe_ocn>32</rootpe_ocn>
<rootpe_glc>24</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%tx0.1v2">
<mach name="any">
<pes compset="DATM.+DICE.+POP" pesize="M">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>48</ntasks_ice>
<ntasks_ocn>1024</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>48</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>192</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>96</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>144</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%tx0.1v2">
<mach name="any">
<pes compset="DATM.+CICE.+DOCN" pesize="M">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>1024</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>48</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>192</rootpe_ocn>
<rootpe_glc>96</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>144</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%tx0.1v2">
<mach name="any">
<pes compset="DATM.+CICE.+POP" pesize="M">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>1024</ntasks_ice>
<ntasks_ocn>1024</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>48</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>96</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>144</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+oi%tx0.1v2">
<mach name="edison|eos">
<pes compset="DATM.+CICE.+POP" pesize="M">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>960</ntasks_ice>
<ntasks_ocn>960</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>48</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>96</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>144</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62">
<mach name="edison">
<pes compset="POP2%ECO" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%4x5.+oi%4x5">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>6</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%4x5.+oi%gx3">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>5</ntasks_ice>
<ntasks_ocn>4</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T31.+oi%gx3">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>5</ntasks_ice>
<ntasks_ocn>4</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5_l%1.9x2.5_oi%gx1">
<mach name="titan|stampede|janus">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>160</ntasks_atm>
<ntasks_lnd>160</ntasks_lnd>
<ntasks_rof>160</ntasks_rof>
<ntasks_ice>160</ntasks_ice>
<ntasks_ocn>240</ntasks_ocn>
<ntasks_glc>160</ntasks_glc>
<ntasks_wav>160</ntasks_wav>
<ntasks_cpl>160</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>160</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5_l%1.9x2.5_oi%gx1">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1.9x2.5_l%1.9x2.5_oi%1.9x2.5">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>32</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25_l%0.9x1.25_oi%gx1">
<mach name="any">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25_l%0.9x1.25_oi%gx1">
<mach name="titan|stampede">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>384</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>384</ntasks_rof>
<ntasks_ice>320</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>384</ntasks_glc>
<ntasks_wav>384</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>64</rootpe_ice>
<rootpe_ocn>384</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62_l%T62_oi%gx1">
<mach name="titan|stampede">
<pes compset="DATM.+CICE.+POP|DATM.+DICE.+POP|DATM.+CICE.+DOCN" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="WRF.+CLM.+DICE.+DOCN" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>120</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>120</ntasks_rof>
<ntasks_ice>12</ntasks_ice>
<ntasks_ocn>12</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>120</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>96</rootpe_ice>
<rootpe_ocn>108</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="wr50a_ar9v">
<mach name="any">
<pes compset="^X" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>25</ntasks_atm>
<ntasks_lnd>25</ntasks_lnd>
<ntasks_rof>25</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>25</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="wr50a_ar9v">
<mach name="any">
<pes compset="^RB|^RJ" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>256</ntasks_atm>
<ntasks_lnd>1</ntasks_lnd>
<ntasks_rof>256</ntasks_rof>
<ntasks_ice>256</ntasks_ice>
<ntasks_ocn>256</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>256</ntasks_wav>
<ntasks_cpl>256</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="wr50a_ar9v">
<mach name="any">
<pes compset="^RL|^RK" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>256</ntasks_atm>
<ntasks_lnd>1</ntasks_lnd>
<ntasks_rof>256</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>256</ntasks_wav>
<ntasks_cpl>256</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25_l%0.9x1.25_oi%gx1">
<mach name="mira|cetus">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1525</ntasks_atm>
<ntasks_lnd>71</ntasks_lnd>
<ntasks_rof>71</ntasks_rof>
<ntasks_ice>1454</ntasks_ice>
<ntasks_ocn>256</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1525</ntasks_wav>
<ntasks_cpl>1525</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>16</nthrds_atm>
<nthrds_lnd>16</nthrds_lnd>
<nthrds_rof>16</nthrds_rof>
<nthrds_ice>16</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>16</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>71</rootpe_ice>
<rootpe_ocn>1525</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="DATM.+CLM" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>512</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>512</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>512</ntasks_glc>
<ntasks_wav>512</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>16</nthrds_atm>
<nthrds_lnd>16</nthrds_lnd>
<nthrds_rof>16</nthrds_rof>
<nthrds_ice>16</nthrds_ice>
<nthrds_ocn>16</nthrds_ocn>
<nthrds_glc>16</nthrds_glc>
<nthrds_wav>16</nthrds_wav>
<nthrds_cpl>16</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.9x1.25_l%0.9x1.25_oi%0.9x1.25">
<mach name="mira|cetus">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1525</ntasks_atm>
<ntasks_lnd>71</ntasks_lnd>
<ntasks_rof>71</ntasks_rof>
<ntasks_ice>1454</ntasks_ice>
<ntasks_ocn>256</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1525</ntasks_wav>
<ntasks_cpl>1525</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>16</nthrds_atm>
<nthrds_lnd>16</nthrds_lnd>
<nthrds_rof>16</nthrds_rof>
<nthrds_ice>16</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>16</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>71</rootpe_ice>
<rootpe_ocn>1525</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63_l%0.47x0.63_oi%gx1">
<mach name="any">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>496</ntasks_atm>
<ntasks_lnd>176</ntasks_lnd>
<ntasks_rof>496</ntasks_rof>
<ntasks_ice>320</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>496</ntasks_glc>
<ntasks_wav>496</ntasks_wav>
<ntasks_cpl>160</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>320</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>496</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63_l%0.47x0.63_oi%gx1">
<mach name="titan|stampede|janus">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>512</ntasks_atm>
<ntasks_lnd>192</ntasks_lnd>
<ntasks_rof>512</ntasks_rof>
<ntasks_ice>320</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>512</ntasks_glc>
<ntasks_wav>512</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>320</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>512</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63_l%0.47x0.63_oi%tx0.1v2">
<mach name="any">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="S">
<comment>none</comment>
<ntasks>
<ntasks_atm>480</ntasks_atm>
<ntasks_lnd>416</ntasks_lnd>
<ntasks_rof>480</ntasks_rof>
<ntasks_ice>480</ntasks_ice>
<ntasks_ocn>480</ntasks_ocn>
<ntasks_glc>480</ntasks_glc>
<ntasks_wav>480</ntasks_wav>
<ntasks_cpl>480</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63_l%0.47x0.63_oi%tx0.1v2">
<mach name="any">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="M">
<comment>none</comment>
<ntasks>
<ntasks_atm>1024</ntasks_atm>
<ntasks_lnd>416</ntasks_lnd>
<ntasks_rof>1024</ntasks_rof>
<ntasks_ice>1024</ntasks_ice>
<ntasks_ocn>1024</ntasks_ocn>
<ntasks_glc>1024</ntasks_glc>
<ntasks_wav>1024</ntasks_wav>
<ntasks_cpl>1024</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63_l%0.47x0.63_oi%tx0.1v2">
<mach name="any">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="L">
<comment>none</comment>
<ntasks>
<ntasks_atm>480</ntasks_atm>
<ntasks_lnd>416</ntasks_lnd>
<ntasks_rof>480</ntasks_rof>
<ntasks_ice>480</ntasks_ice>
<ntasks_ocn>1232</ntasks_ocn>
<ntasks_glc>480</ntasks_glc>
<ntasks_wav>480</ntasks_wav>
<ntasks_cpl>432</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>480</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63_l%0.47x0.63_oi%tx0.1v2">
<mach name="any">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="X1">
<comment>none</comment>
<ntasks>
<ntasks_atm>1024</ntasks_atm>
<ntasks_lnd>416</ntasks_lnd>
<ntasks_rof>1024</ntasks_rof>
<ntasks_ice>1024</ntasks_ice>
<ntasks_ocn>2356</ntasks_ocn>
<ntasks_glc>1024</ntasks_glc>
<ntasks_wav>1024</ntasks_wav>
<ntasks_cpl>432</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1024</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.47x0.63_l%0.47x0.63_oi%tx0.1v2">
<mach name="any">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="X2">
<comment>none</comment>
<ntasks>
<ntasks_atm>1664</ntasks_atm>
<ntasks_lnd>416</ntasks_lnd>
<ntasks_rof>1664</ntasks_rof>
<ntasks_ice>1800</ntasks_ice>
<ntasks_ocn>3476</ntasks_ocn>
<ntasks_glc>1664</ntasks_glc>
<ntasks_wav>1664</ntasks_wav>
<ntasks_cpl>432</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1800</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.23x0.31_l%0.23x0.31_oi%gx1">
<mach name="any">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>496</ntasks_atm>
<ntasks_lnd>336</ntasks_lnd>
<ntasks_rof>496</ntasks_rof>
<ntasks_ice>160</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>496</ntasks_glc>
<ntasks_wav>496</ntasks_wav>
<ntasks_cpl>160</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>160</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>496</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4_l%ne30np4_oi%ne30np4">
<mach name="anvil|bebop">
<pes compset="any" pesize="any">
<comment>ne30_ne30 grid on 40 nodes 36 ppn pure-MPI</comment>
<ntasks>
<ntasks_atm>1350</ntasks_atm>
<ntasks_lnd>72</ntasks_lnd>
<ntasks_rof>72</ntasks_rof>
<ntasks_ice>72</ntasks_ice>
<ntasks_ocn>72</ntasks_ocn>
<ntasks_cpl>72</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>1368</rootpe_lnd>
<rootpe_rof>1368</rootpe_rof>
<rootpe_ice>1368</rootpe_ice>
<rootpe_ocn>1368</rootpe_ocn>
<rootpe_cpl>1368</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset="any" pesize="L">
<comment>77x36x1</comment>
<ntasks>
<ntasks_atm>2700</ntasks_atm>
<ntasks_lnd>72</ntasks_lnd>
<ntasks_rof>72</ntasks_rof>
<ntasks_ice>72</ntasks_ice>
<ntasks_ocn>72</ntasks_ocn>
<ntasks_cpl>2700</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>2700</rootpe_lnd>
<rootpe_rof>2700</rootpe_rof>
<rootpe_ice>2628</rootpe_ice>
<rootpe_ocn>2700</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset="any" pesize="XL">
<comment>152x36x1</comment>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>72</ntasks_lnd>
<ntasks_rof>72</ntasks_rof>
<ntasks_ice>72</ntasks_ice>
<ntasks_ocn>72</ntasks_ocn>
<ntasks_cpl>72</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>5400</rootpe_lnd>
<rootpe_rof>5400</rootpe_rof>
<rootpe_ice>5400</rootpe_ice>
<rootpe_ocn>5400</rootpe_ocn>
<rootpe_cpl>5400</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="compy">
<pes compset="any" pesize="any">
<comment>ne30_ne30 grid on 23 nodes 40 ppn pure-MPI</comment>
<ntasks>
<ntasks_atm>900</ntasks_atm>
<ntasks_lnd>900</ntasks_lnd>
<ntasks_rof>900</ntasks_rof>
<ntasks_ice>900</ntasks_ice>
<ntasks_ocn>900</ntasks_ocn>
<ntasks_cpl>900</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="L">
<comment>ne30_ne30 grid on 68 nodes 40 ppn pure-MPI</comment>
<ntasks>
<ntasks_atm>2700</ntasks_atm>
<ntasks_lnd>2700</ntasks_lnd>
<ntasks_rof>2700</ntasks_rof>
<ntasks_ice>2700</ntasks_ice>
<ntasks_ocn>2700</ntasks_ocn>
<ntasks_cpl>2700</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="XL">
<comment>ne30_ne30 grid on 135 nodes 40 ppn pure-MPI</comment>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>5400</ntasks_lnd>
<ntasks_rof>5400</ntasks_rof>
<ntasks_ice>5400</ntasks_ice>
<ntasks_ocn>5400</ntasks_ocn>
<ntasks_cpl>5400</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="anvil">
<pes compset="any" pesize="any">
<comment>default,4nodes*36tasks*1threads</comment>
<ntasks>
<ntasks_atm>144</ntasks_atm>
<ntasks_lnd>144</ntasks_lnd>
<ntasks_rof>144</ntasks_rof>
<ntasks_ice>144</ntasks_ice>
<ntasks_ocn>144</ntasks_ocn>
<ntasks_glc>144</ntasks_glc>
<ntasks_wav>144</ntasks_wav>
<ntasks_cpl>144</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
<mach name="compy">
<pes compset="any" pesize="any">
<comment>default,4nodes*40tasks*1thread</comment>
<ntasks>
<ntasks_atm>160</ntasks_atm>
<ntasks_lnd>160</ntasks_lnd>
<ntasks_rof>160</ntasks_rof>
<ntasks_ice>160</ntasks_ice>
<ntasks_ocn>160</ntasks_ocn>
<ntasks_glc>160</ntasks_glc>
<ntasks_wav>160</ntasks_wav>
<ntasks_cpl>160</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="mira|cetus">
<pes compset="any" pesize="any">
<comment>ne30 grid on 128x16x4 PEs</comment>
<MAX_MPITASKS_PER_NODE>16</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>1350</ntasks_atm>
<ntasks_lnd>176</ntasks_lnd>
<ntasks_rof>176</ntasks_rof>
<ntasks_ice>1360</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>1360</ntasks_cpl>
<ntasks_esp>1</ntasks_esp>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
<nthrds_esp>1</nthrds_esp>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>1360</rootpe_lnd>
<rootpe_rof>1360</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1536</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_esp>0</rootpe_esp>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne240np4_l%0.23x0.31_oi%gx1">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>2560</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>2560</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>2560</ntasks_glc>
<ntasks_wav>2560</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>1536</rootpe_atm>
<rootpe_lnd>512</rootpe_lnd>
<rootpe_rof>1536</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>1536</rootpe_glc>
<rootpe_wav>1536</rootpe_wav>
<rootpe_cpl>1023</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne240np4_l%0.23x0.31_oi%gx1">
<mach name="edison|eos">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>9600</ntasks_atm>
<ntasks_lnd>960</ntasks_lnd>
<ntasks_rof>960</ntasks_rof>
<ntasks_ice>960</ntasks_ice>
<ntasks_ocn>960</ntasks_ocn>
<ntasks_glc>960</ntasks_glc>
<ntasks_wav>9600</ntasks_wav>
<ntasks_cpl>960</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>5800</rootpe_rof>
<rootpe_ice>960</rootpe_ice>
<rootpe_ocn>1920</rootpe_ocn>
<rootpe_glc>4840</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>3880</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne240np4_l%0.23x0.31_oi%tx0.1v2">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>2048</ntasks_atm>
<ntasks_lnd>112</ntasks_lnd>
<ntasks_rof>2048</ntasks_rof>
<ntasks_ice>1800</ntasks_ice>
<ntasks_ocn>4028</ntasks_ocn>
<ntasks_glc>2048</ntasks_glc>
<ntasks_wav>2048</ntasks_wav>
<ntasks_cpl>2048</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>2048</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>2160</rootpe_ice>
<rootpe_ocn>3960</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T341_l%T341_oi%T341">
<mach name="titan|stampede|janus">
<pes compset="CAM.+CLM4.+CICE.+DOCN%DOM" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>512</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>512</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>512</ntasks_glc>
<ntasks_wav>512</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>6</nthrds_atm>
<nthrds_lnd>6</nthrds_lnd>
<nthrds_rof>6</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>6</nthrds_glc>
<nthrds_wav>6</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.23x0.31_l%0.23x0.31_oi%tx0.1v2">
<mach name="janus">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>2048</ntasks_atm>
<ntasks_lnd>112</ntasks_lnd>
<ntasks_rof>2048</ntasks_rof>
<ntasks_ice>1800</ntasks_ice>
<ntasks_ocn>4028</ntasks_ocn>
<ntasks_glc>2048</ntasks_glc>
<ntasks_wav>2048</ntasks_wav>
<ntasks_cpl>1800</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>2048</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>2160</rootpe_ice>
<rootpe_ocn>3960</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%0.23x0.31_l%0.23x0.31_oi%tx0.1v2">
<mach name="titan|stampede">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1824</ntasks_atm>
<ntasks_lnd>112</ntasks_lnd>
<ntasks_rof>1824</ntasks_rof>
<ntasks_ice>1600</ntasks_ice>
<ntasks_ocn>3600</ntasks_ocn>
<ntasks_glc>1824</ntasks_glc>
<ntasks_wav>1824</ntasks_wav>
<ntasks_cpl>1600</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>1824</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>1936</rootpe_ice>
<rootpe_ocn>3536</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T341_l%T341_oi%tx0.1v2">
<mach name="titan|stampede|janus">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>512</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>512</ntasks_rof>
<ntasks_ice>1800</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>512</ntasks_glc>
<ntasks_wav>512</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>6</nthrds_atm>
<nthrds_lnd>6</nthrds_lnd>
<nthrds_rof>6</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>6</nthrds_glc>
<nthrds_wav>6</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>512</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>512</rootpe_ice>
<rootpe_ocn>2312</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T341_l%0.23x0.31_oi%tx0.1v2">
<mach name="titan|stampede|janus">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>512</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>512</ntasks_rof>
<ntasks_ice>1800</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>512</ntasks_glc>
<ntasks_wav>512</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>6</nthrds_atm>
<nthrds_lnd>6</nthrds_lnd>
<nthrds_rof>6</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>6</nthrds_glc>
<nthrds_wav>6</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>512</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>512</rootpe_ice>
<rootpe_ocn>2312</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4_l%0.23x0.31_oi%tx0.1v2">
<mach name="titan|stampede|janus">
<pes compset="CAM.+CLM.+CICE.+POP" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1440</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>1440</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>1440</ntasks_glc>
<ntasks_wav>1440</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1440</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4_l%0.9x1.25_oi%gx1v6">
<mach name="titan|stampede|janus">
<pes compset="CAM.+CLM4.+CICE.+DOCN%DOM" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>3600</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>3600</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>3600</ntasks_glc>
<ntasks_wav>3600</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4_l%0.23x0.31_oi%tx0.1v2">
<mach name="titan|stampede|janus">
<pes compset="CAM.+CLM4.+CICE.+DOCN%DOM" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>14400</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>14400</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>14400</ntasks_glc>
<ntasks_wav>14400</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<!-- NOTE(review): this compset regex "DLND.+CISM2P" has no underscore before CISM2P, unlike the
     two following entries ("DLND.+_CISM2P"), so it matches a strict superset of the compsets they
     match. If the three entries are meant to be alternative sizes for the same compsets, the
     selectors should agree; confirm whether the broader match here is intentional. -->
<pes compset="DLND.+CISM2P" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>192</ntasks_atm>
<ntasks_lnd>192</ntasks_lnd>
<ntasks_rof>192</ntasks_rof>
<ntasks_ice>192</ntasks_ice>
<ntasks_ocn>192</ntasks_ocn>
<ntasks_glc>192</ntasks_glc>
<ntasks_wav>192</ntasks_wav>
<ntasks_cpl>192</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<!-- NOTE(review): the selector tuple here (grid="any", mach="any", compset="DLND.+_CISM2P",
     pesize="any") is identical to the entry immediately following, which specifies 32 tasks
     instead of 96. At most one of the two duplicates can ever take effect; confirm which one the
     PE-layout matcher prefers and remove (or re-key, e.g. via distinct pesize values) the other. -->
<pes compset="DLND.+_CISM2P" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="DLND.+_CISM2P" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="DLND.+CISM1|DLND.+CISM2S" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1</ntasks_atm>
<ntasks_lnd>1</ntasks_lnd>
<ntasks_rof>1</ntasks_rof>
<ntasks_ice>1</ntasks_ice>
<ntasks_ocn>1</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>1</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="_CISM1|_CISM2S" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="CLB" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="POP2%DAR" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="any" pesize="FC">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>2</ntasks_lnd>
<ntasks_rof>2</ntasks_rof>
<ntasks_ice>4</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>4</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>4</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>8</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>2</rootpe_rof>
<rootpe_ice>4</rootpe_ice>
<rootpe_ocn>24</rootpe_ocn>
<rootpe_glc>20</rootpe_glc>
<rootpe_wav>8</rootpe_wav>
<rootpe_cpl>16</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="edison">
<pes compset="any" pesize="FC">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>4</ntasks_lnd>
<ntasks_rof>4</ntasks_rof>
<ntasks_ice>8</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>4</ntasks_glc>
<ntasks_wav>4</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>8</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>12</rootpe_rof>
<rootpe_ice>16</rootpe_ice>
<rootpe_ocn>24</rootpe_ocn>
<rootpe_glc>32</rootpe_glc>
<rootpe_wav>44</rootpe_wav>
<rootpe_cpl>36</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="eos">
<pes compset="any" pesize="FC">
<comment>none</comment>
<ntasks>
<ntasks_atm>4</ntasks_atm>
<ntasks_lnd>4</ntasks_lnd>
<ntasks_rof>4</ntasks_rof>
<ntasks_ice>4</ntasks_ice>
<ntasks_ocn>4</ntasks_ocn>
<ntasks_glc>4</ntasks_glc>
<ntasks_wav>4</ntasks_wav>
<ntasks_cpl>4</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>4</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>8</rootpe_rof>
<rootpe_ice>12</rootpe_ice>
<rootpe_ocn>16</rootpe_ocn>
<rootpe_glc>20</rootpe_glc>
<rootpe_wav>28</rootpe_wav>
<rootpe_cpl>24</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="XATM" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>16</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>16</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>16</ntasks_glc>
<ntasks_wav>16</ntasks_wav>
<ntasks_cpl>16</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="XATM" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>4</ntasks_atm>
<ntasks_lnd>4</ntasks_lnd>
<ntasks_rof>4</ntasks_rof>
<ntasks_ice>4</ntasks_ice>
<ntasks_ocn>4</ntasks_ocn>
<ntasks_glc>4</ntasks_glc>
<ntasks_wav>4</ntasks_wav>
<ntasks_cpl>4</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="SATM" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>8</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="SATM" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>2</ntasks_atm>
<ntasks_lnd>2</ntasks_lnd>
<ntasks_rof>2</ntasks_rof>
<ntasks_ice>2</ntasks_ice>
<ntasks_ocn>2</ntasks_ocn>
<ntasks_glc>2</ntasks_glc>
<ntasks_wav>2</ntasks_wav>
<ntasks_cpl>2</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="DATM.+DLND.+DICE.+DOCN%DOM.+DROF" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>2</ntasks_atm>
<ntasks_lnd>2</ntasks_lnd>
<ntasks_rof>2</ntasks_rof>
<ntasks_ice>2</ntasks_ice>
<ntasks_ocn>2</ntasks_ocn>
<ntasks_glc>2</ntasks_glc>
<ntasks_wav>2</ntasks_wav>
<ntasks_cpl>2</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="janus">
<pes compset="DATM.+DLND.+DICE.+DOCN%DOM.+DROF" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>6</ntasks_atm>
<ntasks_lnd>6</ntasks_lnd>
<ntasks_rof>6</ntasks_rof>
<ntasks_ice>6</ntasks_ice>
<ntasks_ocn>6</ntasks_ocn>
<ntasks_glc>6</ntasks_glc>
<ntasks_wav>6</ntasks_wav>
<ntasks_cpl>6</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="titan|stampede">
<pes compset="DATM.+DLND.+DICE.+DOCN%DOM.+DROF" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>8</ntasks_atm>
<ntasks_lnd>8</ntasks_lnd>
<ntasks_rof>8</ntasks_rof>
<ntasks_ice>8</ntasks_ice>
<ntasks_ocn>8</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>8</ntasks_wav>
<ntasks_cpl>8</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="DATM.+DLND.+DICE.+DOCN%DOM.+DROF" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>-1</ntasks_atm>
<ntasks_lnd>-1</ntasks_lnd>
<ntasks_rof>-1</ntasks_rof>
<ntasks_ice>-1</ntasks_ice>
<ntasks_ocn>-1</ntasks_ocn>
<ntasks_glc>-1</ntasks_glc>
<ntasks_wav>-1</ntasks_wav>
<ntasks_cpl>-1</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset="DATM.+DLND.+DICE.+DOCN%DOM.+DROF" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>-1</ntasks_atm>
<ntasks_lnd>-1</ntasks_lnd>
<ntasks_rof>-1</ntasks_rof>
<ntasks_ice>-1</ntasks_ice>
<ntasks_ocn>-1</ntasks_ocn>
<ntasks_glc>-1</ntasks_glc>
<ntasks_wav>-1</ntasks_wav>
<ntasks_cpl>-1</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%CLM_USRDAT">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1</ntasks_atm>
<ntasks_lnd>1</ntasks_lnd>
<ntasks_rof>1</ntasks_rof>
<ntasks_ice>1</ntasks_ice>
<ntasks_ocn>1</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>1</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%1x1_">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1</ntasks_atm>
<ntasks_lnd>1</ntasks_lnd>
<ntasks_rof>1</ntasks_rof>
<ntasks_ice>1</ntasks_ice>
<ntasks_ocn>1</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>1</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%5x5_">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>5</ntasks_atm>
<ntasks_lnd>5</ntasks_lnd>
<ntasks_rof>5</ntasks_rof>
<ntasks_ice>5</ntasks_ice>
<ntasks_ocn>5</ntasks_ocn>
<ntasks_glc>5</ntasks_glc>
<ntasks_wav>5</ntasks_wav>
<ntasks_cpl>5</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="any">
<pes compset=".+DATM.+SLND.+DICE.+MPASO.+DROF.+MALI.+SWAV" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>64</ntasks_atm>
<ntasks_lnd>64</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>64</ntasks_ice>
<ntasks_ocn>64</ntasks_ocn>
<ntasks_glc>8</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>64</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="edison">
<pes compset="CAM.+CLM.+DOCN." pesize="any">
<comment>"113-node 12x4 F-compset sypd=10.8"</comment>
<MAX_MPITASKS_PER_NODE>12</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>48</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>1350</ntasks_atm>
<ntasks_lnd>1350</ntasks_lnd>
<ntasks_rof>1350</ntasks_rof>
<ntasks_ice>1350</ntasks_ice>
<ntasks_ocn>1350</ntasks_ocn>
<ntasks_glc>12</ntasks_glc>
<ntasks_wav>12</ntasks_wav>
<ntasks_cpl>1350</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="edison">
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="any">
<comment>"133 node version gets 6 SYPD. This will be the default and M size"</comment>
<ntasks>
<ntasks_atm>2700</ntasks_atm>
<ntasks_lnd>312</ntasks_lnd>
<ntasks_rof>312</ntasks_rof>
<ntasks_ice>2400</ntasks_ice>
<ntasks_ocn>480</ntasks_ocn>
<ntasks_glc>312</ntasks_glc>
<ntasks_wav>2400</ntasks_wav>
<ntasks_cpl>2400</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>2400</rootpe_lnd>
<rootpe_rof>2400</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>2712</rootpe_ocn>
<rootpe_glc>2400</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="edison">
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="S">
<comment>"39 node version gets 2.1 SYPD."</comment>
<ntasks>
<ntasks_atm>675</ntasks_atm>
<ntasks_lnd>56</ntasks_lnd>
<ntasks_rof>56</ntasks_rof>
<ntasks_ice>640</ntasks_ice>
<ntasks_ocn>240</ntasks_ocn>
<ntasks_glc>56</ntasks_glc>
<ntasks_wav>56</ntasks_wav>
<ntasks_cpl>640</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>640</rootpe_lnd>
<rootpe_rof>640</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>696</rootpe_ocn>
<rootpe_glc>640</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="edison">
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="L">
<comment>"285 node version gets 11.5 SYPD"</comment>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>600</ntasks_lnd>
<ntasks_rof>600</ntasks_rof>
<ntasks_ice>3200</ntasks_ice>
<ntasks_ocn>1440</ntasks_ocn>
<ntasks_glc>600</ntasks_glc>
<ntasks_wav>4800</ntasks_wav>
<ntasks_cpl>4800</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>4800</rootpe_lnd>
<rootpe_rof>4800</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>5400</rootpe_ocn>
<rootpe_glc>4800</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="cori-haswell">
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="any">
<comment>"185 nodes, 32x1, ~5sypd (wmod185)"</comment>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>608</ntasks_lnd>
<ntasks_rof>608</ntasks_rof>
<ntasks_ice>3200</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>4800</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>4800</rootpe_lnd>
<rootpe_rof>4800</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>5408</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="S">
<comment>"15 nodes, 32x1, ~.5sypd (wmod015)"</comment>
<ntasks>
<ntasks_atm>288</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>256</ntasks_ice>
<ntasks_ocn>192</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>288</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>256</rootpe_lnd>
<rootpe_rof>256</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>288</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4">
<mach name="cori-knl">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="L">
<comment>"cori-knl ne30 coupled compset on 120 nodes, 64x1 (2 threads CPL/OCN/ICE), (kmod125) sypd=4.1"</comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>6080</ntasks_atm>
<ntasks_lnd>822</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>5120</ntasks_ice>
<ntasks_ocn>1920</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>5952</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>5120</rootpe_lnd>
<rootpe_rof>5952</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>6080</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="any">
<comment>"cori-knl ne30 coupled compset on 60 nodes, 67x2, (kmod060b) sypd=2.86"</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>268</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>2700</ntasks_atm>
<ntasks_lnd>268</ntasks_lnd>
<ntasks_rof>134</ntasks_rof>
<ntasks_ice>2560</ntasks_ice>
<ntasks_ocn>960</ntasks_ocn>
<ntasks_glc>67</ntasks_glc>
<ntasks_wav>67</ntasks_wav>
<ntasks_cpl>2881</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>2613</rootpe_lnd>
<rootpe_rof>2881</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>3015</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="S">
<comment>"cori-knl ne30 coupled compset on 31 nodes, 67x2, (kmod031b) sypd=1.71"</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>134</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>1350</ntasks_atm>
<ntasks_lnd>670</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>1350</ntasks_ice>
<ntasks_ocn>600</ntasks_ocn>
<ntasks_glc>67</ntasks_glc>
<ntasks_wav>67</ntasks_wav>
<ntasks_cpl>1350</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>1407</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1474</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="T">
<comment>"cori-knl ne30 coupled compset on 17 nodes, 67x4, (kmod017) sypd=1.12"</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>268</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>737</ntasks_atm>
<ntasks_lnd>670</ntasks_lnd>
<ntasks_rof>64</ntasks_rof>
<ntasks_ice>640</ntasks_ice>
<ntasks_ocn>384</ntasks_ocn>
<ntasks_glc>67</ntasks_glc>
<ntasks_wav>67</ntasks_wav>
<ntasks_cpl>737</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>670</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>737</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="L">
<comment>cori-knl ne30 F-compset on 81 nodes, 67x1, sypd=6.1</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>134</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>5427</ntasks_atm>
<ntasks_lnd>5427</ntasks_lnd>
<ntasks_rof>5427</ntasks_rof>
<ntasks_ice>5427</ntasks_ice>
<ntasks_ocn>5427</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
<ntasks_cpl>5427</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="any">
<comment>cori-knl ne30 F-compset on 41 nodes, 33x4, sypd=4.4</comment>
<MAX_MPITASKS_PER_NODE>33</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>132</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>1350</ntasks_atm>
<ntasks_lnd>1350</ntasks_lnd>
<ntasks_rof>1350</ntasks_rof>
<ntasks_ice>1200</ntasks_ice>
<ntasks_ocn>1200</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
<ntasks_cpl>1350</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="S">
<comment>cori-knl ne30 F-compset on 21 nodes, 33x4, sypd=2.35</comment>
<MAX_MPITASKS_PER_NODE>33</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>132</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>693</ntasks_atm>
<ntasks_lnd>693</ntasks_lnd>
<ntasks_rof>693</ntasks_rof>
<ntasks_ice>693</ntasks_ice>
<ntasks_ocn>693</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
<ntasks_cpl>693</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="T">
<comment>cori-knl ne30 F-compset on 4 nodes, 34x8, sypd=0.61</comment>
<MAX_MPITASKS_PER_NODE>34</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>268</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>136</ntasks_atm>
<ntasks_lnd>136</ntasks_lnd>
<ntasks_rof>136</ntasks_rof>
<ntasks_ice>136</ntasks_ice>
<ntasks_ocn>136</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
<ntasks_cpl>136</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
</mach>
</grid>
<grid name="a%ne30np4_l%ne30np4_oi%oEC60to30_r%r05_m%oEC60to30_g%null_w%null">
<mach name="titan">
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>675</ntasks_atm>
<ntasks_lnd>168</ntasks_lnd>
<ntasks_rof>168</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>512</ntasks_wav>
<ntasks_cpl>512</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>512</rootpe_lnd>
<rootpe_rof>512</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>680</rootpe_ocn>
<rootpe_glc>512</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="blues">
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>1024</ntasks_atm>
<ntasks_lnd>1024</ntasks_lnd>
<ntasks_rof>1024</ntasks_rof>
<ntasks_ice>1024</ntasks_ice>
<ntasks_ocn>1024</ntasks_ocn>
<ntasks_glc>1024</ntasks_glc>
<ntasks_wav>1024</ntasks_wav>
<ntasks_cpl>1024</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+_oi%oQU120_r%rx1.+">
<mach name="bebop">
<pes compset=".*MPAS.*" pesize="any">
<comment>T62_oQU120 grid for MPAS tests on 20 nodes pure-MPI</comment>
<ntasks>
<ntasks_atm>720</ntasks_atm>
<ntasks_lnd>720</ntasks_lnd>
<ntasks_rof>720</ntasks_rof>
<ntasks_ice>720</ntasks_ice>
<ntasks_ocn>720</ntasks_ocn>
<ntasks_glc>720</ntasks_glc>
<ntasks_wav>720</ntasks_wav>
<ntasks_cpl>720</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4_l%ne120np4_oi%oRRS15to5_r%r0.+_m%oRRS15to5_g%null_w%null">
<mach name="titan|edison|cori-haswell">
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>9600</ntasks_atm>
<ntasks_lnd>9600</ntasks_lnd>
<ntasks_rof>9600</ntasks_rof>
<ntasks_ice>9600</ntasks_ice>
<ntasks_ocn>9600</ntasks_ocn>
<ntasks_glc>9600</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>9600</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4_l%ne120np4_oi%oRRS18to6_r%r0.+_m%oRRS18to6_g%null_w%null">
<mach name="titan|edison|cori-haswell">
<pes compset="CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>9600</ntasks_atm>
<ntasks_lnd>9600</ntasks_lnd>
<ntasks_rof>9600</ntasks_rof>
<ntasks_ice>9600</ntasks_ice>
<ntasks_ocn>9600</ntasks_ocn>
<ntasks_glc>9600</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>9600</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne120np4">
<mach name="mira">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="S">
<comment>ne120 coupled-compset on 1024 nodes</comment>
<ntasks>
<ntasks_atm>2700</ntasks_atm>
<ntasks_lnd>300</ntasks_lnd>
<ntasks_rof>300</ntasks_rof>
<ntasks_ice>2400</ntasks_ice>
<ntasks_ocn>1396</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>2400</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_lnd>8</nthrds_lnd>
<nthrds_rof>8</nthrds_rof>
<nthrds_ice>8</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>8</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>2400</rootpe_lnd>
<rootpe_rof>2400</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>2700</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="any">
<comment>ne120 coupled-compset on 2048 nodes</comment>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>2608</ntasks_lnd>
<ntasks_rof>2608</ntasks_rof>
<ntasks_ice>2792</ntasks_ice>
<ntasks_ocn>2792</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>2792</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_lnd>8</nthrds_lnd>
<nthrds_rof>8</nthrds_rof>
<nthrds_ice>8</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>8</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>2792</rootpe_lnd>
<rootpe_rof>2792</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>5400</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="L">
<comment>ne120 coupled-compset on 4096 nodes</comment>
<ntasks>
<ntasks_atm>10800</ntasks_atm>
<ntasks_lnd>2608</ntasks_lnd>
<ntasks_rof>2608</ntasks_rof>
<ntasks_ice>8192</ntasks_ice>
<ntasks_ocn>5584</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
<ntasks_cpl>8192</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_lnd>8</nthrds_lnd>
<nthrds_rof>8</nthrds_rof>
<nthrds_ice>8</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>8</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>8192</rootpe_lnd>
<rootpe_rof>8192</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>10800</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="S">
<comment>ne120 F-compset on 512 nodes</comment>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>8</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>3600</ntasks_atm>
<ntasks_cpl>2048</ntasks_cpl>
<ntasks_lnd>2048</ntasks_lnd>
<ntasks_rof>2048</ntasks_rof>
<ntasks_ice>2048</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_cpl>8</nthrds_cpl>
<nthrds_lnd>8</nthrds_lnd>
<nthrds_rof>8</nthrds_rof>
<nthrds_ice>8</nthrds_ice>
<nthrds_ocn>8</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_lnd>2048</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="M">
<comment>ne120 F-compset on 1024 nodes</comment>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>8</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>7200</ntasks_atm>
<ntasks_cpl>7200</ntasks_cpl>
<ntasks_lnd>7200</ntasks_lnd>
<ntasks_rof>992</ntasks_rof>
<ntasks_ice>992</ntasks_ice>
<ntasks_ocn>992</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>6</nthrds_atm>
<nthrds_cpl>6</nthrds_cpl>
<nthrds_lnd>6</nthrds_lnd>
<nthrds_rof>6</nthrds_rof>
<nthrds_ice>6</nthrds_ice>
<nthrds_ocn>6</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>7200</rootpe_rof>
<rootpe_ice>7200</rootpe_ice>
<rootpe_ocn>7200</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="L">
<comment>ne120 F-compset on 2048 nodes</comment>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>8</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>14400</ntasks_atm>
<ntasks_cpl>14400</ntasks_cpl>
<ntasks_lnd>14400</ntasks_lnd>
<ntasks_rof>1984</ntasks_rof>
<ntasks_ice>1984</ntasks_ice>
<ntasks_ocn>1984</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_cpl>8</nthrds_cpl>
<nthrds_lnd>8</nthrds_lnd>
<nthrds_rof>8</nthrds_rof>
<nthrds_ice>8</nthrds_ice>
<nthrds_ocn>8</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>14400</rootpe_rof>
<rootpe_ice>14400</rootpe_ice>
<rootpe_ocn>14400</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="theta">
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="any">
<comment>ne120 F-compset on 128 nodes</comment>
<MAX_TASKS_PER_NODE>256</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>7200</ntasks_atm>
<ntasks_cpl>7200</ntasks_cpl>
<ntasks_ice>7200</ntasks_ice>
<ntasks_lnd>960</ntasks_lnd>
<ntasks_rof>960</ntasks_rof>
<ntasks_ocn>960</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_cpl>4</nthrds_cpl>
<nthrds_ice>4</nthrds_ice>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>7232</rootpe_lnd>
<rootpe_rof>7232</rootpe_rof>
<rootpe_ocn>7232</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="L">
<comment>ne120 F-compset on 384 nodes</comment>
<MAX_TASKS_PER_NODE>256</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>10800</ntasks_atm>
<ntasks_cpl>10800</ntasks_cpl>
<ntasks_ice>10800</ntasks_ice>
<ntasks_lnd>1472</ntasks_lnd>
<ntasks_rof>1472</ntasks_rof>
<ntasks_ocn>1472</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_cpl>8</nthrds_cpl>
<nthrds_ice>8</nthrds_ice>
<nthrds_lnd>8</nthrds_lnd>
<nthrds_rof>8</nthrds_rof>
<nthrds_ocn>8</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>10816</rootpe_lnd>
<rootpe_rof>10816</rootpe_rof>
<rootpe_ocn>10816</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="any">
<comment>ne120-wcycl on 145 nodes, MPI-only</comment>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>7200</ntasks_atm>
<ntasks_cpl>7200</ntasks_cpl>
<ntasks_ice>6400</ntasks_ice>
<ntasks_lnd>832</ntasks_lnd>
<ntasks_rof>832</ntasks_rof>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_ice>1</nthrds_ice>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>6400</rootpe_lnd>
<rootpe_rof>6400</rootpe_rof>
<rootpe_ocn>7232</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="MT">
<comment>ne120-wcycl on 145 nodes, threaded</comment>
<MAX_TASKS_PER_NODE>256</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>7200</ntasks_atm>
<ntasks_cpl>7200</ntasks_cpl>
<ntasks_ice>6400</ntasks_ice>
<ntasks_lnd>832</ntasks_lnd>
<ntasks_rof>832</ntasks_rof>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_ice>2</nthrds_ice>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>6400</rootpe_lnd>
<rootpe_rof>6400</rootpe_rof>
<rootpe_ocn>7232</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="L">
<comment>ne120 coupled-compset on 466 nodes</comment>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>21600</ntasks_atm>
<ntasks_cpl>16384</ntasks_cpl>
<ntasks_ice>16384</ntasks_ice>
<ntasks_lnd>5248</ntasks_lnd>
<ntasks_rof>5248</ntasks_rof>
<ntasks_ocn>8192</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_ice>1</nthrds_ice>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>16384</rootpe_lnd>
<rootpe_rof>16384</rootpe_rof>
<rootpe_ocn>21632</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="XL">
<comment>ne120-wcycl on 863 nodes, MPI-only</comment>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>43200</ntasks_atm>
<ntasks_cpl>43200</ntasks_cpl>
<ntasks_ice>24000</ntasks_ice>
<ntasks_lnd>4800</ntasks_lnd>
<ntasks_rof>4800</ntasks_rof>
<ntasks_ocn>12000</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_ice>1</nthrds_ice>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>38400</rootpe_lnd>
<rootpe_rof>33600</rootpe_rof>
<rootpe_ocn>43200</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="XLT">
<comment>ne120-wcycl on 863 nodes, threaded</comment>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>43200</ntasks_atm>
<ntasks_cpl>43200</ntasks_cpl>
<ntasks_ice>24000</ntasks_ice>
<ntasks_lnd>4800</ntasks_lnd>
<ntasks_rof>4800</ntasks_rof>
<ntasks_ocn>12000</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_ice>2</nthrds_ice>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>38400</rootpe_lnd>
<rootpe_rof>33600</rootpe_rof>
<rootpe_ocn>43200</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="XLT2">
<comment>ne120-wcycl on 825 nodes, threaded, 32 tasks/node</comment>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>21600</ntasks_atm>
<ntasks_cpl>21600</ntasks_cpl>
<ntasks_ice>9600</ntasks_ice>
<ntasks_lnd>4800</ntasks_lnd>
<ntasks_rof>4800</ntasks_rof>
<ntasks_ocn>4800</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_cpl>4</nthrds_cpl>
<nthrds_ice>4</nthrds_ice>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>16800</rootpe_lnd>
<rootpe_rof>12000</rootpe_rof>
<rootpe_ocn>21600</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="XLT3">
<comment>ne120-wcycl on 800 nodes, threaded, 32 tasks/node</comment>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>21600</ntasks_atm>
<ntasks_cpl>21600</ntasks_cpl>
<ntasks_ice>12000</ntasks_ice>
<ntasks_lnd>4800</ntasks_lnd>
<ntasks_rof>4800</ntasks_rof>
<ntasks_ocn>4000</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_cpl>4</nthrds_cpl>
<nthrds_ice>4</nthrds_ice>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>16800</rootpe_lnd>
<rootpe_rof>12000</rootpe_rof>
<rootpe_ocn>21600</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="cori-knl">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="L">
<comment>cori-knl ne120 coupled compset on 1025 nodes, 33x8, (hmod1025vc) s=1.0</comment>
<MAX_MPITASKS_PER_NODE>33</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>264</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>29007</ntasks_atm>
<ntasks_cpl>27852</ntasks_cpl>
<ntasks_ice>19200</ntasks_ice>
<ntasks_lnd>4950</ntasks_lnd>
<ntasks_rof>1155</ntasks_rof>
<ntasks_ocn>4800</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_cpl>4</nthrds_cpl>
<nthrds_ice>4</nthrds_ice>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>19800</rootpe_lnd>
<rootpe_rof>27852</rootpe_rof>
<rootpe_ocn>29007</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="any">
<comment>cori-knl ne120 coupled-compset on 448 nodes, 33x8, (hmod448b) sypd=0.69 wcosplite s=0.54</comment>
<MAX_MPITASKS_PER_NODE>33</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>264</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>12375</ntasks_atm>
<ntasks_cpl>11880</ntasks_cpl>
<ntasks_ice>9600</ntasks_ice>
<ntasks_lnd>2277</ntasks_lnd>
<ntasks_rof>495</ntasks_rof>
<ntasks_ocn>2400</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_cpl>4</nthrds_cpl>
<nthrds_ice>4</nthrds_ice>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>9603</rootpe_lnd>
<rootpe_rof>11880</rootpe_rof>
<rootpe_ocn>12375</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="S">
<comment>cori-knl ne120 coupled-compset on 207 nodes, 33x8, (hmod207) sypd=0.37</comment>
<MAX_MPITASKS_PER_NODE>33</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>264</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>5610</ntasks_atm>
<ntasks_cpl>4620</ntasks_cpl>
<ntasks_ice>5584</ntasks_ice>
<ntasks_lnd>4620</ntasks_lnd>
<ntasks_rof>990</ntasks_rof>
<ntasks_ocn>1200</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_cpl>8</nthrds_cpl>
<nthrds_ice>4</nthrds_ice>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>4620</rootpe_rof>
<rootpe_ocn>5610</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="T">
<comment>cori-knl ne120 coupled-compset on 131 nodes, 33x8, (hmod131) sypd=0.25</comment>
<MAX_MPITASKS_PER_NODE>33</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>264</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>3333</ntasks_atm>
<ntasks_cpl>3333</ntasks_cpl>
<ntasks_ice>3200</ntasks_ice>
<ntasks_lnd>2871</ntasks_lnd>
<ntasks_rof>462</ntasks_rof>
<ntasks_ocn>960</ntasks_ocn>
<ntasks_glc>33</ntasks_glc>
<ntasks_wav>33</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>8</nthrds_atm>
<nthrds_cpl>8</nthrds_cpl>
<nthrds_ice>4</nthrds_ice>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>2871</rootpe_rof>
<rootpe_ocn>3333</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="X">
<comment>cori-knl ne120 F-compset on 675 nodes, 64x2, sypd=1.95</comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>43200</ntasks_atm>
<ntasks_lnd>43200</ntasks_lnd>
<ntasks_rof>43200</ntasks_rof>
<ntasks_ice>43200</ntasks_ice>
<ntasks_ocn>43200</ntasks_ocn>
<ntasks_glc>64</ntasks_glc>
<ntasks_wav>64</ntasks_wav>
<ntasks_cpl>43200</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="L">
<comment>cori-knl ne120 F-compset on 323 nodes, 67x4, sypd=1.18</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>268</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>21600</ntasks_atm>
<ntasks_lnd>21600</ntasks_lnd>
<ntasks_rof>21600</ntasks_rof>
<ntasks_ice>21600</ntasks_ice>
<ntasks_ocn>21600</ntasks_ocn>
<ntasks_glc>67</ntasks_glc>
<ntasks_wav>67</ntasks_wav>
<ntasks_cpl>21600</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="any">
<comment>cori-knl ne120 F-compset on 162 nodes, 67x4, sypd=0.69</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>268</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>10800</ntasks_atm>
<ntasks_lnd>10800</ntasks_lnd>
<ntasks_rof>10800</ntasks_rof>
<ntasks_ice>10800</ntasks_ice>
<ntasks_ocn>10800</ntasks_ocn>
<ntasks_glc>67</ntasks_glc>
<ntasks_wav>67</ntasks_wav>
<ntasks_cpl>10800</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="S">
<comment>cori-knl ne120 F-compset on 81 nodes, 67x4, sypd=0.35 </comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>268</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>5427</ntasks_atm>
<ntasks_lnd>5427</ntasks_lnd>
<ntasks_rof>5427</ntasks_rof>
<ntasks_ice>5427</ntasks_ice>
<ntasks_ocn>5427</ntasks_ocn>
<ntasks_glc>67</ntasks_glc>
<ntasks_wav>67</ntasks_wav>
<ntasks_cpl>5427</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
<pes compset=".*CAM5.+CLM45.+CICE.+DOCN.+SROF.+SGLC.+SWAV.*" pesize="T">
<comment>cori-knl ne120 F-compset on 42 nodes, 67x4, sypd=0.19 </comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>268</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>2814</ntasks_atm>
<ntasks_lnd>2814</ntasks_lnd>
<ntasks_rof>2814</ntasks_rof>
<ntasks_ice>2814</ntasks_ice>
<ntasks_ocn>2814</ntasks_ocn>
<ntasks_glc>67</ntasks_glc>
<ntasks_wav>67</ntasks_wav>
<ntasks_cpl>2814</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
</pes>
</mach>
<mach name="compy">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="any">
<comment>compy ne120 W-cycle on 310 nodes, 40x1, sypd=1.2</comment>
<ntasks>
<ntasks_atm>9600</ntasks_atm>
<ntasks_cpl>9600</ntasks_cpl>
<ntasks_ice>7200</ntasks_ice>
<ntasks_ocn>2800</ntasks_ocn>
<ntasks_lnd>2400</ntasks_lnd>
<ntasks_rof>2400</ntasks_rof>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>9600</rootpe_ocn>
<rootpe_lnd>7200</rootpe_lnd>
<rootpe_rof>7200</rootpe_rof>
</rootpe>
</pes>
</mach>
</grid>
<grid name="any">
<mach name="edison|cori-haswell">
<pes compset="any" pesize="T">
<comment>none</comment>
<ntasks>
<ntasks_atm>240</ntasks_atm>
<ntasks_lnd>240</ntasks_lnd>
<ntasks_rof>240</ntasks_rof>
<ntasks_ice>240</ntasks_ice>
<ntasks_ocn>240</ntasks_ocn>
<ntasks_glc>240</ntasks_glc>
<ntasks_wav>240</ntasks_wav>
<ntasks_cpl>240</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne4np4.*">
<mach name="anvil|bebop">
<pes compset="any" pesize="any">
<comment>ne4 grid on 4 nodes pure-MPI </comment>
<ntasks>
<ntasks_atm>108</ntasks_atm>
<ntasks_ice>108</ntasks_ice>
<ntasks_cpl>108</ntasks_cpl>
<ntasks_lnd>36</ntasks_lnd>
<ntasks_rof>36</ntasks_rof>
<ntasks_ocn>36</ntasks_ocn>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_ice>0</rootpe_ice>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_lnd>108</rootpe_lnd>
<rootpe_rof>108</rootpe_rof>
<rootpe_ocn>108</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="compy">
<pes compset="any" pesize="any">
<comment>ne4 grid on 3 nodes pure-MPI</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_ice>96</ntasks_ice>
<ntasks_cpl>96</ntasks_cpl>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ocn>96</ntasks_ocn>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_ice>1</nthrds_ice>
<nthrds_cpl>1</nthrds_cpl>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ocn>1</nthrds_ocn>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_ice>0</rootpe_ice>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ocn>0</rootpe_ocn>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne30np4_l%ne30np4_oi%oEC60to30">
<mach name="anvil|bebop">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="S">
<comment> -compset A_WCYCL* -res ne30_oEC* on 32 nodes pure-MPI </comment>
<ntasks>
<ntasks_atm>675</ntasks_atm>
<ntasks_lnd>72</ntasks_lnd>
<ntasks_rof>72</ntasks_rof>
<ntasks_ice>720</ntasks_ice>
<ntasks_ocn>360</ntasks_ocn>
<ntasks_cpl>720</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>720</rootpe_lnd>
<rootpe_rof>720</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>792</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="any">
<comment> -compset A_WCYCL* -res ne30_oEC* on 54 nodes pure-MPI </comment>
<ntasks>
<ntasks_atm>1350</ntasks_atm>
<ntasks_lnd>216</ntasks_lnd>
<ntasks_rof>216</ntasks_rof>
<ntasks_ice>1152</ntasks_ice>
<ntasks_ocn>576</ntasks_ocn>
<ntasks_cpl>1152</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>1152</rootpe_lnd>
<rootpe_rof>1152</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1368</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="L">
<comment> -compset A_WCYCL* -res ne30_oEC* on 105 nodes pure-MPI </comment>
<ntasks>
<ntasks_atm>2700</ntasks_atm>
<ntasks_lnd>540</ntasks_lnd>
<ntasks_rof>540</ntasks_rof>
<ntasks_ice>2160</ntasks_ice>
<ntasks_ocn>1080</ntasks_ocn>
<ntasks_cpl>2160</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>2160</rootpe_lnd>
<rootpe_rof>2160</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>2700</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="theta">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="XS">
<comment>ne30-wcycl on 8 nodes</comment>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>338</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>256</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_cpl>256</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>256</rootpe_lnd>
<rootpe_rof>256</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>384</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="any">
<comment>ne30-wcycl on 128 nodes</comment>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>640</ntasks_lnd>
<ntasks_rof>640</ntasks_rof>
<ntasks_ice>2752</ntasks_ice>
<ntasks_ocn>2752</ntasks_ocn>
<ntasks_cpl>5400</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>4800</rootpe_lnd>
<rootpe_rof>4800</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>5440</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="compy">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+" pesize="S">
<comment> -compset A_WCYCL* -res ne30_oEC* on 27 nodes pure-MPI </comment>
<ntasks>
<ntasks_atm>900</ntasks_atm>
<ntasks_lnd>900</ntasks_lnd>
<ntasks_rof>900</ntasks_rof>
<ntasks_ice>900</ntasks_ice>
<ntasks_ocn>160</ntasks_ocn>
<ntasks_cpl>900</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>920</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+" pesize="any">
<comment> -compset A_WCYCL* -res ne30_oEC* on 40 nodes pure-MPI </comment>
<ntasks>
<ntasks_atm>1350</ntasks_atm>
<ntasks_lnd>1350</ntasks_lnd>
<ntasks_rof>1350</ntasks_rof>
<ntasks_ice>1350</ntasks_ice>
<ntasks_ocn>240</ntasks_ocn>
<ntasks_cpl>1350</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1360</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+" pesize="L">
<comment> -compset A_WCYCL* -res ne30_oEC* on 80 nodes pure-MPI </comment>
<ntasks>
<ntasks_atm>2700</ntasks_atm>
<ntasks_lnd>2700</ntasks_lnd>
<ntasks_rof>2700</ntasks_rof>
<ntasks_ice>2700</ntasks_ice>
<ntasks_ocn>480</ntasks_ocn>
<ntasks_cpl>2700</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>2720</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.+" pesize="XL">
<comment> -compset A_WCYCL* -res ne30_oEC* on 160 nodes pure-MPI </comment>
<ntasks>
<ntasks_atm>5400</ntasks_atm>
<ntasks_lnd>5400</ntasks_lnd>
<ntasks_rof>5400</ntasks_rof>
<ntasks_ice>5400</ntasks_ice>
<ntasks_ocn>1000</ntasks_ocn>
<ntasks_cpl>5400</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>5400</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne4np4_l%ne4np4_oi%oQU240_r%r05_m%oQU240_g%null_w%null">
<mach name="any">
<pes compset="any" pesize="S">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>32</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>32</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="M">
<comment>none</comment>
<ntasks>
<ntasks_atm>48</ntasks_atm>
<ntasks_lnd>48</ntasks_lnd>
<ntasks_rof>48</ntasks_rof>
<ntasks_ice>48</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>48</ntasks_glc>
<ntasks_wav>48</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>2</nthrds_glc>
<nthrds_wav>2</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="L">
<comment>none</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne4np4_l%ne4np4_oi%oQU240_r%r05_m%oQU240_g%null_w%null">
<mach name="sandiatoss3">
<pes compset="any" pesize="S">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>32</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>32</rootpe_ice>
<rootpe_ocn>32</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="CAM5.+CLM.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="M">
<comment>none</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>96</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>96</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".+CAM5.+CLM.+MPASSI.+MPASO.+MOSART.+SGLC.+SWAV" pesize="L">
<comment>none</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>32</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>32</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>4</nthrds_atm>
<nthrds_lnd>4</nthrds_lnd>
<nthrds_rof>4</nthrds_rof>
<nthrds_ice>4</nthrds_ice>
<nthrds_ocn>4</nthrds_ocn>
<nthrds_glc>4</nthrds_glc>
<nthrds_wav>4</nthrds_wav>
<nthrds_cpl>4</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>96</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>96</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne4np4_l%ne4np4_oi%oQU240_r%r05_m%oQU240_g%null_w%null">
<mach name="melvin">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>32</ntasks_atm>
<ntasks_lnd>16</ntasks_lnd>
<ntasks_rof>32</ntasks_rof>
<ntasks_ice>16</ntasks_ice>
<ntasks_ocn>16</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>48</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>32</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>32</rootpe_ice>
<rootpe_ocn>32</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne4np4">
<mach name="cetus">
<pes compset="any" pesize="any">
<comment>any compset on ne4 grid</comment>
<ntasks>
<ntasks_atm>6</ntasks_atm>
<ntasks_lnd>6</ntasks_lnd>
<ntasks_rof>6</ntasks_rof>
<ntasks_ice>6</ntasks_ice>
<ntasks_ocn>6</ntasks_ocn>
<ntasks_glc>6</ntasks_glc>
<ntasks_wav>6</ntasks_wav>
<ntasks_cpl>6</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>16</nthrds_atm>
<nthrds_lnd>16</nthrds_lnd>
<nthrds_rof>16</nthrds_rof>
<nthrds_ice>16</nthrds_ice>
<nthrds_ocn>16</nthrds_ocn>
<nthrds_glc>16</nthrds_glc>
<nthrds_wav>16</nthrds_wav>
<nthrds_cpl>16</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<!-- ne4 atmosphere grid on Cori-Haswell: 96 MPI ranks (3 nodes at 32 ranks/node
     per the comment), all components on the same ranks, single-threaded. -->
<grid name="a%ne4np4">
<mach name="cori-haswell">
<pes compset="any" pesize="any">
<comment>3 nodes, any compset on ne4 grid</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>96</ntasks_glc>
<ntasks_wav>96</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<!-- NOTE(review): no <rootpe> section here, unlike most sibling entries —
     presumably root PEs default to 0 for every component; confirm against
     the CIME pes-file defaults before relying on this. -->
</pes>
</mach>
</grid>
<grid name="a%ne4np4">
<mach name="cori-knl">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="any">
<comment>"cori-knl ne4 coupled compset on 6 nodes, sypd=22.9"</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>134</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>268</ntasks_atm>
<ntasks_lnd>268</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>268</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>268</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="any">
<comment>cori-knl, 13 nodes, 67x1 any compset on ne4 grid, sypd=50</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>67</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>866</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>256</ntasks_ice>
<ntasks_ocn>256</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>866</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
</pes>
<pes compset="any" pesize="S">
<comment>cori-knl, 4 nodes, 67x1 any compset on ne4 grid, sypd=31.4</comment>
<MAX_MPITASKS_PER_NODE>67</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>67</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>268</ntasks_atm>
<ntasks_lnd>268</ntasks_lnd>
<ntasks_rof>268</ntasks_rof>
<ntasks_ice>268</ntasks_ice>
<ntasks_ocn>268</ntasks_ocn>
<ntasks_glc>32</ntasks_glc>
<ntasks_wav>32</ntasks_wav>
<ntasks_cpl>268</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
</pes>
</mach>
<mach name="edison">
<pes compset=".*CAM5.+CLM45.+MPASSI.+MPASO.+MOSART.*" pesize="any">
<comment>"edison ne4 coupled compset on 6 nodes, OCN by itself on 2 nodes sypd=45.2"</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>24</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>48</ntasks_ocn>
<ntasks_glc>24</ntasks_glc>
<ntasks_wav>24</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>96</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset="any" pesize="any">
<comment>edison, 4 nodes, any compset on ne4 grid, sypd=57</comment>
<ntasks>
<ntasks_atm>96</ntasks_atm>
<ntasks_lnd>96</ntasks_lnd>
<ntasks_rof>96</ntasks_rof>
<ntasks_ice>96</ntasks_ice>
<ntasks_ocn>96</ntasks_ocn>
<ntasks_glc>24</ntasks_glc>
<ntasks_wav>24</ntasks_wav>
<ntasks_cpl>96</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
</pes>
</mach>
</grid>
<grid name=".*oi%oRRS30to10.*">
<mach name="theta">
<pes compset=".*MPASSI.+MPASO.+" pesize="any">
<comment>30to10-gmpas on 128 nodes</comment>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>8192</ntasks_atm>
<ntasks_lnd>8192</ntasks_lnd>
<ntasks_rof>8192</ntasks_rof>
<ntasks_ice>8192</ntasks_ice>
<ntasks_ocn>8192</ntasks_ocn>
<ntasks_cpl>8192</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="cori-knl">
<pes compset=".*MPASSI.+MPASO.+" pesize="any">
<comment>cori-knl G 30to10 on 52 nodes, 64x2</comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>1280</ntasks_atm>
<ntasks_lnd>1280</ntasks_lnd>
<ntasks_rof>1280</ntasks_rof>
<ntasks_ice>1280</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_cpl>1280</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1280</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="cori-haswell">
<pes compset=".*MPASSI.+MPASO.+" pesize="any">
<comment>cori-haswell G 30to10 on 48 nodes</comment>
<ntasks>
<ntasks_atm>512</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>512</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>1024</ntasks_ocn>
<ntasks_cpl>512</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>512</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="edison">
<pes compset=".*MPASSI.+MPASO.+" pesize="any">
<comment>edison G 30to10 on 128 nodes</comment>
<ntasks>
<ntasks_atm>1024</ntasks_atm>
<ntasks_lnd>1024</ntasks_lnd>
<ntasks_rof>1024</ntasks_rof>
<ntasks_ice>1024</ntasks_ice>
<ntasks_ocn>2048</ntasks_ocn>
<ntasks_cpl>1024</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>1024</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
<mach name="titan">
<pes compset=".*MPASSI.+MPASO.+" pesize="any">
<comment>30to10-gmpas on 32 nodes</comment>
<MAX_TASKS_PER_NODE>16</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>16</MAX_MPITASKS_PER_NODE>
<ntasks>
<ntasks_atm>512</ntasks_atm>
<ntasks_lnd>512</ntasks_lnd>
<ntasks_rof>512</ntasks_rof>
<ntasks_ice>512</ntasks_ice>
<ntasks_ocn>512</ntasks_ocn>
<ntasks_cpl>512</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%ne0np4.*">
<mach name="any">
<pes compset="any" pesize="any">
<comment>none</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_glc>128</ntasks_glc>
<ntasks_wav>128</ntasks_wav>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
<mach name="theta">
<pes compset="any" pesize="any">
<comment>RRM grid on 128 Theta nodes</comment>
<ntasks>
<ntasks_atm>8192</ntasks_atm>
<ntasks_lnd>8192</ntasks_lnd>
<ntasks_rof>8192</ntasks_rof>
<ntasks_ice>8192</ntasks_ice>
<ntasks_ocn>8192</ntasks_ocn>
<ntasks_glc>8192</ntasks_glc>
<ntasks_wav>8192</ntasks_wav>
<ntasks_cpl>8192</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_glc>0</rootpe_glc>
<rootpe_wav>0</rootpe_wav>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<!-- Hi-res (18to6) G-case layout for cori-knl: 9600 ranks (150 nodes, 64 ranks/node),
     2 threads for ice/ocn. Grid-name fix: the pattern previously ended in
     "oRRS18to6*", where "6*" matches zero-or-more literal '6' characters rather
     than an arbitrary suffix; sibling entries (oEC60to30) use ".*", so the
     missing dot is restored here to match e.g. oRRS18to6v3 resolutions. -->
<grid name=".*T62_oi%oRRS18to6.*">
<mach name="cori-knl">
<pes compset=".*MPASSI.+MPASO.+" pesize="any">
<comment>cori-knl, hires (18to6) G case on 150 nodes, 64x2, sypd=0.5</comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>9600</ntasks_atm>
<ntasks_lnd>9600</ntasks_lnd>
<ntasks_rof>9600</ntasks_rof>
<ntasks_ice>9600</ntasks_ice>
<ntasks_ocn>9600</ntasks_ocn>
<ntasks_cpl>9600</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
</pes>
</mach>
</grid>
<grid name=".*T62_oi%oEC60to30.*">
<mach name="cori-knl">
<pes compset=".*MPASSI.+MPASO.+" pesize="any">
<comment>cori-knl, lowres (60to30) G case on 16 nodes, 64x2, sypd=2.42</comment>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<ntasks>
<ntasks_atm>1024</ntasks_atm>
<ntasks_lnd>1024</ntasks_lnd>
<ntasks_rof>1024</ntasks_rof>
<ntasks_ice>1024</ntasks_ice>
<ntasks_ocn>1024</ntasks_ocn>
<ntasks_cpl>1024</ntasks_cpl>
<ntasks_glc>1</ntasks_glc>
<ntasks_wav>1</ntasks_wav>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_glc>1</nthrds_glc>
<nthrds_wav>1</nthrds_wav>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
</pes>
</mach>
</grid>
<grid name="a%T62.+_oi%oEC60to30.*">
<mach name="compy">
<pes compset=".*MPASSI.+MPASO.+" pesize="S">
<comment>compy, lowres (60to30v3) G case on 12 nodes 40 ppn pure-MPI, sypd=10</comment>
<ntasks>
<ntasks_atm>160</ntasks_atm>
<ntasks_lnd>160</ntasks_lnd>
<ntasks_rof>160</ntasks_rof>
<ntasks_ice>160</ntasks_ice>
<ntasks_ocn>320</ntasks_ocn>
<ntasks_cpl>120</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>160</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*MPASSI.+MPASO.+" pesize="any">
<comment>compy, lowres (60to30v3) G case on 24 nodes 40 ppn pure-MPI, sypd=18</comment>
<ntasks>
<ntasks_atm>320</ntasks_atm>
<ntasks_lnd>320</ntasks_lnd>
<ntasks_rof>320</ntasks_rof>
<ntasks_ice>320</ntasks_ice>
<ntasks_ocn>640</ntasks_ocn>
<ntasks_cpl>120</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>320</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
<pes compset=".*MPASSI.+MPASO.+" pesize="L">
<comment>compy, lowres (60to30v3) G case on 37 nodes 40 ppn pure-MPI, sypd=28</comment>
<ntasks>
<ntasks_atm>480</ntasks_atm>
<ntasks_lnd>480</ntasks_lnd>
<ntasks_rof>480</ntasks_rof>
<ntasks_ice>480</ntasks_ice>
<ntasks_ocn>1000</ntasks_ocn>
<ntasks_cpl>480</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>1</nthrds_atm>
<nthrds_lnd>1</nthrds_lnd>
<nthrds_rof>1</nthrds_rof>
<nthrds_ice>1</nthrds_ice>
<nthrds_ocn>1</nthrds_ocn>
<nthrds_cpl>1</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>480</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
<grid name="a%T62.+_oi%oEC60to30.*">
<mach name="theta">
<pes compset=".*MPASSI.*DOCN.+" pesize="any">
<comment>--res T62_oEC60to30v3 --compset DTESTM on 2 nodes</comment>
<ntasks>
<ntasks_atm>128</ntasks_atm>
<ntasks_lnd>128</ntasks_lnd>
<ntasks_rof>128</ntasks_rof>
<ntasks_ice>128</ntasks_ice>
<ntasks_ocn>128</ntasks_ocn>
<ntasks_cpl>128</ntasks_cpl>
</ntasks>
<nthrds>
<nthrds_atm>2</nthrds_atm>
<nthrds_lnd>2</nthrds_lnd>
<nthrds_rof>2</nthrds_rof>
<nthrds_ice>2</nthrds_ice>
<nthrds_ocn>2</nthrds_ocn>
<nthrds_cpl>2</nthrds_cpl>
</nthrds>
<rootpe>
<rootpe_atm>0</rootpe_atm>
<rootpe_lnd>0</rootpe_lnd>
<rootpe_rof>0</rootpe_rof>
<rootpe_ice>0</rootpe_ice>
<rootpe_ocn>0</rootpe_ocn>
<rootpe_cpl>0</rootpe_cpl>
</rootpe>
</pes>
</mach>
</grid>
</config_pes>
CIMEROOT/config/e3sm/machines¶
E3SM XML settings for supported batch queuing systems.
<?xml version="1.0"?>
<config_batch version="2.0">
<!--
File: config_batch.xml
Purpose: abstract out the parts of run scripts that are different, and use this configuration to
create acme run scripts from a single template.
batch_system: the batch system type and version
batch_query: the batch query command for each batch system.
batch_redirect: Whether a redirect character is needed to submit jobs.
batch_directive: The string that prepends a batch directive for the batch system.
jobid_pattern: A perl regular expression used to filter out the returned job id from a
queue submission.
depend_pattern:
===============================================================
batch_system
===============================================================
The batch_system and associated tags are meant for configuring batch systems and
queues across machines. The batch_system tag denotes the name for a particular
batch system, these can either be shared between one or more machines, or can be
defined for a specific machine if need be.
Machine specific entries take precedence over generic entries; directives are appended.
queues:
one or more queues can be defined per batch_system. if the attribute default="true"
is used, then that queue will be used by default. Alternatively, multiple queues can
be used. The following variables can be used to choose a queue :
walltimemin: Giving the minimum amount of walltime for the queue.
walltimemax: The maximum amount of walltime for a queue.
nodemin: The minimum node count required to use this queue.
nodemax: The maximum node count required to use this queue.
jobmin: The minimum task count required to use this queue. This should only rarely be used to select queues that only use a fraction of a node. This cannot be used in conjunction with nodemin.
jobmax: The maximum task count required to use this queue. This should only rarely be used to select queues that only use a fraction of a node. This cannot be used in conjuction with nodemax.
-->
<batch_system type="template" >
<!-- Skeleton entry: lists every tag a concrete batch_system may provide, all left
     empty here so machine/system-specific entries below can fill them in. -->
<batch_query args=""></batch_query>
<batch_submit></batch_submit>
<batch_cancel></batch_cancel>
<batch_redirect></batch_redirect>
<batch_directive></batch_directive>
<directives>
<directive></directive>
</directives>
</batch_system>
<batch_system type="none" >
<batch_query args=""></batch_query>
<batch_submit></batch_submit>
<batch_cancel></batch_cancel>
<batch_redirect></batch_redirect>
<batch_directive></batch_directive>
<directives>
<directive></directive>
</directives>
</batch_system>
<batch_system type="cobalt" >
<!-- Cobalt (ALCF) resource manager: qsub/qstat/qdel; job id is the bare integer
     echoed by qsub, hence the (\d+) pattern. -->
<batch_query>qstat</batch_query>
<batch_submit>qsub</batch_submit>
<batch_cancel>qdel</batch_cancel>
<batch_env>--env</batch_env>
<batch_directive></batch_directive>
<jobid_pattern>(\d+)</jobid_pattern>
<depend_string> --dependencies</depend_string>
<!-- strftime-style format: %S is zero-padded seconds; the previous lowercase %s
     (seconds since the epoch) would have produced a bogus walltime string. -->
<walltime_format>%H:%M:%S</walltime_format>
<batch_mail_flag>-M</batch_mail_flag>
<batch_mail_type_flag></batch_mail_type_flag>
<batch_mail_type></batch_mail_type>
<submit_args>
<!-- Node count is ceil(TOTALPES / MAX_MPITASKS_PER_NODE); the expression is
     evaluated by CIME before submission. -->
<arg flag="--cwd" name="CASEROOT"/>
<arg flag="-A" name="CHARGE_ACCOUNT"/>
<arg flag="-t" name="JOB_WALLCLOCK_TIME"/>
<arg flag="-n" name=" ($TOTALPES + $MAX_MPITASKS_PER_NODE - 1)/$MAX_MPITASKS_PER_NODE"/>
<arg flag="-q" name="JOB_QUEUE"/>
<arg flag="--mode script"/>
</submit_args>
</batch_system>
<batch_system type="cobalt_theta" >
<batch_query>qstat</batch_query>
<batch_submit>/projects/ccsm/acme/tools/cobalt/dsub</batch_submit>
<batch_cancel>qdel</batch_cancel>
<batch_env>--env</batch_env>
<batch_directive>#COBALT</batch_directive>
<jobid_pattern>(\d+)</jobid_pattern>
<depend_string>--dependencies jobid</depend_string>
<depend_separator>:</depend_separator>
<batch_mail_flag>-M</batch_mail_flag>
<batch_mail_type_flag></batch_mail_type_flag>
<batch_mail_type></batch_mail_type>
<submit_args>
<arg flag="-A" name="CHARGE_ACCOUNT"/>
<arg flag="-t" name="JOB_WALLCLOCK_TIME"/>
<arg flag="-n" name=" ($TOTALPES + $MAX_MPITASKS_PER_NODE - 1)/$MAX_MPITASKS_PER_NODE"/>
<arg flag="-q" name="JOB_QUEUE"/>
<arg flag="--mode script"/>
</submit_args>
</batch_system>
<!-- This is the new version on Summit, released as IBM 10.1.0.0 build 476197, Nov 21 2017. -->
<batch_system type="lsf" version="10.1">
<batch_query args=" -w" >bjobs</batch_query>
<batch_submit>bsub</batch_submit>
<batch_cancel>bkill</batch_cancel>
<batch_env>-env</batch_env>
<batch_directive>#BSUB</batch_directive>
<!-- bsub reports "Job <NNN> is submitted"; the angle brackets are part of the
     pattern and must be entity-escaped — raw < and & are not well-formed XML. -->
<jobid_pattern>&lt;(\d+)&gt;</jobid_pattern>
<depend_string> -w 'done(jobid)'</depend_string>
<depend_allow_string> -w 'ended(jobid)'</depend_allow_string>
<!-- Literal && separator for combined LSF dependency expressions (escaped for
     well-formedness; the unescaped form would fail any conformant XML parser). -->
<depend_separator>&amp;&amp;</depend_separator>
<walltime_format>%H:%M</walltime_format>
<batch_mail_flag>-u</batch_mail_flag>
<batch_mail_type_flag> </batch_mail_type_flag>
<batch_mail_type>, -B -N, -B,-N,-N</batch_mail_type>
<submit_args>
<arg flag="-q" name="$JOB_QUEUE"/>
<arg flag="-W" name="$JOB_WALLCLOCK_TIME"/>
<arg flag="-P" name="$CHARGE_ACCOUNT"/>
</submit_args>
<directives>
<directive > -nnodes {{ num_nodes }} </directive>
<directive default="e3sm.stdout" > -o {{ output_error_path }}.%J </directive>
<directive default="e3sm.stderr" > -e {{ output_error_path }}.%J </directive>
<directive > -J {{ job_id }} </directive>
</directives>
</batch_system>
<batch_system type="pbs" >
<!-- Generic PBS/Torque settings shared by the pbs-typed machines below
     (blues, eos, mesabi, oic5, cades, itasca, titan); machine entries
     append their own directives and queues. -->
<batch_query args="-f" >qstat</batch_query>
<batch_submit>qsub </batch_submit>
<batch_cancel>qdel</batch_cancel>
<batch_env>-v</batch_env>
<batch_directive>#PBS</batch_directive>
<jobid_pattern>^(\S+)$</jobid_pattern>
<depend_string>-W depend=afterok:jobid</depend_string>
<depend_allow_string>-W depend=afterany:jobid</depend_allow_string>
<depend_separator>:</depend_separator>
<walltime_format>%H:%M:%S</walltime_format>
<batch_mail_flag>-M</batch_mail_flag>
<batch_mail_type_flag>-m</batch_mail_type_flag>
<batch_mail_type>, bea, b, e, a</batch_mail_type>
<submit_args>
<arg flag="-q" name="$JOB_QUEUE"/>
<arg flag="-l walltime=" name="$JOB_WALLCLOCK_TIME"/>
<arg flag="-A" name="$CHARGE_ACCOUNT"/>
</submit_args>
<directives>
<directive> -N {{ job_id }}</directive>
<directive default="n"> -r {{ rerunnable }} </directive>
<!-- <directive> -j oe {{ job_id }} </directive> -->
<directive> -j oe </directive>
<directive> -V </directive>
</directives>
</batch_system>
<batch_system type="moab" >
<batch_query>showq</batch_query>
<batch_submit>msub </batch_submit>
<batch_cancel>canceljob</batch_cancel>
<batch_directive>#MSUB</batch_directive>
<jobid_pattern>(\d+)$</jobid_pattern>
<depend_string>-W depend=afterok:jobid</depend_string>
<depend_allow_string>-W depend=afterany:jobid</depend_allow_string>
<depend_separator>:</depend_separator>
<walltime_format>%H:%M:%S</walltime_format>
<batch_mail_flag>-M</batch_mail_flag>
<batch_mail_type_flag>-m</batch_mail_type_flag>
<batch_mail_type>, bea, b, e, a</batch_mail_type>
<submit_args>
<arg flag="-l walltime=" name="$JOB_WALLCLOCK_TIME"/>
<arg flag="-A" name="$CHARGE_ACCOUNT"/>
</submit_args>
<directives>
<directive> -N {{ job_id }}</directive>
<directive> -j oe </directive>
<directive default="n"> -r {{ rerunnable }} </directive>
<directive default="/bin/bash" > -S {{ shell }}</directive>
</directives>
</batch_system>
<!-- for lawrence livermore computing -->
<batch_system type="lc_slurm">
<batch_query per_job_arg="-j">squeue</batch_query>
<batch_submit>sbatch</batch_submit>
<batch_cancel>scancel</batch_cancel>
<batch_directive>#SBATCH</batch_directive>
<jobid_pattern>(\d+)$</jobid_pattern>
<depend_string>--dependency=afterok:jobid</depend_string>
<depend_allow_string>--dependency=afterany:jobid</depend_allow_string>
<depend_separator>:</depend_separator>
<walltime_format>%H:%M:%S</walltime_format>
<batch_mail_flag>--mail-user</batch_mail_flag>
<batch_mail_type_flag>--mail-type</batch_mail_type_flag>
<batch_mail_type>none, all, begin, end, fail</batch_mail_type>
<directives>
<directive>--export=ALL</directive>
<directive>-p {{ job_queue }}</directive>
<directive>-J {{ job_id }}</directive>
<directive>-N {{ num_nodes }}</directive>
<directive>-n {{ total_tasks }}</directive>
<directive>-t {{ job_wallclock_time }}</directive>
<directive>-o {{ job_id }}.out</directive>
<directive>-e {{ job_id }}.err</directive>
<directive> -A {{ project }} </directive>
</directives>
<queues>
<queue walltimemax="01:00:00" nodemax="270" default="true">pbatch</queue>
<queue walltimemax="00:30:00">pdebug</queue>
</queues>
</batch_system>
<!-- for lawrence livermore computing -->
<!-- for NERSC machines: edison,cori-haswell,cori-knl -->
<batch_system type="nersc_slurm" >
<batch_query per_job_arg="-j">squeue</batch_query>
<batch_submit>sbatch</batch_submit>
<batch_cancel>scancel</batch_cancel>
<batch_directive>#SBATCH</batch_directive>
<jobid_pattern>(\d+)$</jobid_pattern>
<depend_string>--dependency=afterok:jobid</depend_string>
<depend_allow_string>--dependency=afterany:jobid</depend_allow_string>
<depend_separator>:</depend_separator>
<walltime_format>%H:%M:%S</walltime_format>
<batch_mail_flag>--mail-user</batch_mail_flag>
<batch_mail_type_flag>--mail-type</batch_mail_type_flag>
<batch_mail_type>none, all, begin, end, fail</batch_mail_type>
<submit_args>
<arg flag="--time" name="$JOB_WALLCLOCK_TIME"/>
<arg flag="-q" name="$JOB_QUEUE"/>
<arg flag="--account" name="$PROJECT"/>
</submit_args>
<directives>
<directive> --job-name={{ job_id }}</directive>
<directive> --nodes={{ num_nodes }}</directive>
<directive> --output={{ job_id }}.%j </directive>
<directive> --exclusive </directive>
</directives>
</batch_system>
<batch_system type="slurm" >
<batch_query per_job_arg="-j">squeue</batch_query>
<batch_submit>sbatch</batch_submit>
<batch_cancel>scancel</batch_cancel>
<batch_directive>#SBATCH</batch_directive>
<jobid_pattern>(\d+)$</jobid_pattern>
<depend_string>--dependency=afterok:jobid</depend_string>
<depend_allow_string>--dependency=afterany:jobid</depend_allow_string>
<depend_separator>:</depend_separator>
<walltime_format>%H:%M:%S</walltime_format>
<batch_mail_flag>--mail-user</batch_mail_flag>
<batch_mail_type_flag>--mail-type</batch_mail_type_flag>
<batch_mail_type>none, all, begin, end, fail</batch_mail_type>
<submit_args>
<arg flag="--time" name="$JOB_WALLCLOCK_TIME"/>
<arg flag="-p" name="$JOB_QUEUE"/>
<arg flag="--account" name="$PROJECT"/>
</submit_args>
<directives>
<directive> --job-name={{ job_id }}</directive>
<directive> --nodes={{ num_nodes }}</directive>
<directive> --output={{ job_id }}.%j </directive>
<directive> --exclusive </directive>
</directives>
</batch_system>
<batch_system MACH="blues" type="pbs" >
<directives>
<directive>-A {{ PROJECT }}</directive>
<directive>-l nodes={{ num_nodes }}:ppn={{ tasks_per_node }}</directive>
</directives>
<queues>
<queue walltimemax="01:00:00" nodemax="4" strict="true">shared</queue>
<queue walltimemax="03:00:00" default="true">batch</queue>
</queues>
</batch_system>
<batch_system MACH="anvil" type="slurm" >
<queues>
<queue walltimemax="01:00:00" default="true">acme-centos6</queue>
</queues>
</batch_system>
<batch_system MACH="bebop" type="slurm" >
<queues>
<queue walltimemax="00:30:00" nodemax="64" strict="true">debug</queue>
<queue walltimemax="01:00:00" nodemax="608" default="true">bdw</queue>
<queue walltimemax="01:00:00" nodemax="512">knl</queue>
</queues>
</batch_system>
<batch_system MACH="eos" type="pbs" >
<directives>
<directive>-A {{ project }}</directive>
<directive>-l nodes={{ num_nodes }}</directive>
</directives>
<queues>
<queue walltimemax="00:30:00" default="true">batch</queue>
</queues>
</batch_system>
<batch_system MACH="edison" type="nersc_slurm" >
<queues>
<queue walltimemax="00:30:00" nodemax="512" strict="true">debug</queue>
<queue walltimemax="01:30:00" default="true">regular</queue>
</queues>
</batch_system>
<batch_system MACH="cori-haswell" type="nersc_slurm">
<directives>
<directive> --constraint=haswell</directive>
</directives>
<queues>
<queue walltimemax="00:30:00" nodemax="64" strict="true">debug</queue>
<queue walltimemax="01:00:00" default="true">regular</queue>
</queues>
</batch_system>
<batch_system MACH="cori-knl" type="nersc_slurm">
<directives>
<directive> --constraint=knl,quad,cache</directive>
</directives>
<queues>
<queue walltimemax="00:30:00" nodemax="512" strict="true">debug</queue>
<queue walltimemax="01:15:00" default="true">regular</queue>
</queues>
</batch_system>
<batch_system MACH="stampede2" type="slurm">
<directives>
<directive>-n {{ total_tasks }}</directive>
</directives>
<queues>
<queue walltimemax="00:30:00" nodemax="4" strict="true">skx-dev</queue>
<queue walltimemax="00:30:00" nodemax="868" strict="true">skx-large</queue>
<queue walltimemax="01:00:00" nodemax="128" default="true">skx-normal</queue>
</queues>
</batch_system>
<batch_system MACH="mira" type="cobalt">
<queues>
<queue walltimemax="03:00:00" default="true">default</queue>
</queues>
</batch_system>
<batch_system MACH="cetus" type="cobalt">
<queues>
<queue walltimemax="01:00:00" default="true">default</queue>
</queues>
</batch_system>
<batch_system MACH="theta" type="cobalt_theta">
<queues>
<queue walltimemax="01:00:00" nodemin="1" nodemax="8" strict="true">debug-cache-quad</queue>
<queue walltimemin="00:30:00" walltimemax="03:00:00" nodemin="128" nodemax="255" strict="true">default</queue>
<queue walltimemin="00:30:00" walltimemax="06:00:00" nodemin="256" nodemax="383" strict="true">default</queue>
<queue walltimemin="00:30:00" walltimemax="09:00:00" nodemin="384" nodemax="639" strict="true">default</queue>
<queue walltimemin="00:30:00" walltimemax="12:00:00" nodemin="640" nodemax="801" strict="true">default</queue>
<queue walltimemin="00:30:00" walltimemax="24:00:00" nodemin="802" strict="true" default="true">default</queue>
</queues>
</batch_system>
<batch_system MACH="jlse" type="cobalt_theta">
<batch_submit>qsub</batch_submit>
<queues>
<queue walltimemax="01:00:00" jobmin="1" jobmax="20" default="true">skylake_8180</queue>
</queues>
</batch_system>
<batch_system MACH="cascade" type="slurm">
<directives>
<directive>--output=slurm.out</directive>
<directive>--error=slurm.err</directive>
</directives>
<queues>
<queue walltimemax="00:59:00" nodemin="1" nodemax="15" >small</queue>
<queue walltimemax="00:59:00" nodemin="16" nodemax="127" >medium</queue>
<queue walltimemax="00:59:00" nodemin="128" default="true" >large</queue>
</queues>
</batch_system>
<batch_system MACH="constance" type="slurm">
<directives>
<directive>--output=slurm.out</directive>
<directive>--error=slurm.err</directive>
</directives>
<queues>
<queue walltimemax="00:59:00" default="true">slurm</queue>
</queues>
</batch_system>
<batch_system MACH="compy" type="slurm">
<queues>
<queue walltimemax="00:59:00" default="true">slurm</queue>
</queues>
</batch_system>
<batch_system MACH="sooty" type="slurm" >
<directives>
<directive>--ntasks-per-node={{ tasks_per_node }}</directive>
<directive>--output=slurm.out</directive>
<directive>--error=slurm.err</directive>
</directives>
<queues>
<queue walltimemax="00:59:00" default="true">slurm</queue>
</queues>
</batch_system>
<batch_system MACH="sandiatoss3" type="slurm" >
<queues>
<queue nodemax="16" walltimemax="04:00:00" strict="true" default="true">short,batch</queue>
<queue walltimemax="24:00:00">batch</queue>
</queues>
</batch_system>
<batch_system MACH="ghost" type="slurm" >
<queues>
<queue nodemax="12" walltimemax="04:00:00" strict="true" default="true">short,batch</queue>
<queue walltimemax="24:00:00">batch</queue>
</queues>
</batch_system>
<batch_system MACH="mustang" type="moab" >
<directives>
<directive>-l nodes={{ num_nodes }}:ppn={{ tasks_per_node }}</directive>
</directives>
</batch_system>
<batch_system MACH="grizzly" type="slurm" >
<directives>
<directive>--nodes={{ num_nodes }}</directive>
<directive>--ntasks-per-node={{ tasks_per_node }}</directive>
<directive>--qos=standard </directive>
</directives>
<queues>
<queue walltimemax="16:00:00" default="true">standard</queue>
</queues>
</batch_system>
<batch_system MACH="badger" type="slurm" >
<directives>
<directive>--nodes={{ num_nodes }}</directive>
<directive>--ntasks-per-node={{ tasks_per_node }}</directive>
<directive>--qos=standard </directive>
</directives>
<queues>
<queue walltimemax="16:00:00" default="true">standard</queue>
</queues>
</batch_system>
<batch_system MACH="mesabi" type="pbs">
<queues>
<queue walltimemax="24:00" default="true">mesabi</queue>
<queue walltimemax="24:00">debug</queue>
</queues>
</batch_system>
<batch_system MACH="oic5" type="pbs" >
<directives>
<directive>-l nodes={{ num_nodes }}:ppn={{ tasks_per_node }}</directive>
<directive>-q esd13q</directive>
</directives>
<queues>
<queue default="true">esd13q</queue>
<queue walltimemax="1:00">esddbg13q</queue>
</queues>
</batch_system>
<batch_system MACH="cades" type="pbs" >
<directives>
<directive>-l nodes={{ num_nodes }}:ppn={{ tasks_per_node }}</directive>
<directive>-W group_list=cades-ccsi</directive>
</directives>
<queues>
<queue default="true">batch</queue>
</queues>
</batch_system>
<batch_system MACH="itasca" type="pbs">
<queues>
<queue walltimemax="24:00" default="true">batch</queue>
<queue walltimemax="24:00">debug</queue>
</queues>
</batch_system>
<batch_system MACH="titan" type="pbs" >
<directives>
<directive>-A {{ project }}</directive>
<directive>-l nodes={{ num_nodes }}</directive>
<directive>-env "all"</directive>
</directives>
<queues>
<queue walltimemax="02:00:00" default="true">batch</queue>
<queue walltimemax="01:00:00" nodemax="18688" strict="true">debug</queue>
</queues>
</batch_system>
<batch_system MACH="summit" type="lsf" >
<directives>
<directive>-P {{ project }}</directive>
</directives>
<directives compiler="!pgiacc">
<directive>-alloc_flags smt2</directive>
</directives>
<directives compiler="pgiacc">
<directive>-alloc_flags "gpumps smt2"</directive>
</directives>
<queues>
<queue walltimemax="02:00" default="true">batch</queue>
</queues>
</batch_system>
<batch_system MACH="summitdev" type="lsf" >
<directives>
<directive>-P {{ project }}</directive>
<directive>-alloc_flags gpumps</directive>
</directives>
<queues>
<queue walltimemax="01:00" default="true">batch</queue>
<!--
Nodes Max Walltime
<=4 4 hours
>4 1 hour
jobmax = 54nodes*20cores*16th = 8640
-->
</queues>
</batch_system>
<batch_system MACH="snl-white" type="lsf" >
<queues>
<queue walltimemax="02:00" default="true">rhel7G</queue>
</queues>
</batch_system>
<batch_system MACH="snl-blake" type="slurm" >
<queues>
<queue walltimemax="02:00" default="true">blake</queue>
</queues>
</batch_system>
<batch_system MACH="lawrencium-lr2" type="slurm" >
<directives>
<directive>--ntasks-per-node={{ tasks_per_node }}</directive>
<directive>--qos=lr_normal </directive>
<directive>--account={{ project }}</directive>
</directives>
<queues>
<queue walltimemax="01:00:00" default="true">lr2</queue>
</queues>
</batch_system>
<batch_system MACH="lawrencium-lr3" type="slurm" >
<directives>
<directive>--ntasks-per-node={{ tasks_per_node }}</directive>
<directive>--qos=lr_normal</directive>
<directive>--account={{ project }}</directive>
</directives>
<queues>
<queue walltimemax="01:00:00" default="true">lr3</queue>
</queues>
</batch_system>
<batch_system MACH="lawrencium-lr6" type="slurm" >
<directives>
<directive>--ntasks-per-node={{ tasks_per_node }}</directive>
<directive>--qos=condo_esd2 </directive>
</directives>
<queues>
<queue walltimemax="01:00:00" default="true">lr6</queue>
</queues>
</batch_system>
</config_batch>
E3SM XML settings for supported compilers.
<?xml version="1.0" encoding="UTF-8"?>
<config_compilers version="2.0">
<!--
===========================
This file defines compiler flags for building CIME. General flags are listed first
followed by flags specific to particular operating systems, followed by particular machines.
More general flags are replaced by more specific flags.
Flags of the sort ADD_FLAG indicate that the field should be appended to an already existing FLAG definition.
Attributes indicate that an if clause should be added to the Macros so that these flags are added
only under the conditions described by the attribute(s).
The env_mach_specific file may set environment variables or load modules which set environment variables
which are then used in the Makefile. For example the NETCDF_PATH on many machines is set by a module.
Do not use variables CPPDEFS and SLIBS here, instead use ADD_CPPDEFS and ADD_SLIBS
========================================================================
Serial/MPI compiler specification
========================================================================
SCC and SFC specifies the serial compiler
MPICC and MPIFC specify the MPI compilers
if $MPILIB is set to mpi-serial then
CC = $SCC
FC = $SFC
MPICC = $SCC
MPIFC = $SFC
INC_MPI = $(CIMEROOT)/src/externals/mct/mpi-serial
========================================================================
Options for including C++ code in the build
========================================================================
SUPPORTS_CXX (TRUE/FALSE): Whether we have defined all the necessary
settings for including C++ code in the build for this compiler (or
this compiler/machine combination). See below for a description of the
necessary settings.
The following are required for a compiler to support the inclusion of
C++ code:
SCXX: serial C++ compiler
MPICXX: mpi C++ compiler
CXX_LINKER (CXX/FORTRAN): When C++ code is included in the build, do
we use a C++ or Fortran linker?
In addition, some compilers require additional libraries or link-time
flags, specified via CXX_LIBS or CXX_LDFLAGS, as in the following
examples:
<CXX_LIBS> -L/path/to/directory -lfoo </CXX_LIBS>
or
<CXX_LDFLAGS> -cxxlib </CXX_LDFLAGS>
Note that these libraries or LDFLAGS will be added on the link line,
regardless of whether we are using a C++ or Fortran linker. For
example, if CXX_LINKER=CXX, then the above CXX_LIBS line should
specify extra libraries needed when linking C++ and fortran code using
a C++ linker. If CXX_LINKER=FORTRAN, then the above CXX_LDFLAGS line
should specify extra LDFLAGS needed when linking C++ and fortran code
using a fortran linker.
These should NOT be specified via <ADD_SLIBS USE_CXX="true"> or
<ADD_LDFLAGS USE_CXX="true">, because those mess up the configure step
for mct, etc.
===========================
-->
<!-- Define default values that can be overridden by specific
compilers -->
<compiler>
<SUPPORTS_CXX>FALSE</SUPPORTS_CXX>
<CPPDEFS>
<!-- MPAS Components rely on this flag to use the newer PIO/Scorpio interfaces. These newer interfaces are available on both Scorpio and Scorpio classic -->
<append MODEL="ice"> -DUSE_PIO2 </append>
<append MODEL="ocn"> -DUSE_PIO2 </append>
<append MODEL="glc"> -DUSE_PIO2 </append>
</CPPDEFS>
</compiler>
<compiler COMPILER="cray">
<CFLAGS>
<append compile_threaded="FALSE"> -h noomp </append>
</CFLAGS>
<CPPDEFS>
<!--http://docs.cray.com/cgi-bin/craydoc.cgi?mode=View;id=S-3901-83;idx=books_search;this_sort=;q=;type=books;title=Cray%20Fortran%20Reference%20Manual -->
<append> -DFORTRANUNDERSCORE -DNO_R16 -DCPRCRAY</append>
<append MODEL="moby"> -DDIR=NOOP </append>
</CPPDEFS>
<FC_AUTO_R8>
<base> -s real64 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -O2 -f free -N 255 -h byteswapio -em </base>
<append compile_threaded="FALSE"> -h noomp </append>
<append DEBUG="TRUE"> -g -trapuv -Wuninitialized </append>
</FFLAGS>
<FFLAGS_NOOPT>
<base> -O0 </base>
</FFLAGS_NOOPT>
<HAS_F2008_CONTIGUOUS>TRUE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<base> -Wl,--allow-multiple-definition -h byteswapio </base>
<append compile_threaded="FALSE"> -h noomp </append>
</LDFLAGS>
</compiler>
<compiler COMPILER="gnu">
<CFLAGS>
<base> -mcmodel=medium </base>
<append compile_threaded="TRUE"> -fopenmp </append>
<append DEBUG="TRUE"> -g -Wall -Og -fbacktrace -fcheck=bounds -ffpe-trap=invalid,zero,overflow</append>
<append DEBUG="FALSE"> -O </append>
<append MODEL="csm_share"> -std=c99 </append>
</CFLAGS>
<CMAKE_OPTS>
<append MODEL="cism"> -D CISM_GNU=ON </append>
</CMAKE_OPTS>
<CPPDEFS>
<!-- http://gcc.gnu.org/onlinedocs/gfortran/ -->
<append> -DFORTRANUNDERSCORE -DNO_R16 -DCPRGNU</append>
</CPPDEFS>
<CXX_LINKER>FORTRAN</CXX_LINKER>
<FC_AUTO_R8>
<base> -fdefault-real-8 </base>
</FC_AUTO_R8>
<FFLAGS>
<!-- -ffree-line-length-none and -ffixed-line-length-none need to be in FFLAGS rather than in FIXEDFLAGS/FREEFLAGS
so that these are passed to cmake builds (cmake builds don't use FIXEDFLAGS and FREEFLAGS). -->
<base> -mcmodel=medium -fconvert=big-endian -ffree-line-length-none -ffixed-line-length-none </base>
<append compile_threaded="TRUE"> -fopenmp </append>
<append DEBUG="TRUE"> -g -Wall -Og -fbacktrace -fcheck=bounds -ffpe-trap=invalid,zero,overflow</append>
<append DEBUG="FALSE"> -O </append>
</FFLAGS>
<FFLAGS_NOOPT>
<base> -O0 </base>
</FFLAGS_NOOPT>
<FIXEDFLAGS>
<base> -ffixed-form </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -ffree-form </base>
</FREEFLAGS>
<HAS_F2008_CONTIGUOUS>FALSE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<append compile_threaded="TRUE"> -fopenmp </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpicxx </MPICXX>
<MPIFC> mpif90 </MPIFC>
<SCC> gcc </SCC>
<SCXX> g++ </SCXX>
<SFC> gfortran </SFC>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<compiler COMPILER="gnu7">
<CFLAGS>
<base> -mcmodel=medium </base>
<append compile_threaded="TRUE"> -fopenmp </append>
<append DEBUG="TRUE"> -g -Wall -Og -fbacktrace -fcheck=bounds -ffpe-trap=invalid,zero,overflow</append>
<append DEBUG="FALSE"> -O </append>
</CFLAGS>
<CMAKE_OPTS>
<append MODEL="cism"> -D CISM_GNU=ON </append>
</CMAKE_OPTS>
<CPPDEFS>
<!-- http://gcc.gnu.org/onlinedocs/gfortran/ -->
<append> -DFORTRANUNDERSCORE -DNO_R16 -DCPRGNU</append>
</CPPDEFS>
<CXX_LINKER>FORTRAN</CXX_LINKER>
<FC_AUTO_R8>
<base> -fdefault-real-8 </base>
</FC_AUTO_R8>
<FFLAGS>
<!-- -ffree-line-length-none and -ffixed-line-length-none need to be in FFLAGS rather than in FIXEDFLAGS/FREEFLAGS
so that these are passed to cmake builds (cmake builds don't use FIXEDFLAGS and FREEFLAGS). -->
<base> -mcmodel=medium -fconvert=big-endian -ffree-line-length-none -ffixed-line-length-none </base>
<append compile_threaded="TRUE"> -fopenmp </append>
<append DEBUG="TRUE"> -g -Wall -Og -fbacktrace -fcheck=bounds -ffpe-trap=invalid,zero,overflow</append>
<append DEBUG="FALSE"> -O </append>
</FFLAGS>
<FFLAGS_NOOPT>
<base> -O0 </base>
</FFLAGS_NOOPT>
<FIXEDFLAGS>
<base> -ffixed-form </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -ffree-form </base>
</FREEFLAGS>
<HAS_F2008_CONTIGUOUS>FALSE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<append compile_threaded="TRUE"> -fopenmp </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpicxx </MPICXX>
<MPIFC> mpif90 </MPIFC>
<SCC> gcc </SCC>
<SCXX> g++ </SCXX>
<SFC> gfortran </SFC>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<compiler COMPILER="ibm">
<CFLAGS>
<base> -g -qfullpath -qmaxmem=-1 -qphsinfo </base>
<append DEBUG="FALSE"> -O3 </append>
<append DEBUG="FALSE" compile_threaded="TRUE"> -qsmp=omp -qsuppress=1520-045 </append>
<append DEBUG="TRUE" compile_threaded="TRUE"> -qsmp=omp:noopt -qsuppress=1520-045 </append>
</CFLAGS>
<CPPDEFS>
<!-- http://publib.boulder.ibm.com/infocenter/comphelp/v7v91/index.jsp
Notes: (see xlf user's guide for the details)
-lmass => IBM-tuned intrinsic lib
-qsmp=noauto => enable SMP directives, but don't add any
-qsmp=omp => enable SMP directives, strict omp
-qstrict => don't turn divides into multiplies, etc
-qhot => higher-order-transformations (eg. loop padding)
-qalias=noaryovrlp => assume no array overlap wrt equivalence, etc
-qmaxmem=-1 => memory available to compiler during optimization
-qipa=level=2 => InterProcedure Analysis (eg. inlining) => slow compiles
-p -pg => enable profiling (use in both FFLAGS and LDFLAGS)
-qreport => for smp/omp only
-g => always leave it on because overhead is minimal
-qflttrap=... => enable default sigtrap (core dump)
-C => runtime array bounds checking (runs slow)
-qinitauto=... => initializes automatic variables
-->
<append> -DFORTRAN_SAME -DCPRIBM</append>
</CPPDEFS>
<CPRE>-WF,-D</CPRE>
<FC_AUTO_R8>
<base> -qrealsize=8 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -g -qfullpath -qmaxmem=-1 -qphsinfo </base>
<append DEBUG="FALSE"> -O2 -qstrict -Q </append>
<append DEBUG="FALSE" compile_threaded="TRUE"> -qsmp=omp -qsuppress=1520-045 </append>
<append DEBUG="TRUE" compile_threaded="TRUE"> -qsmp=omp:noopt -qsuppress=1520-045 </append>
<append DEBUG="TRUE"> -qinitauto=7FF7FFFF -qflttrap=ov:zero:inv:en </append>
</FFLAGS>
<FFLAGS_NOOPT>
<base> -O0 </base>
</FFLAGS_NOOPT>
<FIXEDFLAGS>
<base> -qsuffix=f=f -qfixed=132 </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -qsuffix=f=f90:cpp=F90 </base>
</FREEFLAGS>
<HAS_F2008_CONTIGUOUS>TRUE</HAS_F2008_CONTIGUOUS>
</compiler>
<compiler COMPILER="intel">
<CFLAGS>
<base> -O2 -fp-model precise -std=gnu99 </base>
<append compile_threaded="TRUE"> -qopenmp </append>
<append DEBUG="FALSE"> -O2 -debug minimal </append>
<append DEBUG="TRUE"> -O0 -g </append>
</CFLAGS>
<CXXFLAGS>
<base> -std=c++11 -fp-model source </base>
<append compile_threaded="TRUE"> -qopenmp </append>
<append DEBUG="TRUE"> -O0 -g </append>
<append DEBUG="FALSE"> -O2 </append>
</CXXFLAGS>
<CPPDEFS>
<!-- http://software.intel.com/en-us/articles/intel-composer-xe/ -->
<append> -DFORTRANUNDERSCORE -DNO_R16 -DCPRINTEL</append>
</CPPDEFS>
<CXX_LDFLAGS>
<base> -cxxlib </base>
</CXX_LDFLAGS>
<CXX_LINKER>FORTRAN</CXX_LINKER>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model source </base>
<append compile_threaded="TRUE"> -qopenmp </append>
<!-- WJS (8-11-14): For some reason, yellowstone-intel has starting giving lots of
messages about array temporaries, leading to a ton of standard output, and
sometimes causing runs to die. Adding '-check noarg_temp_created' to suppress these
diagnostics. This is a band-aid fix, which should really be addressed at the
system-level. (Note: I could not add this in the yellowstone-specific section,
because apparently that overrides rather than adds to this ADD_FFLAGS list.) -->
<append DEBUG="TRUE"> -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created </append>
<append DEBUG="FALSE"> -O2 -debug minimal </append>
</FFLAGS>
<FFLAGS_NOOPT>
<base> -O0 </base>
</FFLAGS_NOOPT>
<FIXEDFLAGS>
<base> -fixed -132 </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -free </base>
</FREEFLAGS>
<HAS_F2008_CONTIGUOUS>TRUE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<append compile_threaded="TRUE"> -qopenmp </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpicxx </MPICXX>
<MPIFC> mpif90 </MPIFC>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<SFC> ifort </SFC>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<compiler COMPILER="nag">
<CFLAGS>
<append DEBUG="TRUE"> -g </append>
<append> -std=c99 </append>
</CFLAGS>
<CPPDEFS>
<append> -DFORTRANUNDERSCORE -DNO_CRAY_POINTERS -DNO_SHR_VMATH -DCPRNAG</append>
</CPPDEFS>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<FFLAGS>
<!-- Yes, you really do need this huge -wmismatch flag for NAG to work. -->
<!-- More specifically, it exempts MPI functions without explicit -->
<!-- interfaces from certain argument checks. Should not be necessary in -->
<!-- libraries that only use the F90 module interface. mpibcast and -->
<!-- mpiscatterv are actually CAM wrappers for MPI. -->
<base> -wmismatch=mpi_send,mpi_recv,mpi_bcast,mpi_allreduce,mpi_reduce,mpi_isend,mpi_irecv,mpi_irsend,mpi_rsend,mpi_gatherv,mpi_gather,mpi_scatterv,mpi_allgather,mpi_alltoallv,mpi_file_read_all,mpi_file_write_all,mpibcast,mpiscatterv,mpi_alltoallw,nfmpi_get_vara_all,NFMPI_IPUT_VARA,NFMPI_GET_VAR_ALL,NFMPI_PUT_VARA,NFMPI_PUT_ATT_REAL,NFMPI_PUT_ATT_DOUBLE,NFMPI_PUT_ATT_INT,NFMPI_GET_ATT_REAL,NFMPI_GET_ATT_INT,NFMPI_GET_ATT_DOUBLE,NFMPI_PUT_VARA_DOUBLE_ALL,NFMPI_PUT_VARA_REAL_ALL,NFMPI_PUT_VARA_INT_ALL -convert=BIG_ENDIAN </base>
<!-- DEBUG vs. non-DEBUG runs. -->
<append DEBUG="FALSE"> -ieee=full -O2 </append>
<append DEBUG="TRUE"> -g -time -f2003 -ieee=stop </append>
<!-- The "-gline" option is nice, but it doesn't work with OpenMP. -->
<!-- Runtime checks with OpenMP (in fact, all OpenMP cases) are WIP. -->
<append DEBUG="TRUE"> -C=all -g -time -f2003 -ieee=stop </append>
<append DEBUG="TRUE" compile_threaded="FALSE"> -gline </append>
<append compile_threaded="TRUE"> -openmp </append>
</FFLAGS>
<FIXEDFLAGS>
<base> -fixed </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -free </base>
</FREEFLAGS>
<HAS_F2008_CONTIGUOUS>FALSE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<append compile_threaded="TRUE"> -openmp </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPIFC> mpif90 </MPIFC>
<SCC> gcc </SCC>
<SFC> nagfor </SFC>
</compiler>
<compiler COMPILER="pathscale">
<CFLAGS>
<append compile_threaded="TRUE"> -mp </append>
</CFLAGS>
<CPPDEFS>
<!-- http://www.pathscale.com/node/70 -->
<append> -DFORTRANUNDERSCORE -DNO_R16 -DCPRPATHSCALE </append>
</CPPDEFS>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -O -extend_source -ftpp -fno-second-underscore -funderscoring -byteswapio </base>
<append compile_threaded="TRUE"> -mp </append>
<append DEBUG="TRUE"> -g -trapuv -Wuninitialized </append>
</FFLAGS>
<FFLAGS_NOOPT>
<base> -O0 </base>
</FFLAGS_NOOPT>
<HAS_F2008_CONTIGUOUS>FALSE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<append compile_threaded="TRUE"> -mp </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPIFC> mpif90 </MPIFC>
</compiler>
<compiler COMPILER="pgi">
<!-- Generic PGI settings; flag meanings are documented in the comment list inside CPPDEFS below. -->
<CFLAGS>
<base> -gopt -time </base>
<append compile_threaded="FALSE"> </append>
<append compile_threaded="TRUE"> -mp </append>
</CFLAGS>
<CPPDEFS>
<!-- http://www.pgroup.com/resources/docs.htm -->
<!-- Notes: (see pgi man page & user's guide for the details) -->
<!-- -Mextend => Allow 132-column source lines -->
<!-- -Mfixed => Assume fixed-format source -->
<!-- -Mfree => Assume free-format source -->
<!-- -byteswapio => Swap byte-order for unformatted i/o (big/little-endian) -->
<!-- -target=linux => Specifies the target architecture to Compute Node Linux (CNL only) -->
<!-- -fast => Chooses generally optimal flags for the target platform -->
<!-- -Mnovect => Disables automatic vector pipelining -->
<!-- -Mvect=nosse => Don't generate SSE, SSE2, 3Dnow, and prefetch instructions in loops -->
<!-- -Mflushz => Set SSE to flush-to-zero mode (underflow) loops where possible -->
<!-- -Kieee => Perform fp ops in strict conformance with the IEEE 754 standard. -->
<!-- Some optimizations disabled, slightly slower, more accurate math. -->
<!-- -mp=nonuma => Don't use thread/processors affinity (for NUMA architectures) -->
<!-- -->
<!-- -g => Generate symbolic debug information. Turns off optimization. -->
<!-- -gopt => Generate information for debugger without disabling optimizations -->
<!-- -Mbounds => Add array bounds checking -->
<!-- -Ktrap=fp => Determine IEEE Trap conditions fp => inv,divz,ovf -->
<!-- * inv: invalid operands -->
<!-- * divz divide by zero -->
<!-- * ovf: floating point overflow -->
<!-- -F => leaves file.f for each preprocessed file.F file -->
<!-- -time => Print execution time for each compiler step -->
<append> -DFORTRANUNDERSCORE -DNO_SHR_VMATH -DNO_R16 -DCPRPGI </append>
</CPPDEFS>
<CXX_LINKER>CXX</CXX_LINKER>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -i4 -gopt -time -Mstack_arrays -Mextend -byteswapio -Mflushz -Kieee -Mallocatable=03 </base>
<append compile_threaded="FALSE"> </append>
<append compile_threaded="TRUE"> -mp </append>
<append DEBUG="TRUE"> -O0 -g -Ktrap=fp -Mbounds -Kieee </append>
<!-- Data-model components get vectorization disabled (Mnovect). -->
<append MODEL="datm"> -Mnovect </append>
<append MODEL="dlnd"> -Mnovect </append>
<append MODEL="drof"> -Mnovect </append>
<append MODEL="dwav"> -Mnovect </append>
<append MODEL="dice"> -Mnovect </append>
<append MODEL="docn"> -Mnovect </append>
</FFLAGS>
<FFLAGS_NOOPT>
<base> -O0 </base>
</FFLAGS_NOOPT>
<FIXEDFLAGS>
<base> -Mfixed </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -Mfree </base>
</FREEFLAGS>
<!-- Note that SUPPORTS_CXX is false for pgi in general, because we
need some machine-specific libraries -->
<!-- Technically, PGI does recognize this keyword during parsing,
but support is either buggy or incomplete, notably in that
the "contiguous" attribute is incompatible with "intent".-->
<HAS_F2008_CONTIGUOUS>FALSE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<base> -time -Wl,--allow-multiple-definition </base>
<append compile_threaded="FALSE"> </append>
<append compile_threaded="TRUE"> -mp </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpicxx </MPICXX>
<MPIFC> mpif90 </MPIFC>
<SCC> pgcc </SCC>
<SCXX> pgc++ </SCXX>
<SFC> pgf95 </SFC>
</compiler>
<compiler COMPILER="pgiacc">
<!-- PGI with OpenACC/CUDA Fortran: like the pgi entry but defines USE_CUDA_FORTRAN and links with -acc. -->
<CFLAGS>
<base> -time </base>
<append compile_threaded="FALSE"> </append>
<append compile_threaded="TRUE"> -mp </append>
</CFLAGS>
<CPPDEFS>
<!-- http://www.pgroup.com/resources/docs.htm -->
<!-- Notes: (see pgi man page & user's guide for the details) -->
<!-- -Mextend => Allow 132-column source lines -->
<!-- -Mfixed => Assume fixed-format source -->
<!-- -Mfree => Assume free-format source -->
<!-- -byteswapio => Swap byte-order for unformatted i/o (big/little-endian) -->
<!-- -target=linux => Specifies the target architecture to Compute Node Linux (CNL only) -->
<!-- -fast => Chooses generally optimal flags for the target platform -->
<!-- -Mnovect => Disables automatic vector pipelining -->
<!-- -Mvect=nosse => Don't generate SSE, SSE2, 3Dnow, and prefetch instructions in loops -->
<!-- -Mflushz => Set SSE to flush-to-zero mode (underflow) loops where possible -->
<!-- -Kieee => Perform fp ops in strict conformance with the IEEE 754 standard. -->
<!-- Some optimizations disabled, slightly slower, more accurate math. -->
<!-- -mp=nonuma => Don't use thread/processors affinity (for NUMA architectures) -->
<!-- -->
<!-- -g => Generate symbolic debug information. Turns off optimization. -->
<!-- -gopt => Generate information for debugger without disabling optimizations -->
<!-- -Mbounds => Add array bounds checking -->
<!-- -Ktrap=fp => Determine IEEE Trap conditions fp => inv,divz,ovf -->
<!-- * inv: invalid operands -->
<!-- * divz divide by zero -->
<!-- * ovf: floating point overflow -->
<!-- -F => leaves file.f for each preprocessed file.F file -->
<!-- -time => Print execution time for each compiler step -->
<append> -DFORTRANUNDERSCORE -DNO_SHR_VMATH -DNO_R16 -DUSE_CUDA_FORTRAN -DCPRPGI </append>
</CPPDEFS>
<CXX_LINKER>CXX</CXX_LINKER>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -i4 -time -Mstack_arrays -Mextend -byteswapio -Mflushz -Kieee </base>
<append compile_threaded="FALSE"> </append>
<append compile_threaded="TRUE"> -mp </append>
<append MODEL="cam"> </append>
<append DEBUG="TRUE"> -O0 -g -Ktrap=fp -Mbounds -Kieee </append>
<append MODEL="datm"> -Mnovect </append>
<append MODEL="dlnd"> -Mnovect </append>
<append MODEL="drof"> -Mnovect </append>
<append MODEL="dwav"> -Mnovect </append>
<append MODEL="dice"> -Mnovect </append>
<append MODEL="docn"> -Mnovect </append>
</FFLAGS>
<FFLAGS_NOOPT>
<base> -O0 </base>
</FFLAGS_NOOPT>
<FIXEDFLAGS>
<base> -Mfixed </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -Mfree </base>
</FREEFLAGS>
<!-- Note that SUPPORTS_CXX is false for pgi in general, because we
need some machine-specific libraries -->
<!-- Technically, PGI does recognize this keyword during parsing,
but support is either buggy or incomplete, notably in that
the "contiguous" attribute is incompatible with "intent".-->
<HAS_F2008_CONTIGUOUS>FALSE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<base> -time -Wl,--allow-multiple-definition -acc</base>
<append compile_threaded="FALSE"> </append>
<append compile_threaded="TRUE"> -mp </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpicxx </MPICXX>
<MPIFC> mpif90 </MPIFC>
<SCC> pgcc </SCC>
<SCXX> pgc++ </SCXX>
<SFC> pgf95 </SFC>
</compiler>
<compiler OS="AIX" COMPILER="ibm">
<!-- IBM XL on AIX: auto-tuned arch/cache flags, 64K page sizes at link time, MASS/ESSL math libraries. -->
<CFLAGS>
<append> -qarch=auto -qtune=auto -qcache=auto </append>
</CFLAGS>
<CONFIG_SHELL> /usr/bin/bash </CONFIG_SHELL>
<FFLAGS>
<append> -qarch=auto -qtune=auto -qcache=auto -qsclk=micro </append>
<append MODEL="cam"> -qspill=6000 </append>
</FFLAGS>
<LDFLAGS>
<append DEBUG="TRUE"> -qsigtrap=xl__trcedump </append>
<append> -bdatapsize:64K -bstackpsize:64K -btextpsize:32K </append>
</LDFLAGS>
<MPICC> mpcc_r </MPICC>
<MPIFC> mpxlf2003_r </MPIFC>
<SCC> cc_r </SCC>
<SFC> xlf2003_r </SFC>
<SLIBS>
<append> -lmassv -lessl </append>
<append DEBUG="FALSE"> -lmass </append>
</SLIBS>
</compiler>
<compiler OS="BGL" COMPILER="ibm">
<!-- IBM XL on Blue Gene/L: 440d double-FPU tuning, blrts cross compilers, MPI runtime libs from the BlueLight system tree. -->
<CFLAGS>
<base> -O3 -qstrict </base>
<append> -qtune=440 -qarch=440d </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --build=powerpc-bgp-linux --host=powerpc64-suse-linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX -DnoI8 </append>
</CPPDEFS>
<FFLAGS>
<append> -qtune=440 -qarch=440d </append>
<append DEBUG="FALSE"> -O3 -qstrict -Q </append>
<append DEBUG="TRUE"> -qinitauto=FF911299 -qflttrap=ov:zero:inv:en </append>
<append> -qextname=flush </append>
</FFLAGS>
<LDFLAGS>
<base> -Wl,--relax -Wl,--allow-multiple-definition </base>
</LDFLAGS>
<MLIBS>
<base> -L/bgl/BlueLight/ppcfloor/bglsys/lib -lmpich.rts -lmsglayer.rts -lrts.rts -ldevices.rts </base>
</MLIBS>
<MPICC> blrts_xlc </MPICC>
<MPIFC> blrts_xlf2003 </MPIFC>
<MPI_LIB_NAME> mpich.rts </MPI_LIB_NAME>
<MPI_PATH> /bgl/BlueLight/ppcfloor/bglsys</MPI_PATH>
<SCC> blrts_xlc </SCC>
<SFC> blrts_xlf2003 </SFC>
</compiler>
<compiler OS="BGP" COMPILER="ibm">
<!-- IBM XL on Blue Gene/P: 450 tuning and the ppcfloor driver headers. -->
<CFLAGS>
<append> -qtune=450 -qarch=450 -I/bgsys/drivers/ppcfloor/arch/include/</append>
</CFLAGS>
<CONFIG_ARGS>
<base> --build=powerpc-bgp-linux --host=powerpc64-suse-linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX -DnoI8 </append>
</CPPDEFS>
<FFLAGS>
<append>-qspillsize=2500 -qtune=450 -qarch=450 </append>
<append> -qextname=flush </append>
</FFLAGS>
<LDFLAGS>
<base> -Wl,--relax -Wl,--allow-multiple-definition </base>
</LDFLAGS>
</compiler>
<compiler OS="BGQ" COMPILER="ibm">
<!-- IBM XL on Blue Gene/Q: nested OpenMP via qsmp; qsuppress silences informational message 1520-045. -->
<CFLAGS>
<append DEBUG="FALSE" compile_threaded="TRUE"> -qsmp=omp:nested_par -qsuppress=1520-045 </append>
<append DEBUG="TRUE" compile_threaded="TRUE"> -qsmp=omp:nested_par:noopt -qsuppress=1520-045 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --build=powerpc-bgp-linux --host=powerpc64-suse-linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<base> -g -qfullpath -qmaxmem=-1 -qspillsize=2500 -qextname=flush -qphsinfo </base>
<append DEBUG="FALSE"> -O3 -qstrict -Q </append>
<append DEBUG="FALSE" compile_threaded="TRUE"> -qsmp=omp:nested_par -qsuppress=1520-045 </append>
<append DEBUG="TRUE" compile_threaded="TRUE"> -qsmp=omp:nested_par:noopt -qsuppress=1520-045 </append>
</FFLAGS>
<LDFLAGS>
<base> -Wl,--relax -Wl,--allow-multiple-definition </base>
</LDFLAGS>
</compiler>
<compiler OS="CNL">
<!-- Cray Compute Node Linux: use the cc/CC/ftn wrappers and library paths exported by the Cray programming environment modules. -->
<CMAKE_OPTS>
<base> -DCMAKE_SYSTEM_NAME=Catamount</base>
</CMAKE_OPTS>
<CPPDEFS>
<append> -DLINUX </append>
<append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
</CPPDEFS>
<MPICC> cc </MPICC>
<MPICXX> CC </MPICXX>
<MPIFC> ftn </MPIFC>
<MPI_LIB_NAME> mpich </MPI_LIB_NAME>
<MPI_PATH> $ENV{MPICH_DIR}</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDF_DIR}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PARALLEL_NETCDF_DIR}</PNETCDF_PATH>
<SCC> cc </SCC>
<SCXX> CC </SCXX>
<SFC> ftn </SFC>
</compiler>
<compiler OS="Darwin">
<!-- macOS: -all_load forces the linker to keep all symbols when linking the driver. -->
<CPPDEFS>
<append> -DSYSDARWIN </append>
</CPPDEFS>
<LDFLAGS>
<append MODEL="driver"> -all_load </append>
</LDFLAGS>
</compiler>
<compiler OS="Darwin" COMPILER="intel">
<!-- Intel on macOS: heap arrays when unthreaded; MKL cluster variant for every real MPI library, plain MKL for mpi-serial. -->
<FFLAGS>
<append compile_threaded="FALSE"> -heap-arrays </append>
</FFLAGS>
<SLIBS>
<append MPILIB="mpich"> -mkl=cluster </append>
<append MPILIB="mpich2"> -mkl=cluster </append>
<append MPILIB="mpt"> -mkl=cluster </append>
<append MPILIB="openmpi"> -mkl=cluster </append>
<append MPILIB="mvapich"> -mkl=cluster </append>
<append MPILIB="impi"> -mkl=cluster </append>
<append MPILIB="mpi-serial"> -mkl </append>
</SLIBS>
</compiler>
<compiler OS="Linux" COMPILER="intel">
<!-- Intel on generic Linux: medium code model for large static data, with shared Intel runtime libraries. -->
<FFLAGS>
<append> -mcmodel medium -shared-intel </append>
</FFLAGS>
</compiler>
<compiler MACH="anlworkstation" COMPILER="gnu">
<!-- ANL workstation, GNU: netcdf link flags come from nf-config at build time; BLAS/LAPACK from system libs. -->
<ALBANY_PATH>/projects/install/rhel6-x86_64/ACME/AlbanyTrilinos/Albany/build/install</ALBANY_PATH>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CXX_LIBS>
<base>-lstdc++ -lmpi_cxx</base>
</CXX_LIBS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<SLIBS>
<append>$SHELL{$ENV{NETCDF_PATH}/bin/nf-config --flibs} -lblas -llapack</append>
</SLIBS>
<NETCDF_PATH>$ENV{NETCDF_PATH}</NETCDF_PATH>
<PNETCDF_PATH>$ENV{PNETCDF_PATH}</PNETCDF_PATH>
</compiler>
<compiler MACH="anvil" COMPILER="gnu">
<!-- Anvil, GNU: GPFS filesystem hints; netcdf via nf-config/nc-config and threaded MKL from MKLROOT. -->
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_SLASHPROC -DHAVE_GETTIMEOFDAY </append>
</CPPDEFS>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<SLIBS>
<append>$SHELL{$ENV{NETCDF_FORTRAN_PATH}/bin/nf-config --flibs} -L$ENV{MKLROOT}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm -ldl </append>
<append> $SHELL{$ENV{NETCDF_C_PATH}/bin/nc-config --libs} </append>
</SLIBS>
<NETCDF_C_PATH>$ENV{NETCDF_C_PATH}</NETCDF_C_PATH>
<NETCDF_FORTRAN_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_FORTRAN_PATH>
<PNETCDF_PATH>$ENV{PNETCDF_PATH}</PNETCDF_PATH>
</compiler>
<compiler MACH="anvil" COMPILER="intel">
<!-- Anvil, Intel: static Intel runtime for threaded builds; heap arrays in threaded debug builds. -->
<CFLAGS>
<append compile_threaded="TRUE"> -static-intel</append>
<append compile_threaded="TRUE" DEBUG="TRUE">-heap-arrays</append>
</CFLAGS>
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_SLASHPROC </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align</append>
<append> $SHELL{$ENV{NETCDF_FORTRAN_PATH}/bin/nf-config --fflags} </append>
<append compile_threaded="TRUE"> -static-intel</append>
<append compile_threaded="TRUE" DEBUG="TRUE">-heap-arrays</append>
</FFLAGS>
<LDFLAGS>
<append compile_threaded="TRUE"> -static-intel</append>
</LDFLAGS>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<SLIBS>
<append> $SHELL{$ENV{NETCDF_FORTRAN_PATH}/bin/nf-config --flibs} </append>
<append> $SHELL{$ENV{NETCDF_C_PATH}/bin/nc-config --libs} </append>
<append>-mkl</append>
</SLIBS>
<NETCDF_C_PATH>$ENV{NETCDF_C_PATH}</NETCDF_C_PATH>
<NETCDF_FORTRAN_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_FORTRAN_PATH>
<PNETCDF_PATH>$ENV{PNETCDF_PATH}</PNETCDF_PATH>
</compiler>
<compiler MACH="anvil" COMPILER="pgi">
<!-- Anvil, PGI: GPFS hints; netcdf libs via nf-config with an rpath into NETCDF_PATH/lib. -->
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<SLIBS>
<append>$SHELL{$ENV{NETCDF_PATH}/bin/nf-config --flibs} -llapack -lblas</append>
<append> -rpath $ENV{NETCDF_PATH}/lib </append>
</SLIBS>
<NETCDF_C_PATH>$ENV{NETCDF_C_PATH}</NETCDF_C_PATH>
<NETCDF_FORTRAN_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_FORTRAN_PATH>
<PNETCDF_PATH>$ENV{PNETCDF_PATH}</PNETCDF_PATH>
</compiler>
<compiler MACH="bebop" COMPILER="intel">
<!-- Bebop, Intel: Intel MPI wrappers (mpiicc/mpiicpc/mpiifort); nf-config is expected on PATH here. -->
<ALBANY_PATH>/soft/climate/AlbanyTrilinos_06262017/Albany/buildintel/install</ALBANY_PATH>
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_SLASHPROC </append>
</CPPDEFS>
<CXX_LIBS>
<base>-lstdc++</base>
</CXX_LIBS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align</append>
</FFLAGS>
<MPICC> mpiicc </MPICC>
<MPICXX> mpiicpc </MPICXX>
<MPIFC> mpiifort </MPIFC>
<SLIBS>
<append>$SHELL{nf-config --flibs} -mkl</append>
</SLIBS>
<NETCDF_C_PATH>$ENV{NETCDF_C_PATH}</NETCDF_C_PATH>
<NETCDF_FORTRAN_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_FORTRAN_PATH>
<PNETCDF_PATH>$ENV{PNETCDF_PATH}</PNETCDF_PATH>
</compiler>
<compiler MACH="blues" COMPILER="gnu">
<!-- Blues, GNU: mvapich lives in a spack prefix; netcdf link line from nf-config in NETCDFROOT. -->
<ALBANY_PATH>/soft/climate/AlbanyTrilinos_06262017/Albany/build/install</ALBANY_PATH>
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_SLASHPROC -DHAVE_GETTIMEOFDAY</append>
</CPPDEFS>
<CXX_LIBS>
<base>-lstdc++</base>
</CXX_LIBS>
<MPI_LIB_NAME MPILIB="mvapich">mpi</MPI_LIB_NAME>
<MPI_PATH MPILIB="mvapich">/blues/gpfs/home/software/spack/opt/spack/linux-x86_64/gcc-5.3.0/mvapich2-2.2b-sdh7nhddicl4sh5mgxjyzxtxox3ajqey</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -llapack -lblas</append>
</SLIBS>
</compiler>
<compiler MACH="blues" COMPILER="intel">
<!-- Blues, Intel: per-MPI MKL selection (cluster MKL for parallel MPI libs, plain MKL for mpi-serial). -->
<MPI_LIB_NAME MPILIB="mvapich">mpi</MPI_LIB_NAME>
<MPI_PATH MPILIB="mvapich">/soft/mvapich2/2.2b_psm/intel-15.0</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -llapack -lblas </append>
<append> -Wl,-rpath -Wl,$ENV{NETCDFROOT}/lib </append>
<append MPILIB="mpich"> -mkl=cluster </append>
<append MPILIB="mpich2"> -mkl=cluster </append>
<append MPILIB="mpt"> -mkl=cluster </append>
<append MPILIB="openmpi"> -mkl=cluster </append>
<append MPILIB="mvapich"> -mkl=cluster </append>
<append MPILIB="impi"> -mkl=cluster </append>
<append MPILIB="mpi-serial"> -mkl </append>
</SLIBS>
</compiler>
<compiler MACH="blues" COMPILER="intel13">
<!-- Blues, Intel 13: fixed openmpi/mpich install prefixes for this compiler version; MKL selection as in the intel entry. -->
<MPI_LIB_NAME MPILIB="openmpi"> mpi</MPI_LIB_NAME>
<MPI_LIB_NAME MPILIB="mpich">mpich</MPI_LIB_NAME>
<MPI_PATH MPILIB="openmpi">/soft/openmpi/1.8.2/intel-13.1</MPI_PATH>
<MPI_PATH MPILIB="mpich">/soft/mpich2/1.4.1-intel-13.1</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -llapack -lblas</append>
<append MPILIB="mpich"> -mkl=cluster </append>
<append MPILIB="mpich2"> -mkl=cluster </append>
<append MPILIB="mpt"> -mkl=cluster </append>
<append MPILIB="openmpi"> -mkl=cluster </append>
<append MPILIB="mvapich"> -mkl=cluster </append>
<append MPILIB="impi"> -mkl=cluster </append>
<append MPILIB="mpi-serial"> -mkl </append>
</SLIBS>
</compiler>
<compiler MACH="blues" COMPILER="nag">
<!-- Blues, NAG: custom mpich build; netcdf Fortran and C link flags are queried from nf-config and nc-config at build time. -->
<MPI_LIB_NAME MPILIB="mpich"> mpi </MPI_LIB_NAME>
<MPI_PATH MPILIB="mpich">/home/robl/soft/mpich-3.1.4-nag-6.0</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<!-- Fixed: the first shell query previously used Makefile "$(shell ...)" syntax, which the CIME build does not expand; both queries now use the $SHELL{...} form used throughout this file. -->
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} $SHELL{$NETCDF_PATH/bin/nc-config --libs} -llapack -lblas</append>
</SLIBS>
</compiler>
<compiler MACH="blues" COMPILER="pgi">
<!-- Blues, PGI: PGI-specific MPI prefixes; netcdf link flags from nf-config plus an rpath into NETCDFROOT/lib. -->
<MPI_LIB_NAME MPILIB="mvapich"> mpi</MPI_LIB_NAME>
<MPI_LIB_NAME MPILIB="openmpi"> mpi</MPI_LIB_NAME>
<MPI_LIB_NAME MPILIB="mpich">mpich</MPI_LIB_NAME>
<MPI_PATH MPILIB="openmpi">/soft/openmpi/1.8.2/pgi-13.9</MPI_PATH>
<MPI_PATH MPILIB="mpich">/soft/mpich2/1.4.1-pgi-13.9/</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -llapack -lblas</append>
<append> -rpath $ENV{NETCDFROOT}/lib </append>
</SLIBS>
</compiler>
<compiler MACH="cades" COMPILER="gnu">
<!-- CADES, GNU: OpenMP via -fopenmp; library prefixes are fixed swtree installs. -->
<CFLAGS>
<append compile_threaded="TRUE"> -fopenmp </append>
</CFLAGS>
<CMAKE_OPTS>
<append MODEL="cism"> -D CISM_GNU=ON </append>
</CMAKE_OPTS>
<CPPDEFS>
<append> -DFORTRANUNDERSCORE -DNO_R16</append>
<!-- <append MODEL="clm"> -DCLM_PFLOTRAN </append>
<append MODEL="clm"> -DCOLUMN_MODE </append> -->
</CPPDEFS>
<CXX_LINKER>FORTRAN</CXX_LINKER>
<FC_AUTO_R8>
<base> -fdefault-real-8 </base>
</FC_AUTO_R8>
<FFLAGS>
<!-- -ffree-line-length-none and -ffixed-line-length-none need to be in FFLAGS rather than in FIXEDFLAGS/FREEFLAGS
so that these are passed to cmake builds (cmake builds don't use FIXEDFLAGS and FREEFLAGS). -->
<base> -O -fconvert=big-endian -ffree-line-length-none -ffixed-line-length-none -fno-range-check</base>
<append compile_threaded="TRUE"> -fopenmp </append>
<append DEBUG="TRUE"> -g -Wall </append>
<!-- <append MODEL="clm"> -I/lustre/or-hydra/cades-ccsi/$USER/models/pflotran-interface/src/clm-pflotran</append> -->
</FFLAGS>
<FIXEDFLAGS>
<base> -ffixed-form </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -ffree-form </base>
</FREEFLAGS>
<HDF5_PATH>/software/dev_tools/swtree/cs400_centos7.2_pe2016-08/hdf5-parallel/1.8.17/centos7.2_gnu5.3.0</HDF5_PATH>
<NETCDF_PATH>/software/dev_tools/swtree/cs400_centos7.2_pe2016-08/netcdf-hdf5parallel/4.3.3.1/centos7.2_gnu5.3.0</NETCDF_PATH>
<PNETCDF_PATH>/software/dev_tools/swtree/cs400_centos7.2_pe2016-08/pnetcdf/1.9.0/centos7.2_gnu5.3.0</PNETCDF_PATH>
<LAPACK_LIBDIR>/software/tools/compilers/intel_2017/mkl/lib/intel64</LAPACK_LIBDIR>
<LDFLAGS>
<append compile_threaded="TRUE"> -fopenmp </append>
<append MODEL="driver"> -L$NETCDF_PATH/lib -Wl,-rpath=$NETCDF_PATH/lib -lnetcdff -lnetcdf </append>
<!-- <append MODEL="driver"> -L$ENV{CLM_PFLOTRAN_SOURCE_DIR} -lpflotran $ENV{PETSC_LIB} </append> -->
</LDFLAGS>
<MPICC>mpicc</MPICC>
<MPICXX>mpic++</MPICXX>
<MPIFC>mpif90</MPIFC>
<SCC>gcc</SCC>
<!-- Fixed: serial C++ compiler was "gcpp", which is not a GNU driver; the GNU C++ driver is g++. -->
<SCXX>g++</SCXX>
<SFC>gfortran</SFC>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<compiler MACH="cascade" COMPILER="intel">
<!-- Cascade, Intel: lustre filesystem hints; Intel runtime checks enabled in DEBUG builds. -->
<CONFIG_ARGS>
<base> --host=Linux --enable-filesystem-hints=lustre</base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="TRUE"> -g -traceback -O0 -fpe0 -check all -check noarg_temp_created -ftrapuv </append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_HOME}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -L$ENV{MKL_PATH}/lib/intel64 -lmkl_rt </base>
<append MPILIB="mpich2"> -mkl=cluster </append>
<append MPILIB="mpi-serial"> -mkl </append>
</SLIBS>
</compiler>
<compiler MACH="cascade" COMPILER="nag">
<!-- Cascade, NAG: noI8 because NAG lacks 8-byte default integers here; DEBUG flag choices are explained in the inline note below. -->
<CPPDEFS>
<append> -DnoI8 </append>
</CPPDEFS>
<FFLAGS>
<!-- -C=Undefined flag doesn't work as it requires MPI and NETCDF to be
compiled with this flag, which is not available now. NAG has the following debug flags
<ADD_FFLAGS DEBUG="TRUE"> -gline -C=all -g -C=undefined -C=recursion -nan -O0 -v </ADD_FFLAGS>
"-nan" is an important flag. currently, it doesn't work for pio, it is only used for "cam" model.
-->
<append DEBUG="TRUE"> -C=all -g -O0 -v </append>
<append DEBUG="TRUE" MODEL="cam"> -C=all -g -nan -O0 -v </append>
</FFLAGS>
<LDFLAGS>
<append compile_threaded="TRUE"> </append>
</LDFLAGS>
<MPI_PATH MPILIB="mvapich2"> $ENV{MPI_LIB}</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDF_ROOT}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append> -L$ENV{NETCDF_ROOT}/lib -lnetcdf -lnetcdff -L$ENV{MKL_PATH} -lmkl_rt</append>
</SLIBS>
</compiler>
<compiler MACH="cetus" COMPILER="ibm">
<!-- Cetus (BG/Q), IBM XL: bgclang mpic++11 for C++, XL for C/Fortran; XL runtime libs added explicitly for threaded and C++ links. -->
<ALBANY_PATH>/projects/ccsm/libs/AlbanyTrilinos/Albany/build/install</ALBANY_PATH>
<CPPDEFS>
<append> -DMPASLI_EXTERNAL_INTERFACE_DISABLE_MANGLING </append>
</CPPDEFS>
<CXX_LIBS>
<base> -llapack -lblas -L$ENV{IBM_MAIN_DIR}/xlf/bg/14.1/bglib64 -lxlfmath -lxlf90_r -lxlopt -lxl -L$ENV{IBM_MAIN_DIR}/xlsmp/bg/3.1/bglib64 -lxlsmp </base>
</CXX_LIBS>
<CXX_LINKER>CXX</CXX_LINKER>
<HDF5_PATH>/soft/libraries/hdf5/1.8.14/cnk-xl/current/</HDF5_PATH>
<LD> mpixlf77_r </LD>
<MPICC> mpixlc_r </MPICC>
<MPICXX> /soft/compilers/bgclang/mpi/bgclang/bin/mpic++11 </MPICXX>
<MPIFC> mpixlf2003_r </MPIFC>
<NETCDF_PATH>/soft/libraries/netcdf/4.3.3-f4.4.1/cnk-xl/current/</NETCDF_PATH>
<PETSC_PATH>/soft/libraries/petsc/3.5.3.1</PETSC_PATH>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>/soft/libraries/pnetcdf/1.6.0/cnk-xl/current/</PNETCDF_PATH>
<SCC> mpixlc_r </SCC>
<SFC> mpixlf2003_r </SFC>
<SLIBS>
<append>-L$NETCDF_PATH/lib -lnetcdff -lnetcdf -L$HDF5_PATH/lib -lhdf5_hl -lhdf5 -L/soft/libraries/alcf/current/xl/ZLIB/lib -lz -L/soft/libraries/alcf/current/xl/LAPACK/lib -llapack -L/soft/libraries/alcf/current/xl/BLAS/lib -lblas -L/bgsys/drivers/ppcfloor/comm/sys/lib </append>
<append compile_threaded="TRUE"> -L$ENV{IBM_MAIN_DIR}/xlf/bg/14.1/bglib64 -lxlfmath -lxlf90_r -lxlopt -lxl -L$ENV{IBM_MAIN_DIR}/xlsmp/bg/3.1/bglib64 -lxlsmp </append>
</SLIBS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<compiler MACH="constance" COMPILER="intel">
<!-- Constance, Intel: lustre hints; MKL single-runtime library; Intel runtime checks in DEBUG builds. -->
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append DEBUG="TRUE"> -g -traceback -O0 -fpe0 -check all -check noarg_temp_created -ftrapuv </append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_HOME}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -lpmi -L$ENV{MKL_PATH} -lmkl_rt</base>
</SLIBS>
</compiler>
<compiler MACH="constance" COMPILER="nag">
<!-- Constance, NAG: kind=byte gives byte-valued kind numbers; NAG runtime checks (-C=all) in DEBUG builds. -->
<CFLAGS>
<append DEBUG="FALSE"> -O2 -kind=byte </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 -kind=byte </append>
<append DEBUG="TRUE"> -C=all -g -O0 -v </append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_HOME}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -lpmi -L$ENV{MKL_PATH} -lmkl_rt</base>
</SLIBS>
</compiler>
<compiler MACH="constance" COMPILER="pgi">
<!-- Constance, PGI: lustre hints; PGI runtime checks in DEBUG builds. -->
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<!-- Fixed: removed a duplicate DEBUG append that carried Intel-only options (-check all, -fpe0, -ftrapuv), which pgf90 rejects; the PGI debug line below matches the compy PGI entry. -->
<append DEBUG="TRUE">-C -Mbounds -traceback -Mchkfpstk -Mchkstk -Mdalign -Mdepchk -Mextend -Miomutex -Mrecursive -Ktrap=fp -O0 -g -byteswapio -Meh_frame</append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_HOME}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -lpmi -L$ENV{MPI_LIB} -lmpich</base>
</SLIBS>
</compiler>
<compiler MACH="compy" COMPILER="intel">
<!-- Compy, Intel: Intel MPI wrappers under impi; DEBUG adds runtime checks plus signaling-NaN initialization. -->
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append MODEL="gptl"> -DHAVE_SLASHPROC </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append DEBUG="TRUE"> -g -traceback -O0 -fpe0 -check all -check noarg_temp_created -ftrapuv -init=snan</append>
</FFLAGS>
<NETCDF_PATH>$ENV{NETCDF_HOME}</NETCDF_PATH>
<PNETCDF_PATH>$ENV{PNETCDF_HOME}</PNETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<SLIBS>
<base> -lpmi -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -L$ENV{MKL_PATH}/lib/intel64/ -lmkl_rt $ENV{PNETCDF_LIBRARIES}</base>
</SLIBS>
<MPICC MPILIB="impi">mpiicc</MPICC>
<MPICXX MPILIB="impi">mpiicpc</MPICXX>
<MPIFC MPILIB="impi">mpiifort</MPIFC>
</compiler>
<compiler MACH="compy" COMPILER="pgi">
<!-- Compy, PGI: PGI wrappers under impi; libraries are supplied through LDFLAGS rather than SLIBS here. -->
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append MODEL="gptl"> -DHAVE_SLASHPROC </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append DEBUG="TRUE">-C -Mbounds -traceback -Mchkfpstk -Mchkstk -Mdalign -Mdepchk -Mextend -Miomutex -Mrecursive -Ktrap=fp -O0 -g -byteswapio -Meh_frame</append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_HOME}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDF_HOME}</PNETCDF_PATH>
<LDFLAGS>
<append> -lpmi -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -L$ENV{MKL_PATH}/lib/intel64/ -lmkl_rt $ENV{PNETCDF_LIBRARIES} </append>
</LDFLAGS>
<MPICC MPILIB="impi">mpipgcc</MPICC>
<MPICXX MPILIB="impi">mpipgcxx</MPICXX>
<MPIFC MPILIB="impi">mpipgf90</MPIFC>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<compiler MACH="cori-haswell" COMPILER="intel">
<!-- Cori Haswell, Intel: consistent fp model for reproducibility; netcdf from NETCDF_DIR; MKL plus pthread. -->
<ALBANY_PATH>/global/project/projectdirs/acme/software/AlbanyTrilinos20190823/albany-build/install</ALBANY_PATH>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<FFLAGS>
<base> -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model consistent -fimf-use-svml </base>
<append DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align </append>
</FFLAGS>
<PETSC_PATH>$ENV{PETSC_DIR}</PETSC_PATH>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<SFC> ifort </SFC>
<SLIBS>
<append> -L$ENV{NETCDF_DIR} -lnetcdff -Wl,--as-needed,-L$ENV{NETCDF_DIR}/lib -lnetcdff -lnetcdf </append>
<append> -mkl -lpthread </append>
</SLIBS>
</compiler>
<compiler MACH="cori-knl" COMPILER="intel">
<!-- Cori KNL, Intel: AVX-512 targeting when Intel MPI is used; otherwise Cray wrappers are assumed. -->
<ALBANY_PATH>/global/project/projectdirs/acme/software/AlbanyTrilinos20190823/albany-build/install</ALBANY_PATH>
<CFLAGS>
<append MPILIB="impi"> -axMIC-AVX512 -xCORE-AVX2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DARCH_MIC_KNL </append>
</CPPDEFS>
<FFLAGS>
<base> -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model consistent -fimf-use-svml </base>
<append DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align</append>
<append MPILIB="impi"> -xMIC-AVX512 </append>
<append> -DHAVE_ERF_INTRINSICS </append>
</FFLAGS>
<MPICC MPILIB="impi"> mpiicc </MPICC>
<MPICXX MPILIB="impi"> mpiicpc </MPICXX>
<MPIFC MPILIB="impi"> mpiifort </MPIFC>
<!-- When using Intel MPI, can't use the Cray compiler wrappers. Must also set environment variables in config_machines.xml -->
<MPI_LIB_NAME MPILIB="impi">impi</MPI_LIB_NAME>
<PETSC_PATH>$ENV{PETSC_DIR}</PETSC_PATH>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<SFC> ifort </SFC>
<SLIBS>
<append> -L$ENV{NETCDF_DIR} -lnetcdff -Wl,--as-needed,-L$ENV{NETCDF_DIR}/lib -lnetcdff -lnetcdf </append>
<append> -mkl -lpthread </append>
</SLIBS>
</compiler>
<compiler MACH="cori-knl" COMPILER="intel19">
<!-- Cori KNL, Intel 19: a full standalone flag set (this entry does not inherit the generic intel flags). -->
<CFLAGS>
<base> -O2 -fp-model precise -std=gnu99 </base>
<append compile_threaded="TRUE"> -qopenmp </append>
<append DEBUG="FALSE"> -O2 -debug minimal </append>
<append DEBUG="TRUE"> -O0 -g </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DFORTRANUNDERSCORE -DNO_R16 -DCPRINTEL</append>
</CPPDEFS>
<CXX_LDFLAGS>
<base> -cxxlib </base>
</CXX_LDFLAGS>
<CXX_LINKER>FORTRAN</CXX_LINKER>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model consistent -fimf-use-svml </base>
<append compile_threaded="TRUE"> -qopenmp </append>
<append DEBUG="TRUE"> -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created </append>
<append DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align </append>
<append MPILIB="impi"> -xMIC-AVX512 </append>
</FFLAGS>
<FIXEDFLAGS>
<base> -fixed -132 </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -free </base>
</FREEFLAGS>
<HAS_F2008_CONTIGUOUS>TRUE</HAS_F2008_CONTIGUOUS>
<LDFLAGS>
<append compile_threaded="TRUE"> -qopenmp </append>
</LDFLAGS>
<PETSC_PATH>$ENV{PETSC_DIR}</PETSC_PATH>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<SFC> ifort </SFC>
<MPICC MPILIB="impi"> mpiicc </MPICC>
<MPICXX MPILIB="impi"> mpiicpc </MPICXX>
<MPIFC MPILIB="impi"> mpiifort </MPIFC>
<!-- When using Intel MPI, can't use the Cray compiler wrappers. Must also set environment variables in config_machines.xml -->
<MPI_LIB_NAME MPILIB="impi">impi</MPI_LIB_NAME>
<SLIBS>
<append> -L$ENV{NETCDF_DIR} -lnetcdff -Wl,--as-needed,-L$ENV{NETCDF_DIR}/lib -lnetcdff -lnetcdf </append>
<append> -mkl -lpthread </append>
</SLIBS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<compiler MACH="eastwind" COMPILER="pgi">
<!-- Eastwind, PGI: lustre hints; netcdf and pmi linked directly from NETCDF_HOME. -->
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_HOME}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -lpmi </base>
</SLIBS>
</compiler>
<compiler MACH="eddi" COMPILER="gnu">
<!-- Eddi, GNU: netcdf/curl/BLAS/LAPACK linked from NETCDF_HOME and system libs. -->
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_VPRINTF -DHAVE_GETTIMEOFDAY -DHAVE_BACKTRACE </append>
</CPPDEFS>
<NETCDF_PATH>$ENV{NETCDF_HOME}</NETCDF_PATH>
<SLIBS>
<append> -L$ENV{NETCDF_HOME}/lib/ -lnetcdff -lnetcdf -lcurl -llapack -lblas </append>
</SLIBS>
</compiler>
<compiler MACH="eos" COMPILER="intel">
<!-- Eos (Cray), Intel: cc/CC/ftn wrappers; sequential MKL linked statically via explicit archive groups. -->
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_PAPI </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append DEBUG="TRUE"> -g -traceback -O0 -fpe0 -check all -check noarg_temp_created -ftrapuv </append>
</FFLAGS>
<MPICC> cc </MPICC>
<MPICXX> CC </MPICXX>
<MPIFC> ftn </MPIFC>
<SLIBS>
<append> -L$ENV{NETCDF_DIR} -lnetcdff -Wl,--as-needed,-L$ENV{NETCDF_DIR}/lib -lnetcdff -lnetcdf </append>
<append> $ENV{MKLROOT}/lib/intel64/libmkl_scalapack_lp64.a -Wl,--start-group $ENV{MKLROOT}/lib/intel64/libmkl_intel_lp64.a $ENV{MKLROOT}/lib/intel64/libmkl_core.a $ENV{MKLROOT}/lib/intel64/libmkl_sequential.a -Wl,--end-group $ENV{MKLROOT}/lib/intel64/libmkl_blacs_intelmpi_lp64.a -lpthread -lm </append>
</SLIBS>
</compiler>
<compiler MACH="ghost" COMPILER="intel">
<!-- Ghost, Intel: ESMF and pFUnit installs are fixed paths; cluster MKL with openmpi, plain MKL with mpi-serial. -->
<ALBANY_PATH>/projects/ccsm/AlbanyTrilinos_20190904/albany-build/install</ALBANY_PATH>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<ESMF_LIBDIR>/projects/ccsm/esmf-6.3.0rp1/lib/libO/Linux.intel.64.openmpi.default</ESMF_LIBDIR>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<MPI_PATH MPILIB="openmpi">/opt/openmpi-1.8-intel</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PFUNIT_PATH MPILIB="mpi-serial" compile_threaded="FALSE">/projects/ccsm/pfunit/3.2.9/mpi-serial</PFUNIT_PATH>
<PIO_FILESYSTEM_HINTS>lustre </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -L/projects/ccsm/BLAS-intel -lblas_LINUX</append>
<append MPILIB="openmpi"> -mkl=cluster </append>
<append MPILIB="mpi-serial"> -mkl </append>
</SLIBS>
</compiler>
<compiler MACH="itasca" COMPILER="intel">
<!-- Itasca, Intel: standalone flag set with hard-coded Composer XE 2013 MKL include and library paths. -->
<CFLAGS>
<base> -O2 -fp-model precise -I/soft/intel/x86_64/2013/composer_xe_2013/composer_xe_2013_sp1.3.174/mkl/include </base>
<append compile_threaded="TRUE"> -openmp </append>
</CFLAGS>
<CPPDEFS>
<append> -DFORTRANUNDERSCORE -DNO_R16</append>
<append> -DCPRINTEL </append>
</CPPDEFS>
<CXX_LDFLAGS>
<base> -cxxlib </base>
</CXX_LDFLAGS>
<CXX_LINKER>FORTRAN</CXX_LINKER>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -fp-model source -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -I/soft/intel/x86_64/2013/composer_xe_2013/composer_xe_2013_sp1.3.174/mkl/include </base>
<append compile_threaded="TRUE"> -openmp </append>
<append DEBUG="TRUE"> -O0 -g -check uninit -check bounds -check pointers -fpe0 </append>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<FIXEDFLAGS>
<base> -fixed -132 </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -free </base>
</FREEFLAGS>
<LDFLAGS>
<append compile_threaded="TRUE"> -openmp </append>
<append> -lnetcdff </append>
</LDFLAGS>
<MPICC> mpiicc </MPICC>
<MPICXX> mpiicpc </MPICXX>
<MPIFC> mpiifort </MPIFC>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<SFC> ifort </SFC>
<SLIBS>
<append>-L/soft/netcdf/fortran-4.4-intel-sp1-update3-parallel/lib -lnetcdff -L/soft/hdf5/hdf5-1.8.13-intel-2013-sp1-update3-impi-5.0.0.028/lib -openmp -fPIC -lnetcdf -lnetcdf -L/soft/intel/x86_64/2013/composer_xe_2013/composer_xe_2013_sp1.3.174/mkl/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_intel_thread -lpthread -lm </append>
</SLIBS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- Intel compiler settings for the three Lawrencium partitions (lr2/lr3/lr6); identical contents. -->
<compiler MACH="lawrencium-lr2" COMPILER="intel">
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_VPRINTF -DHAVE_GETTIMEOFDAY </append>
</CPPDEFS>
<LAPACK_LIBDIR>/global/software/sl-6.x86_64/modules/intel/2016.1.150/lapack/3.6.0-intel/lib</LAPACK_LIBDIR>
<NETCDF_PATH>$ENV{NETCDF_DIR}</NETCDF_PATH>
<SLIBS>
<append> -lnetcdff -lnetcdf -mkl</append>
</SLIBS>
</compiler>
<compiler MACH="lawrencium-lr3" COMPILER="intel">
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_VPRINTF -DHAVE_GETTIMEOFDAY </append>
</CPPDEFS>
<LAPACK_LIBDIR>/global/software/sl-6.x86_64/modules/intel/2016.1.150/lapack/3.6.0-intel/lib</LAPACK_LIBDIR>
<NETCDF_PATH>$ENV{NETCDF_DIR}</NETCDF_PATH>
<SLIBS>
<append> -lnetcdff -lnetcdf -mkl</append>
</SLIBS>
</compiler>
<compiler MACH="lawrencium-lr6" COMPILER="intel">
<CPPDEFS>
<append MODEL="gptl"> -DHAVE_VPRINTF -DHAVE_GETTIMEOFDAY </append>
</CPPDEFS>
<LAPACK_LIBDIR>/global/software/sl-6.x86_64/modules/intel/2016.1.150/lapack/3.6.0-intel/lib</LAPACK_LIBDIR>
<NETCDF_PATH>$ENV{NETCDF_DIR}</NETCDF_PATH>
<SLIBS>
<append> -lnetcdff -lnetcdf -mkl</append>
</SLIBS>
</compiler>
<!-- GNU compiler settings for lawrencium-lr6; LAPACK path from env, NetCDF lib path hard-coded in SLIBS. -->
<compiler MACH="lawrencium-lr6" COMPILER="gnu">
<LAPACK_LIBDIR>$ENV{LAPACK_DIR}/lib</LAPACK_LIBDIR>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CXX_LIBS>
<base>-lstdc++ -lmpi_cxx</base>
</CXX_LIBS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append > -I$ENV{NETCDF_DIR}/include </append>
</FFLAGS>
<NETCDF_PATH>$ENV{NETCDF_DIR}</NETCDF_PATH>
<PNETCDF_PATH>$ENV{PNETCDF_DIR}</PNETCDF_PATH>
<SLIBS>
<append> -L/global/software/sl-7.x86_64/modules/gcc/6.3.0/netcdf/4.4.1.1-gcc-p/lib -lnetcdff -lnetcdf -lnetcdf -lblas -llapack </append>
</SLIBS>
</compiler>
<!-- Minimal GNU settings for a generic Linux box: everything resolved from the environment. -->
<compiler MACH="linux-generic" COMPILER="gnu">
<NETCDF_PATH> $ENV{NETCDF_PATH}</NETCDF_PATH>
<PNETCDF_PATH> $ENV{PNETCDF_PATH}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} </append>
</SLIBS>
</compiler>
<!-- GNU settings for macOS; links BLAS/LAPACK via Apple's Accelerate framework. -->
<compiler MACH="mac" COMPILER="gnu">
<LDFLAGS>
<append>-framework Accelerate</append>
</LDFLAGS>
<NETCDF_PATH> $ENV{NETCDF_PATH}</NETCDF_PATH>
<SLIBS>
<append>-L$NETCDF_PATH/lib -lnetcdff -lnetcdf</append>
</SLIBS>
</compiler>
<!-- GNU compiler settings for the "melvin" machine (SEMS toolchain; NetCDF/PNetCDF roots from env). -->
<compiler MACH="melvin" COMPILER="gnu">
<ALBANY_PATH>/projects/install/rhel6-x86_64/ACME/AlbanyTrilinos/Albany/build/install</ALBANY_PATH>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CXX_LIBS>
<base>-lstdc++</base>
</CXX_LIBS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append > -I$ENV{NETCDFROOT}/include </append>
</FFLAGS>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PFUNIT_PATH MPILIB="mpi-serial" compile_threaded="FALSE">$ENV{SEMS_PFUNIT_ROOT}</PFUNIT_PATH>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -lblas -llapack</append>
</SLIBS>
</compiler>
<!-- Intel compiler settings for "melvin"; same layout as the GNU entry above. -->
<compiler MACH="melvin" COMPILER="intel">
<ALBANY_PATH>/projects/install/rhel6-x86_64/ACME/AlbanyTrilinos/Albany/build/install</ALBANY_PATH>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CXX_LIBS>
<base>-lstdc++ -lmpi_cxx</base>
</CXX_LIBS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -lblas -llapack</append>
</SLIBS>
</compiler>
<!-- GNU+CUDA settings for "snl-white": C++ is compiled through Kokkos' nvcc_wrapper,
     targeting Pascal (cc60) GPUs with CUDA lambdas enabled. -->
<compiler COMPILER="gnu" MACH="snl-white">
<SCXX>$ENV{E3SM_SRCROOT}/externals/kokkos/bin/nvcc_wrapper</SCXX>
<MPICXX>$ENV{E3SM_SRCROOT}/externals/kokkos/bin/nvcc_wrapper</MPICXX>
<KOKKOS_OPTIONS> --arch=Pascal60 --with-cuda=$ENV{CUDA_ROOT} --with-cuda-options=enable_lambda </KOKKOS_OPTIONS>
<CXXFLAGS>
<append>-expt-extended-lambda -DCUDA_BUILD</append>
</CXXFLAGS>
<LDFLAGS>
<append> -lstdc++ -lcudart </append>
</LDFLAGS>
<NETCDF_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -lblas -llapack</append>
</SLIBS>
</compiler>
<!-- Intel 18 settings for the "snl-blake" machine.
     NOTE: the Intel ISA-targeting flag is spelled -xCORE-AVX512 (hyphen), matching the
     -xCORE-AVX2 usage elsewhere in this file; the previous -xCORE_AVX512 (underscore)
     is not a valid Intel compiler option and has been corrected in CFLAGS, CXXFLAGS
     and FFLAGS below. -->
<compiler COMPILER="intel18" MACH="snl-blake">
<CPPDEFS>
<append> -DFORTRANUNDERSCORE -DNO_R16 -DCPRINTEL </append>
</CPPDEFS>
<CFLAGS>
<base> -xCORE-AVX512 -mkl -std=gnu99 </base>
<append DEBUG="FALSE"> -O3 -g -debug minimal </append>
<append DEBUG="TRUE"> -O0 -g </append>
</CFLAGS>
<CXXFLAGS>
<base> -xCORE-AVX512 -mkl -std=c++11 </base>
<append DEBUG="FALSE"> -O3 -g -debug minimal </append>
<append DEBUG="TRUE"> -O0 -g </append>
<append compile_threaded="TRUE"> -qopenmp </append>
</CXXFLAGS>
<FFLAGS>
<base> -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -xCORE-AVX512 -mkl </base>
<append compile_threaded="TRUE"> -qopenmp </append>
<append DEBUG="FALSE"> -O3 -g -debug minimal </append>
<append DEBUG="TRUE"> -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created </append>
</FFLAGS>
<LDFLAGS>
<base> -mkl -lstdc++ </base>
<append compile_threaded="TRUE"> -qopenmp </append>
<append MODEL="driver">-L$(NETCDF_FORTRAN_PATH)/lib64</append>
</LDFLAGS>
<FIXEDFLAGS>
<base> -fixed -132 </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -free </base>
</FREEFLAGS>
<HAS_F2008_CONTIGUOUS>TRUE</HAS_F2008_CONTIGUOUS>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<SFC> ifort </SFC>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<CXX_LINKER>FORTRAN</CXX_LINKER>
<CXX_LDFLAGS>
<base> -cxxlib </base>
</CXX_LDFLAGS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- Intel settings for the "mesabi" machine (MSI).
     NOTE: the MKL -I include paths in CFLAGS and FFLAGS were previously broken across
     two lines mid-word ("...mkl/inc" / "lude"), yielding an invalid include path; they
     are rejoined here on a single line, matching the identical flags in the itasca
     entry earlier in this file. -->
<compiler MACH="mesabi" COMPILER="intel">
<CFLAGS>
<base> -O2 -fp-model precise -I/soft/intel/x86_64/2013/composer_xe_2013/composer_xe_2013_sp1.3.174/mkl/include </base>
<append compile_threaded="TRUE"> -openmp </append>
</CFLAGS>
<CPPDEFS>
<append> -DFORTRANUNDERSCORE -DNO_R16</append>
<append> -DCPRINTEL </append>
</CPPDEFS>
<CXX_LDFLAGS>
<base> -cxxlib </base>
</CXX_LDFLAGS>
<CXX_LINKER>FORTRAN</CXX_LINKER>
<FC_AUTO_R8>
<base> -r8 </base>
</FC_AUTO_R8>
<FFLAGS>
<base> -fp-model source -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -I/soft/intel/x86_64/2013/composer_xe_2013/composer_xe_2013_sp1.3.174/mkl/include </base>
<append compile_threaded="TRUE"> -openmp </append>
<append DEBUG="TRUE"> -O0 -g -check uninit -check bounds -check pointers -fpe0 </append>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<FIXEDFLAGS>
<base> -fixed -132 </base>
</FIXEDFLAGS>
<FREEFLAGS>
<base> -free </base>
</FREEFLAGS>
<LDFLAGS>
<append compile_threaded="TRUE"> -openmp </append>
<append> -lnetcdff </append>
</LDFLAGS>
<MPICC> mpiicc </MPICC>
<MPICXX> mpiicpc </MPICXX>
<MPIFC> mpiifort </MPIFC>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<SFC> ifort </SFC>
<SLIBS>
<append>-L/soft/netcdf/fortran-4.4-intel-sp1-update3-parallel/lib -lnetcdff -L/soft/hdf5/hdf5-1.8.13-intel-2013-sp1-update3-impi-5.0.0.028/lib -openmp -fPIC -lnetcdf -lnetcdf -L/soft/intel/x86_64/2013/composer_xe_2013/composer_xe_2013_sp1.3.174/mkl/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_intel_thread -lpthread -lm </append>
</SLIBS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- IBM XL settings for the "mira" Blue Gene/Q system; C++ goes through bgclang's mpic++11. -->
<compiler MACH="mira" COMPILER="ibm">
<ALBANY_PATH>/projects/ccsm/libs/AlbanyTrilinos/Albany/build/install</ALBANY_PATH>
<CPPDEFS>
<append> -DMPASLI_EXTERNAL_INTERFACE_DISABLE_MANGLING </append>
</CPPDEFS>
<CXX_LIBS>
<base> -llapack -lblas -L$ENV{IBM_MAIN_DIR}/xlf/bg/14.1/bglib64 -lxlfmath -lxlf90_r -lxlopt -lxl -L$ENV{IBM_MAIN_DIR}/xlsmp/bg/3.1/bglib64 -lxlsmp </base>
</CXX_LIBS>
<CXX_LINKER>CXX</CXX_LINKER>
<HDF5_PATH>/soft/libraries/hdf5/1.8.14/cnk-xl/current/</HDF5_PATH>
<!-- This LD is a workaround for darshan initialization on mira (Darshan does -->
<!-- not run if f90 or higher is used for linking -->
<LD> mpixlf77_r </LD>
<MPICC> mpixlc_r </MPICC>
<MPICXX> /soft/compilers/bgclang/mpi/bgclang/bin/mpic++11 </MPICXX>
<MPIFC> mpixlf2003_r </MPIFC>
<NETCDF_PATH>/soft/libraries/netcdf/4.3.3-f4.4.1/cnk-xl/current/</NETCDF_PATH>
<PETSC_PATH>/soft/libraries/petsc/3.5.3.1</PETSC_PATH>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>/soft/libraries/pnetcdf/1.6.0/cnk-xl/current/</PNETCDF_PATH>
<SCC> mpixlc_r </SCC>
<SFC> mpixlf2003_r </SFC>
<SLIBS>
<append>-L$NETCDF_PATH/lib -lnetcdff -lnetcdf -L$HDF5_PATH/lib -lhdf5_hl -lhdf5 -L/soft/libraries/alcf/current/xl/ZLIB/lib -lz -L/soft/libraries/alcf/current/xl/LAPACK/lib -llapack -L/soft/libraries/alcf/current/xl/BLAS/lib -lblas -L/bgsys/drivers/ppcfloor/comm/sys/lib </append>
<append compile_threaded="TRUE"> -L$ENV{IBM_MAIN_DIR}/xlf/bg/14.1/bglib64 -lxlfmath -lxlf90_r -lxlopt -lxl -L$ENV{IBM_MAIN_DIR}/xlsmp/bg/3.1/bglib64 -lxlsmp </append>
</SLIBS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- GNU settings for "oic5": all tools are absolute paths into a shared devtools install. -->
<compiler MACH="oic5" COMPILER="gnu">
<MPICC>/projects/cesm/devtools/mpich-3.0.4-gcc4.8.1/bin/mpicc</MPICC>
<MPIFC>/projects/cesm/devtools/mpich-3.0.4-gcc4.8.1/bin/mpif90</MPIFC>
<NETCDF_PATH>/projects/cesm/devtools/netcdf-4.1.3-gcc4.8.1-mpich3.0.4/</NETCDF_PATH>
<SCC>/projects/cesm/devtools/gcc-4.8.1/bin/gcc</SCC>
<SCXX>/projects/cesm/devtools/gcc-4.8.1/bin/g++</SCXX>
<SFC>/projects/cesm/devtools/gcc-4.8.1/bin/gfortran</SFC>
<SLIBS>
<append>-L/user/lib64 -llapack -lblas -lnetcdff </append>
</SLIBS>
</compiler>
<!-- PGI settings for "oic5": only the NetCDF location differs from the defaults. -->
<compiler MACH="oic5" COMPILER="pgi">
<NETCDF_PATH>/home/zdr/opt/netcdf-4.1.3_pgf95</NETCDF_PATH>
</compiler>
<!-- Intel settings for "olympus" (Lustre filesystem; NetCDF root derived from NETCDF_LIB env). -->
<compiler MACH="olympus" COMPILER="intel">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_LIB}/..</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -lpmi </base>
</SLIBS>
</compiler>
<!-- PGI settings for "olympus"; mirrors the Intel entry above. -->
<compiler MACH="olympus" COMPILER="pgi">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_LIB}/..</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -lpmi </base>
</SLIBS>
</compiler>
<!-- Intel settings for "sandiatoss3" (openmpi via MPIHOME env; MKL selected per MPI library). -->
<compiler MACH="sandiatoss3" COMPILER="intel">
<ALBANY_PATH>/projects/ccsm/AlbanyTrilinos_20190904/albany-build/install</ALBANY_PATH>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<ESMF_LIBDIR>/projects/ccsm/esmf-6.3.0rp1/lib/libO/Linux.intel.64.openmpi.default</ESMF_LIBDIR>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<MPI_PATH MPILIB="openmpi">$ENV{MPIHOME}</MPI_PATH>
<NETCDF_PATH>$ENV{NETCDFROOT}</NETCDF_PATH>
<PFUNIT_PATH MPILIB="mpi-serial" compile_threaded="FALSE">/projects/ccsm/pfunit/3.2.9/mpi-serial</PFUNIT_PATH>
<PIO_FILESYSTEM_HINTS>lustre </PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<append>$SHELL{$NETCDF_PATH/bin/nf-config --flibs} -L/projects/ccsm/BLAS-intel -lblas_LINUX -L$ENV{MKL_LIBS} -lmkl_rt</append>
<append MPILIB="openmpi"> -mkl=cluster </append>
<append MPILIB="mpi-serial"> -mkl </append>
</SLIBS>
</compiler>
<!-- Intel settings for "sooty" (Lustre filesystem; MKL single-dynamic-library link). -->
<compiler MACH="sooty" COMPILER="intel">
<CFLAGS>
<append> -std=c99 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX -DCPRINTEL </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 -debug minimal </append>
<append DEBUG="TRUE"> -g -traceback -O0 -fpe0 -check all -check noarg_temp_created -ftrapuv </append>
</FFLAGS>
<NETCDF_PATH>$ENV{NETCDF_PATH}</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<PNETCDF_PATH>$ENV{PNETCDFROOT}</PNETCDF_PATH>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -lpmi -L$ENV{MKLROOT} -lmkl_rt</base>
</SLIBS>
</compiler>
<!-- PGI settings for "sooty"; NetCDF root derived from NETCDF_LIB env. -->
<compiler MACH="sooty" COMPILER="pgi">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<NETCDF_PATH> $ENV{NETCDF_LIB}/..</NETCDF_PATH>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<SLIBS>
<base> -L$NETCDF_PATH/lib -lnetcdf -lnetcdff -lpmi </base>
</SLIBS>
</compiler>
<!-- Intel settings for TACC "stampede2"; currently targets AVX2 (-xCORE-AVX2), with
     commented-out alternatives for AVX-512/KNL tuning left in place for reference. -->
<compiler MACH="stampede2" COMPILER="intel">
<CFLAGS>
<!--ADD_FFLAGS DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align -qopt-zmm-usage=high</ADD_FFLAGS-->
<append> -xCORE-AVX2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
<append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
<append> -DARCH_MIC_KNL </append>
</CPPDEFS>
<FFLAGS>
<base> -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model consistent -fimf-use-svml </base>
<append DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align</append>
<!--ADD_FFLAGS> -xCORE-AVX512 </ADD_FFLAGS-->
<!--ADD_CFLAGS> -xCORE-AVX512 </ADD_CFLAGS-->
<append> -xCORE-AVX2 </append>
</FFLAGS>
<HDF5_PATH>$ENV{TACC_HDF5_DIR}</HDF5_PATH>
<MPICC> mpicc </MPICC>
<MPICXX> mpicxx </MPICXX>
<MPIFC> mpif90 </MPIFC>
<MPI_LIB_NAME>impi</MPI_LIB_NAME>
<NETCDF_PATH MPILIB="impi">$ENV{TACC_NETCDF_DIR}</NETCDF_PATH>
<NETCDF_PATH MPILIB="mpi-serial">$ENV{TACC_NETCDF_DIR}</NETCDF_PATH>
<PETSC_PATH>$ENV{PETSC_DIR}</PETSC_PATH>
<PNETCDF_PATH MPILIB="impi">$ENV{TACC_PNETCDF_DIR}</PNETCDF_PATH>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<SFC> ifort </SFC>
<SLIBS>
<append MPILIB="impi"> -L$NETCDF_PATH -lnetcdff -Wl,--as-needed,-L$NETCDF_PATH/lib -lnetcdff -lnetcdf </append>
<append MPILIB="mpi-serial"> -L$NETCDF_PATH -lnetcdff -Wl,--as-needed,-L$NETCDF_PATH/lib -lnetcdff -lnetcdf </append>
<append> -mkl -lpthread </append>
</SLIBS>
</compiler>
<!-- GNU settings for OLCF "summit" (GPFS filesystem; ESSL + netlib LAPACK via LDFLAGS). -->
<compiler MACH="summit" COMPILER="gnu">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<LDFLAGS>
<append>-L$ENV{NETCDF_C_PATH}/lib -lnetcdf -L$ENV{NETCDF_FORTRAN_PATH}/lib -lnetcdff -L$ENV{ESSL_PATH}/lib64 -lessl -L$ENV{NETLIB_LAPACK_PATH}/lib -llapack</append>
<!-- PNetCDF/HDF5 are only linked for true MPI builds. -->
<append MPILIB="!mpi-serial"> -L$ENV{PNETCDF_PATH}/lib -lpnetcdf -L$ENV{HDF5_PATH}/lib -lhdf5_hl -lhdf5 </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpiCC </MPICXX>
<MPIFC> mpif90 </MPIFC>
<SCC> gcc </SCC>
<SCXX> g++ </SCXX>
<SFC> gfortran </SFC>
<PIO_FILESYSTEM_HINTS>gpfs</PIO_FILESYSTEM_HINTS>
<NETCDF_C_PATH>$ENV{NETCDF_C_PATH}</NETCDF_C_PATH>
<NETCDF_FORTRAN_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_FORTRAN_PATH>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- IBM XL settings for "summit". -->
<compiler MACH="summit" COMPILER="ibm">
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DLINUX </append>
</CPPDEFS>
<FFLAGS>
<append> -qzerosize -qfree=f90 -qxlf2003=polymorphic</append>
<append> -qspillsize=2500 -qextname=flush </append>
</FFLAGS>
<LDFLAGS>
<append>-lxlopt -lxl -lxlsmp -L$ENV{NETCDF_C_PATH}/lib -lnetcdf -L$ENV{NETCDF_FORTRAN_PATH}/lib -lnetcdff -L$ENV{ESSL_PATH}/lib64 -lessl -L$ENV{NETLIB_LAPACK_PATH}/lib -llapack</append>
<append MPILIB="!mpi-serial"> -L$ENV{PNETCDF_PATH}/lib -lpnetcdf -L$ENV{HDF5_PATH}/lib -lhdf5_hl -lhdf5 </append>
<append> -Wl,--relax -Wl,--allow-multiple-definition </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpiCC </MPICXX>
<MPIFC> mpif90 </MPIFC>
<PIO_FILESYSTEM_HINTS>gpfs</PIO_FILESYSTEM_HINTS>
<SCC> xlc_r </SCC>
<SFC> xlf90_r </SFC>
<SCXX> xlc++_r </SCXX>
<NETCDF_C_PATH>$ENV{NETCDF_C_PATH}</NETCDF_C_PATH>
<NETCDF_FORTRAN_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_FORTRAN_PATH>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- PGI settings for "summit". -->
<compiler MACH="summit" COMPILER="pgi">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 -DSUMMITDEV_PGI </append>
</FFLAGS>
<LDFLAGS>
<append>-L$ENV{NETCDF_C_PATH}/lib -lnetcdf -L$ENV{NETCDF_FORTRAN_PATH}/lib -lnetcdff -L$ENV{ESSL_PATH}/lib64 -lessl -L$ENV{NETLIB_LAPACK_PATH}/lib -llapack</append>
<append MPILIB="!mpi-serial"> -L$ENV{PNETCDF_PATH}/lib -lpnetcdf -L$ENV{HDF5_PATH}/lib -lhdf5_hl -lhdf5 </append>
</LDFLAGS>
<!--
Summit (backup in case defaults are not picking up):
<CXX_LIBS>
<base>/sw/summit/gcc/6.4.0/lib/gcc/powerpc64le-none-linux-gnu/6.4.0/crtbegin.o -L/sw/summit/gcc/6.4.0/lib64 -lstdc++ -latomic</base>
</CXX_LIBS>
-->
<MPICC> mpicc </MPICC>
<MPICXX> mpiCC </MPICXX>
<MPIFC> mpif90 </MPIFC>
<PIO_FILESYSTEM_HINTS>gpfs</PIO_FILESYSTEM_HINTS>
<SCC> pgcc </SCC>
<SCXX> pgc++ </SCXX>
<SFC> pgfortran </SFC>
<NETCDF_C_PATH>$ENV{NETCDF_C_PATH}</NETCDF_C_PATH>
<NETCDF_FORTRAN_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_FORTRAN_PATH>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- PGI+OpenACC settings for "summit": adds the -ta=tesla target for Volta (cc70) GPUs. -->
<compiler MACH="summit" COMPILER="pgiacc">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 -DSUMMITDEV_PGI </append>
</FFLAGS>
<LDFLAGS>
<append>-ta=tesla:cc70,pinned</append>
<append>-L$ENV{NETCDF_C_PATH}/lib -lnetcdf -L$ENV{NETCDF_FORTRAN_PATH}/lib -lnetcdff -L$ENV{ESSL_PATH}/lib64 -lessl -L$ENV{NETLIB_LAPACK_PATH}/lib -llapack</append>
<append MPILIB="!mpi-serial"> -L$ENV{PNETCDF_PATH}/lib -lpnetcdf -L$ENV{HDF5_PATH}/lib -lhdf5_hl -lhdf5 </append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpiCC </MPICXX>
<MPIFC> mpif90 </MPIFC>
<PIO_FILESYSTEM_HINTS>gpfs</PIO_FILESYSTEM_HINTS>
<SCC> pgcc </SCC>
<SFC> pgfortran </SFC>
<NETCDF_C_PATH>$ENV{NETCDF_C_PATH}</NETCDF_C_PATH>
<NETCDF_FORTRAN_PATH>$ENV{NETCDF_FORTRAN_PATH}</NETCDF_FORTRAN_PATH>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- IBM XL settings for "summitdev" (Lustre filesystem). -->
<compiler MACH="summitdev" COMPILER="ibm">
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<FFLAGS>
<append> -qzerosize -qfree=f90 -qxlf2003=polymorphic</append>
</FFLAGS>
<LDFLAGS>
<append>-L$ENV{NETCDF_C_PATH}/lib -lnetcdf -L$ENV{NETCDF_FORTRAN_PATH}/lib -lnetcdff -L$ENV{PNETCDF_PATH}/lib -lpnetcdf -L$ENV{HDF5_PATH}/lib -lhdf5_hl -lhdf5 -L$ENV{ESSL_PATH}/lib64 -lessl -L$ENV{NETLIB_LAPACK_PATH}/lib64 -llapack</append>
</LDFLAGS>
<MPICC> mpixlc </MPICC>
<MPICXX> mpixlC </MPICXX>
<MPIFC> mpixlf </MPIFC>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<SCC> xlc_r </SCC>
<SFC> xlf_r </SFC>
</compiler>
<!-- PGI settings for "summitdev". -->
<compiler MACH="summitdev" COMPILER="pgi">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 -DSUMMITDEV_PGI </append>
</FFLAGS>
<LDFLAGS>
<append>-L$ENV{NETCDF_C_PATH}/lib -lnetcdf -L$ENV{NETCDF_FORTRAN_PATH}/lib -lnetcdff -L$ENV{PNETCDF_PATH}/lib -lpnetcdf -L$ENV{HDF5_PATH}/lib -lhdf5_hl -lhdf5 -L$ENV{ESSL_PATH}/lib64 -lessl -L$ENV{NETLIB_LAPACK_PATH}/lib -llapack</append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpiCC </MPICXX>
<MPIFC> mpif90 </MPIFC>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<SCC> pgcc </SCC>
<SFC> pgfortran </SFC>
</compiler>
<!-- PGI+OpenACC settings for "summitdev": targets Pascal (cc60) GPUs with CUDA 8.0. -->
<compiler MACH="summitdev" COMPILER="pgiacc">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 -DSUMMITDEV_PGI </append>
</FFLAGS>
<LDFLAGS>
<!-- Specifying cc60 implies cuda8.0, just making sure -->
<append>-ta=tesla:cc60,cuda8.0,pinned</append>
<append>-L$ENV{NETCDF_C_PATH}/lib -lnetcdf -L$ENV{NETCDF_FORTRAN_PATH}/lib -lnetcdff -L$ENV{PNETCDF_PATH}/lib -lpnetcdf -L$ENV{HDF5_PATH}/lib -lhdf5_hl -lhdf5 -L$ENV{ESSL_PATH}/lib64 -lessl -L$ENV{NETLIB_LAPACK_PATH}/lib -llapack</append>
</LDFLAGS>
<MPICC> mpicc </MPICC>
<MPICXX> mpiCC </MPICXX>
<MPIFC> mpif90 </MPIFC>
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<SCC> pgcc </SCC>
<SFC> pgfortran </SFC>
</compiler>
<!-- Intel settings for LLNL "syrah" (mvapich2; NetCDF-Fortran install path hard-coded). -->
<compiler MACH="syrah" COMPILER="intel">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DNO_SHR_VMATH -DCNL </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append DEBUG="TRUE"> -g -traceback -O0 -fpe0 -check all -check noarg_temp_created -ftrapuv </append>
</FFLAGS>
<LDFLAGS>
<append> -llapack -lblas</append>
</LDFLAGS>
<MPI_LIB_NAME> mpich</MPI_LIB_NAME>
<MPI_PATH>/usr/tce/packages/mvapich2/mvapich2-2.2-intel-18.0.1/</MPI_PATH>
<NETCDF_PATH>/usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-18.0.1/</NETCDF_PATH>
<SLIBS>
<append>$SHELL{/usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-18.0.1/bin/nf-config --flibs}</append>
</SLIBS>
</compiler>
<!-- Intel settings for LLNL "quartz"; identical to the syrah entry above. -->
<compiler MACH="quartz" COMPILER="intel">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DNO_SHR_VMATH -DCNL </append>
</CPPDEFS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append DEBUG="TRUE"> -g -traceback -O0 -fpe0 -check all -check noarg_temp_created -ftrapuv </append>
</FFLAGS>
<LDFLAGS>
<append> -llapack -lblas</append>
</LDFLAGS>
<MPI_LIB_NAME> mpich</MPI_LIB_NAME>
<MPI_PATH>/usr/tce/packages/mvapich2/mvapich2-2.2-intel-18.0.1/</MPI_PATH>
<NETCDF_PATH>/usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-18.0.1/</NETCDF_PATH>
<SLIBS>
<append>$SHELL{/usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-18.0.1/bin/nf-config --flibs}</append>
</SLIBS>
</compiler>
<!-- GNU settings for ALCF "theta"; link flags come from nf-config on PATH. -->
<compiler MACH="theta" COMPILER="gnu">
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<SLIBS>
<append>$SHELL{nf-config --flibs}</append>
</SLIBS>
</compiler>
<!-- Intel settings for "theta" (KNL nodes: -DARCH_MIC_KNL). -->
<compiler MACH="theta" COMPILER="intel">
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DARCH_MIC_KNL </append>
</CPPDEFS>
<FFLAGS>
<base>-convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model consistent</base>
<append DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align -fp-speculation=off</append>
<append> -DHAVE_ERF_INTRINSICS </append>
</FFLAGS>
<SCC> icc </SCC>
<SCXX> icpc </SCXX>
<SFC> ifort </SFC>
<SLIBS>
<append> -L$ENV{NETCDF_DIR}/lib -lnetcdff -L$ENV{NETCDF_DIR}/lib -lnetcdf -Wl,-rpath -Wl,$ENV{NETCDF_DIR}/lib </append>
<append> -mkl -lpthread </append>
</SLIBS>
</compiler>
<!-- Intel settings for ANL "jlse" (Intel MPI wrappers). -->
<compiler MACH="jlse" COMPILER="intel">
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CPPDEFS>
<append> -DHAVE_SLASHPROC </append>
</CPPDEFS>
<FFLAGS>
<base>-convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model consistent</base>
<append DEBUG="FALSE"> -O2 -debug minimal -qno-opt-dynamic-align -fp-speculation=off</append>
</FFLAGS>
<MPIFC>mpiifort</MPIFC>
<MPICC>mpiicc</MPICC>
<MPICXX>mpiicpc</MPICXX>
<SCC>icc</SCC>
<SCXX>icpc</SCXX>
<SFC>ifort</SFC>
<SLIBS>
<append>$SHELL{$(NETCDF_PATH)/bin/nf-config --flibs} -Wl,-rpath -Wl,$ENV{NETCDF_PATH}/lib -mkl</append>
</SLIBS>
</compiler>
<!-- GNU settings for "jlse". -->
<compiler MACH="jlse" COMPILER="gnu">
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<SLIBS>
<append>$SHELL{$(NETCDF_PATH)/bin/nf-config --flibs} -Wl,-rpath -Wl,$(NETCDF_PATH)/lib -L/home/azamat/soft/libs -llapack -lblas</append>
</SLIBS>
</compiler>
<!-- Intel settings for OLCF "titan"; per-MPI-library SLIBS with an explicit mpi-serial workaround. -->
<compiler MACH="titan" COMPILER="intel">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<SLIBS>
<append MPILIB="mpich">$SHELL{nf-config --flibs} -mkl=cluster </append>
<append MPILIB="mpich2">$SHELL{nf-config --flibs} -mkl=cluster </append>
<append MPILIB="mpt">$SHELL{nf-config --flibs} -mkl=cluster </append>
<append MPILIB="openmpi">$SHELL{nf-config --flibs} -mkl=cluster </append>
<append MPILIB="mvapich">$SHELL{nf-config --flibs} -mkl=cluster </append>
<append MPILIB="impi">$SHELL{nf-config --flibs} -mkl=cluster </append>
<!-- mx: the cray-netcdf has wrong configuration on titan, so have to specify the lib path explicitly -->
<append MPILIB="mpi-serial"> -L/opt/cray/netcdf/4.4.1.1.3/INTEL/16.0/lib -lnetcdff -L/opt/cray/hdf5/1.10.0.3/GNU/4.9/lib -lnetcdf -mkl </append>
</SLIBS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- PGI settings for "titan". -->
<compiler MACH="titan" COMPILER="pgi">
<ALBANY_PATH>/ccs/proj/cli106/AlbanyTrilinos/Albany/build/install</ALBANY_PATH>
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CXX_LIBS>
<base> -lfmpich -lmpichf90_pgi $ENV{PGI_PATH}/linux86-64/$ENV{PGI_VERSION}/lib/f90main.o /opt/gcc/default/snos/lib64/libstdc++.a </base>
</CXX_LIBS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
<append MODEL="glc"> -target-cpu=istanbul </append>
</FFLAGS>
<SLIBS>
<append MPILIB="mpich">$SHELL{nf-config --flibs} </append>
<append MPILIB="mpich2">$SHELL{nf-config --flibs} </append>
<append MPILIB="mpt">$SHELL{nf-config --flibs} </append>
<append MPILIB="openmpi">$SHELL{nf-config --flibs} </append>
<append MPILIB="mvapich">$SHELL{nf-config --flibs} </append>
<append MPILIB="impi">$SHELL{nf-config --flibs} </append>
<!-- mx: the cray-netcdf has wrong configuration on titan, so have to specify the lib path explicitly -->
<append MPILIB="mpi-serial"> -L/opt/cray/netcdf/4.4.1.1.3/PGI/15.3/lib -lnetcdff -L/opt/cray/hdf5/1.10.0.3/GNU/4.9/lib -lnetcdf </append>
</SLIBS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- PGI+OpenACC settings for "titan": targets Kepler (cc35) GPUs with CUDA 7.5. -->
<compiler MACH="titan" COMPILER="pgiacc">
<CFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</CFLAGS>
<CONFIG_ARGS>
<base> --host=Linux </base>
</CONFIG_ARGS>
<CXX_LIBS>
<base> -lfmpich -lmpichf90_pgi $ENV{PGI_PATH}/linux86-64/$ENV{PGI_VERSION}/lib/f90main.o </base>
</CXX_LIBS>
<FFLAGS>
<append DEBUG="FALSE"> -O2 </append>
</FFLAGS>
<LDFLAGS>
<append>-ta=nvidia,cc35,cuda7.5</append>
</LDFLAGS>
<SLIBS>
<append>$SHELL{$ENV{NETCDF_PATH}/bin/nf-config --flibs} </append>
</SLIBS>
<SUPPORTS_CXX>TRUE</SUPPORTS_CXX>
</compiler>
<!-- GNU settings for LANL "grizzly" (Lustre; static sequential MKL link for ScaLAPACK). -->
<compiler MACH="grizzly" COMPILER="gnu">
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<MPICC>mpicc</MPICC>
<MPIFC>mpif90</MPIFC>
<MPICXX>mpic++</MPICXX>
<SFC>gfortran</SFC>
<SCC>gcc</SCC>
<SCXX>g++</SCXX>
<SLIBS>
<append>$SHELL{$ENV{NETCDF_PATH}/bin/nf-config --flibs} -llapack -lblas</append>
<append>$ENV{MKLROOT}/lib/intel64/libmkl_scalapack_lp64.a -Wl,--start-group $ENV{MKLROOT}/lib/intel64/libmkl_intel_lp64.a $ENV{MKLROOT}/lib/intel64/libmkl_core.a $ENV{MKLROOT}/lib/intel64/libmkl_sequential.a -Wl,--end-group $ENV{MKLROOT}/lib/intel64/libmkl_blacs_intelmpi_lp64.a -lpthread -lm -z muldefs</append>
</SLIBS>
<CXX_LIBS>
<base>-lstdc++ -lmpi_cxx</base>
</CXX_LIBS>
</compiler>
<!-- Intel settings for "grizzly"; uses the simpler -mkl dynamic link. -->
<compiler MACH="grizzly" COMPILER="intel">
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<MPICC>mpicc</MPICC>
<MPIFC>mpif90</MPIFC>
<MPICXX>mpic++</MPICXX>
<SFC>ifort</SFC>
<SCC>icc</SCC>
<SCXX>icpc</SCXX>
<SLIBS>
<append>$SHELL{$ENV{NETCDF_PATH}/bin/nf-config --flibs} -llapack -lblas</append>
<append>-mkl -lpthread</append>
</SLIBS>
<CXX_LIBS>
<base>-lstdc++ -lmpi_cxx</base>
</CXX_LIBS>
</compiler>
<!-- GNU settings for LANL "badger"; mirrors the grizzly GNU entry. -->
<compiler MACH="badger" COMPILER="gnu">
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<MPICC>mpicc</MPICC>
<MPIFC>mpif90</MPIFC>
<MPICXX>mpic++</MPICXX>
<SFC>gfortran</SFC>
<SCC>gcc</SCC>
<SCXX>g++</SCXX>
<SLIBS>
<append>$SHELL{$ENV{NETCDF_PATH}/bin/nf-config --flibs} -llapack -lblas</append>
<append>$ENV{MKLROOT}/lib/intel64/libmkl_scalapack_lp64.a -Wl,--start-group $ENV{MKLROOT}/lib/intel64/libmkl_intel_lp64.a $ENV{MKLROOT}/lib/intel64/libmkl_core.a $ENV{MKLROOT}/lib/intel64/libmkl_sequential.a -Wl,--end-group $ENV{MKLROOT}/lib/intel64/libmkl_blacs_intelmpi_lp64.a -lpthread -lm -z muldefs</append>
</SLIBS>
<CXX_LIBS>
<base>-lstdc++ -lmpi_cxx</base>
</CXX_LIBS>
</compiler>
<!-- Intel settings for LANL "badger".
     NOTE: the threaded-build OpenMP flags were previously written as malformed
     elements of the form '<base>compile_threaded="TRUE"> -qopenmp</base>', which
     would inject the literal attribute text into the flag string. They are
     corrected here to conditional <append compile_threaded="TRUE"> elements,
     the pattern used throughout the rest of this file. -->
<compiler MACH="badger" COMPILER="intel">
<PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
<MPICC>mpicc</MPICC>
<MPIFC>mpif90</MPIFC>
<MPICXX>mpic++</MPICXX>
<SFC>ifort</SFC>
<SCC>icc</SCC>
<SCXX>icpc</SCXX>
<SLIBS>
<append>$SHELL{$ENV{NETCDF_PATH}/bin/nf-config --flibs} -llapack -lblas</append>
<append>-mkl -lpthread</append>
</SLIBS>
<CXX_LIBS>
<base>-lstdc++ -lmpi_cxx</base>
</CXX_LIBS>
<CFLAGS>
<append compile_threaded="TRUE"> -qopenmp</append>
</CFLAGS>
<FFLAGS>
<append compile_threaded="TRUE"> -qopenmp</append>
</FFLAGS>
<LDFLAGS>
<append compile_threaded="TRUE"> -qopenmp</append>
</LDFLAGS>
</compiler>
<!-- Template entry for a user-defined machine: empty placeholders that must be
     filled in by the user (NETCDF_PATH is deliberately set to a sentinel value). -->
<compiler MACH="userdefined">
<CONFIG_ARGS>
<base/>
</CONFIG_ARGS>
<CPPDEFS>
<append/>
</CPPDEFS>
<ESMF_LIBDIR/>
<MPI_LIB_NAME/>
<MPI_PATH/>
<NETCDF_PATH> USERDEFINED_MUST_EDIT_THIS</NETCDF_PATH>
<PNETCDF_PATH/>
<SLIBS>
<append># USERDEFINED $SHELL{$NETCDF_PATH/bin/nf-config --flibs}</append>
</SLIBS>
</compiler>
</config_compilers>
E3SM XML settings for supported machines.
<?xml version="1.0"?>
<config_machines version="2.0">
<!--
===============================================================
COMPILER and COMPILERS
===============================================================
If a machine supports multiple compilers - then
- the settings for COMPILERS should reflect the supported compilers
as a comma separated string
- the setting for COMPILER should be the default compiler
(which is one of the values in COMPILERS)
===============================================================
MPILIB and MPILIBS
===============================================================
If a machine supports only one MPILIB - then
the setting for MPILIB and MPILIBS should be blank ("")
If a machine supports multiple mpi libraries (e.g. mpich and openmpi)
- the settings for MPILIBS should reflect the supported mpi libraries
as a comma separated string
The default settings for COMPILERS and MPILIBS is blank (in config_machines.xml)
Normally variable substitutions are not made until the case scripts are run, however variables
of the form $ENV{VARIABLE_NAME} are substituted in create_newcase from the environment
variable of the same name if it exists.
===============================================================
PROJECT_REQUIRED
===============================================================
A machine may need the PROJECT xml variable to be defined either because it is
used in some paths, or because it is used to give an account number in the job
submission script. If either of these are the case, then PROJECT_REQUIRED
should be set to TRUE for the given machine.
walltimes:
Denotes the walltimes that can be used for a particular machine.
walltime: as before, if default="true" is defined, this walltime will be used
by default.
Alternatively, ccsm_estcost must be used to choose the queue based on the estimated cost of the run.
mpirun: the mpirun command that will be used to actually launch the model.
The attributes used to choose the mpirun command are:
mpilib: can either be 'default' the name of an mpi library, or a compiler name so one can choose the mpirun
based on the mpi library in use.
the 'executable' tag must have arguments required for the chosen mpirun, as well as the executable name.
unit_testing: can be 'true' or 'false'.
This allows using a different mpirun command to launch unit tests
-->
<machine MACH="cori-haswell">
<!-- Haswell partition of Cori (NERSC): 32 MPI tasks/node, no hyperthread oversubscription by default. -->
<DESC>Cori. XC40 Cray system at NERSC. Haswell partition. os is CNL, 32 pes/node, batch system is SLURM</DESC>
<!-- NOTE(review): this regex will not match real cori hostnames; presumably hostname detection
     is meant to select cori-knl by default (its regex is plain "cori") - confirm. -->
<NODENAME_REGEX>cori-knl-is-default</NODENAME_REGEX>
<OS>CNL</OS>
<COMPILERS>intel,gnu</COMPILERS>
<MPILIBS>mpt</MPILIBS>
<PROJECT>acme</PROJECT>
<SAVE_TIMING_DIR>/project/projectdirs/acme</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>acme,m3411,m3412</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>$ENV{SCRATCH}/acme_scratch/cori-haswell</CIME_OUTPUT_ROOT>
<CIME_HTML_ROOT>/global/project/projectdirs/acme/www/$ENV{USER}</CIME_HTML_ROOT>
<CIME_URL_ROOT>http://portal.nersc.gov/project/acme/$ENV{USER}</CIME_URL_ROOT>
<DIN_LOC_ROOT>/project/projectdirs/acme/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/project/projectdirs/acme/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/project/projectdirs/acme/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/project/projectdirs/acme/tools/cprnc.cori/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<NTEST_PARALLEL_JOBS>4</NTEST_PARALLEL_JOBS>
<BATCH_SYSTEM>nersc_slurm</BATCH_SYSTEM>
<SUPPORTED_BY>e3sm</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>32</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<!-- Launch via srun; -c (cpus per task) is computed from 64 hardware threads / tasks-per-node,
     and cpu binding switches from cores to threads when oversubscribing 32 physical cores. -->
<mpirun mpilib="default">
<executable>srun</executable>
<arguments>
<arg name="label"> --label</arg>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
<arg name="thread_count">-c $SHELL{echo 64/`./xmlquery --value MAX_MPITASKS_PER_NODE`|bc}</arg>
<arg name="binding"> $SHELL{if [ 32 -ge `./xmlquery --value MAX_MPITASKS_PER_NODE` ]; then echo "--cpu_bind=cores"; else echo "--cpu_bind=threads";fi;} </arg>
<arg name="placement"> -m plane=$SHELL{echo `./xmlquery --value MAX_MPITASKS_PER_NODE`}</arg>
</arguments>
</mpirun>
<module_system type="module">
<init_path lang="perl">/opt/modules/default/init/perl</init_path>
<init_path lang="python">/opt/modules/default/init/python</init_path>
<init_path lang="sh">/opt/modules/default/init/sh</init_path>
<init_path lang="csh">/opt/modules/default/init/csh</init_path>
<cmd_path lang="perl">/opt/modules/default/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/opt/modules/default/bin/modulecmd python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<!-- Unload everything that could conflict to reach a clean baseline state. -->
<modules>
<command name="rm">PrgEnv-intel</command>
<command name="rm">PrgEnv-cray</command>
<command name="rm">PrgEnv-gnu</command>
<command name="rm">intel</command>
<command name="rm">cce</command>
<command name="rm">gcc</command>
<command name="rm">cray-parallel-netcdf</command>
<command name="rm">cray-hdf5-parallel</command>
<command name="rm">pmi</command>
<command name="rm">cray-libsci</command>
<command name="rm">cray-mpich2</command>
<command name="rm">cray-mpich</command>
<command name="rm">cray-netcdf</command>
<command name="rm">cray-hdf5</command>
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="rm">craype-sandybridge</command>
<command name="rm">craype-ivybridge</command>
<command name="rm">craype</command>
<command name="rm">craype-hugepages2M</command>
<command name="rm">papi</command>
<command name="rm">cmake</command>
<command name="rm">cray-petsc</command>
<command name="rm">esmf</command>
<command name="rm">zlib</command>
<!-- first load basic defaults, then remove/swap/load as necessary -->
<command name="load">craype</command>
<command name="load">PrgEnv-intel</command>
<command name="load">cray-mpich</command>
<command name="rm">craype-mic-knl</command>
<command name="load">craype-haswell</command>
</modules>
<modules mpilib="mpt">
<command name="swap">cray-mpich cray-mpich/7.7.6</command>
</modules>
<modules compiler="intel">
<command name="load">PrgEnv-intel/6.0.5</command>
<command name="rm">intel</command>
<command name="load">intel/19.0.3.199</command>
</modules>
<modules compiler="gnu">
<command name="swap">PrgEnv-intel PrgEnv-gnu/6.0.5</command>
<command name="rm">gcc</command>
<command name="load">gcc/8.2.0</command>
<command name="rm">cray-libsci</command>
<command name="load">cray-libsci/19.02.1</command>
</modules>
<!-- Pin craype/pmi versions and (re)select the Haswell target after compiler swaps. -->
<modules>
<command name="swap">craype craype/2.5.18</command>
<command name="rm">pmi</command>
<command name="load">pmi/5.0.14</command>
<command name="rm">craype-mic-knl</command>
<command name="load">craype-haswell</command>
</modules>
<modules mpilib="mpi-serial">
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="rm">cray-hdf5-parallel</command>
<command name="rm">cray-parallel-netcdf</command>
<command name="load">cray-netcdf/4.6.1.3</command>
<command name="load">cray-hdf5/1.10.2.0</command>
</modules>
<modules mpilib="!mpi-serial">
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="load">cray-netcdf-hdf5parallel/4.6.1.3</command>
<command name="load">cray-hdf5-parallel/1.10.2.0</command>
<command name="load">cray-parallel-netcdf/1.8.1.4</command>
</modules>
<modules>
<command name="rm">git</command>
<command name="load">git</command>
<command name="rm">cmake</command>
<command name="load">cmake/3.14.4</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="MPICH_ENV_DISPLAY">1</env>
<env name="MPICH_VERSION_DISPLAY">1</env>
<!--env name="MPICH_CPUMASK_DISPLAY">1</env-->
<env name="OMP_STACKSIZE">128M</env>
<env name="OMP_PROC_BIND">spread</env>
<env name="OMP_PLACES">threads</env>
<env name="HDF5_USE_FILE_LOCKING">FALSE</env>
<env name="PERL5LIB">/project/projectdirs/acme/perl5/lib/perl5/x86_64-linux-thread-multi</env>
</environment_variables>
<environment_variables compiler="intel">
<env name="FORT_BUFFERED">yes</env>
</environment_variables>
</machine>
<!-- KNL nodes of Cori -->
<machine MACH="cori-knl">
<!-- KNL partition of Cori (NERSC): 68 cores/node but only 64 are used for MPI tasks;
     MAX_TASKS_PER_NODE 128 allows 2 threads per task across the 64 used cores. -->
<DESC>Cori. XC40 Cray system at NERSC. KNL partition. os is CNL, 68 pes/node (for now only use 64), batch system is SLURM</DESC>
<NODENAME_REGEX>cori</NODENAME_REGEX>
<OS>CNL</OS>
<COMPILERS>intel,gnu,intel19</COMPILERS>
<MPILIBS>mpt,impi</MPILIBS>
<PROJECT>acme</PROJECT>
<SAVE_TIMING_DIR>/project/projectdirs/acme</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>acme,m3411,m3412</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>$ENV{SCRATCH}/acme_scratch/cori-knl</CIME_OUTPUT_ROOT>
<CIME_HTML_ROOT>/global/project/projectdirs/acme/www/$ENV{USER}</CIME_HTML_ROOT>
<CIME_URL_ROOT>http://portal.nersc.gov/project/acme/$ENV{USER}</CIME_URL_ROOT>
<DIN_LOC_ROOT>/project/projectdirs/acme/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/project/projectdirs/acme/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/project/projectdirs/acme/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/project/projectdirs/acme/tools/cprnc.cori/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<NTEST_PARALLEL_JOBS>4</NTEST_PARALLEL_JOBS>
<BATCH_SYSTEM>nersc_slurm</BATCH_SYSTEM>
<SUPPORTED_BY>e3sm</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<!-- Launch via srun; -c is derived from 272 hardware threads / tasks-per-node, rounded
     down to a multiple of 4 (4 hyperthreads per KNL core) when tasks fit on 68 cores. -->
<mpirun mpilib="default">
<executable>srun</executable>
<arguments>
<arg name="label"> --label</arg>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
<arg name="thread_count">-c $SHELL{mpn=`./xmlquery --value MAX_MPITASKS_PER_NODE`; if [ 68 -ge $mpn ]; then c0=`expr 272 / $mpn`; c1=`expr $c0 / 4`; cflag=`expr $c1 \* 4`; echo $cflag|bc ; else echo 272/$mpn|bc;fi;} </arg>
<arg name="binding"> $SHELL{if [ 68 -ge `./xmlquery --value MAX_MPITASKS_PER_NODE` ]; then echo "--cpu_bind=cores"; else echo "--cpu_bind=threads";fi;} </arg>
<arg name="placement"> -m plane=$SHELL{echo `./xmlquery --value MAX_MPITASKS_PER_NODE`}</arg>
</arguments>
</mpirun>
<module_system type="module">
<init_path lang="perl">/opt/modules/default/init/perl</init_path>
<init_path lang="python">/opt/modules/default/init/python</init_path>
<init_path lang="sh">/opt/modules/default/init/sh</init_path>
<init_path lang="csh">/opt/modules/default/init/csh</init_path>
<cmd_path lang="perl">/opt/modules/default/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/opt/modules/default/bin/modulecmd python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<!-- Unload everything that could conflict to reach a clean baseline state. -->
<modules>
<command name="rm">craype</command>
<command name="rm">craype-hugepages2M</command>
<command name="rm">craype-mic-knl</command>
<command name="rm">craype-haswell</command>
<command name="rm">PrgEnv-intel</command>
<command name="rm">PrgEnv-cray</command>
<command name="rm">PrgEnv-gnu</command>
<command name="rm">intel</command>
<command name="rm">cce</command>
<command name="rm">gcc</command>
<command name="rm">cray-parallel-netcdf</command>
<command name="rm">cray-hdf5-parallel</command>
<command name="rm">pmi</command>
<command name="rm">cray-mpich2</command>
<command name="rm">cray-mpich</command>
<command name="rm">cray-netcdf</command>
<command name="rm">cray-hdf5</command>
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="rm">cray-libsci</command>
<command name="rm">papi</command>
<command name="rm">cmake</command>
<command name="rm">cray-petsc</command>
<command name="rm">esmf</command>
<command name="rm">zlib</command>
<!-- first load basic defaults, then remove/swap/load as necessary -->
<command name="load">craype</command>
<command name="load">PrgEnv-intel</command>
<command name="load">cray-mpich</command>
<command name="rm">craype-haswell</command>
<command name="load">craype-mic-knl</command>
</modules>
<modules mpilib="mpt">
<command name="swap">cray-mpich cray-mpich/7.7.6</command>
</modules>
<modules mpilib="impi">
<command name="swap">cray-mpich impi/2019.up3</command>
</modules>
<modules compiler="intel">
<command name="load">PrgEnv-intel/6.0.5</command>
<command name="rm">intel</command>
<command name="load">intel/18.0.1.163</command>
</modules>
<modules compiler="intel19">
<command name="load">PrgEnv-intel/6.0.5</command>
<command name="rm">intel</command>
<command name="load">intel/19.0.3.199</command>
</modules>
<modules compiler="gnu">
<command name="swap">PrgEnv-intel PrgEnv-gnu/6.0.5</command>
<command name="rm">gcc</command>
<command name="load">gcc/8.2.0</command>
<command name="rm">cray-libsci</command>
<command name="load">cray-libsci/19.02.1</command>
</modules>
<!-- Pin craype/pmi versions and (re)select the KNL target after compiler swaps. -->
<modules>
<command name="swap">craype craype/2.5.18</command>
<command name="rm">pmi</command>
<command name="load">pmi/5.0.14</command>
<command name="rm">craype-haswell</command>
<command name="load">craype-mic-knl</command>
</modules>
<modules mpilib="mpi-serial">
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="rm">cray-hdf5-parallel</command>
<command name="rm">cray-parallel-netcdf</command>
<command name="load">cray-netcdf/4.6.1.3</command>
<command name="load">cray-hdf5/1.10.2.0</command>
</modules>
<modules mpilib="!mpi-serial">
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="load">cray-netcdf-hdf5parallel/4.6.1.3</command>
<command name="load">cray-hdf5-parallel/1.10.2.0</command>
<command name="load">cray-parallel-netcdf/1.8.1.4</command>
</modules>
<modules>
<command name="rm">git</command>
<command name="load">git</command>
<command name="rm">cmake</command>
<command name="load">cmake/3.14.4</command>
</modules>
<!--command name="list">>& ml.txt</command-->
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="MPICH_ENV_DISPLAY">1</env>
<env name="MPICH_VERSION_DISPLAY">1</env>
<!--env name="MPICH_CPUMASK_DISPLAY">1</env-->
<env name="OMP_STACKSIZE">128M</env>
<env name="OMP_PROC_BIND">spread</env>
<env name="OMP_PLACES">threads</env>
<env name="HDF5_USE_FILE_LOCKING">FALSE</env>
<env name="PERL5LIB">/project/projectdirs/acme/perl5/lib/perl5/x86_64-linux-thread-multi</env>
</environment_variables>
<environment_variables mpilib="mpt">
<env name="MPICH_GNI_DYNAMIC_CONN">disabled</env>
</environment_variables>
<environment_variables compiler="intel">
<env name="FORT_BUFFERED">yes</env>
<env name="MPICH_MEMORY_REPORT">1</env>
</environment_variables>
<environment_variables compiler="intel19">
<env name="MPICH_MEMORY_REPORT">1</env>
</environment_variables>
</machine>
<!-- Skylake nodes of Stampede2 at TACC -->
<machine MACH="stampede2">
<!-- Stampede2 (TACC) Skylake nodes: 48 cores/node, 2 hyperthreads (MAX_TASKS_PER_NODE 96). -->
<DESC>Stampede2. Intel skylake nodes at TACC. 48 cores per node, batch system is SLURM</DESC>
<NODENAME_REGEX>.*stampede2.*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,gnu</COMPILERS>
<MPILIBS>impi</MPILIBS>
<SAVE_TIMING_DIR>$ENV{SCRATCH}</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>acme</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>$ENV{SCRATCH}/acme_scratch/stampede2</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>$ENV{SCRATCH}/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>$ENV{SCRATCH}/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$ENV{SCRATCH}/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>$ENV{SCRATCH}/tools/cprnc.cori/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>e3sm</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>96</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>48</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<!-- TACC's ibrun wrapper picks up task counts from the SLURM allocation; no explicit args. -->
<mpirun mpilib="default">
<executable>ibrun</executable>
</mpirun>
<module_system type="module">
<init_path lang="perl">/opt/apps/lmod/lmod/init/perl</init_path>
<init_path lang="python">/opt/apps/lmod/lmod/init/python</init_path>
<init_path lang="sh">/opt/apps/lmod/lmod/init/sh</init_path>
<init_path lang="csh">/opt/apps/lmod/lmod/init/csh</init_path>
<cmd_path lang="perl">/opt/apps/lmod/lmod/libexec/lmod perl</cmd_path>
<cmd_path lang="python">/opt/apps/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="sh">module -q</cmd_path>
<cmd_path lang="csh">module -q</cmd_path>
<modules>
<command name="purge"/>
</modules>
<modules compiler="intel">
<command name="load">intel/18.0.0</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/6.3.0</command>
</modules>
<modules mpilib="impi">
<command name="load">impi/18.0.0</command>
</modules>
<modules mpilib="mpi-serial">
<command name="load">hdf5/1.8.16</command>
<command name="load">netcdf/4.3.3.1</command>
</modules>
<modules mpilib="!mpi-serial">
<command name="load">phdf5/1.8.16</command>
<!-- NOTE(review): "parallel-netcdf/4.3.3.1" (TACC's parallel netCDF build) plus
     "pnetcdf/1.8.1" (PnetCDF) are both loaded - confirm both module names are intended. -->
<command name="load">parallel-netcdf/4.3.3.1</command>
<command name="load">pnetcdf/1.8.1</command>
</modules>
<modules>
<command name="load">git</command>
<command name="load">cmake</command>
<command name="load">autotools</command>
<command name="load">xalt</command>
<!--command name="load">TACC</command-->
<!--command name="load">python/2.7.13</command-->
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="MPICH_ENV_DISPLAY">1</env>
<env name="MPICH_VERSION_DISPLAY">1</env>
<env name="OMP_STACKSIZE">128M</env>
<env name="OMP_PROC_BIND">spread</env>
<env name="OMP_PLACES">threads</env>
<env name="I_MPI_PIN">1</env>
<env name="MY_MPIRUN_OPTIONS">-l</env>
</environment_variables>
</machine>
<machine MACH="mac">
<!-- Personal macOS machine: no batch system, no module system; everything under $HOME. -->
<DESC>Mac OS/X workstation or laptop</DESC>
<!-- Empty regex: this machine is never auto-detected by hostname; select it explicitly. -->
<NODENAME_REGEX/>
<OS>Darwin</OS>
<COMPILERS>gnu</COMPILERS>
<MPILIBS>openmpi,mpich</MPILIBS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/projects/acme/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>$ENV{HOME}/projects/acme/cesm-inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>$ENV{HOME}/projects/acme/ptclm-data</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$ENV{HOME}/projects/acme/scratch/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$ENV{HOME}/projects/acme/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>$CCSMROOT/tools/cprnc/build/cprnc</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>none</BATCH_SYSTEM>
<SUPPORTED_BY>jnjohnson at lbl dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>4</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>2</MAX_MPITASKS_PER_NODE>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments/>
</mpirun>
<module_system type="none"/>
<RUNDIR>$ENV{HOME}/projects/acme/scratch/$CASE/run</RUNDIR>
<EXEROOT>$ENV{HOME}/projects/acme/scratch/$CASE/bld</EXEROOT>
<!-- cmake -DCMAKE_Fortran_COMPILER=/opt/local/bin/mpif90-mpich-gcc48 -DHDF5_DIR=/opt/local -DNetcdf_INCLUDE_DIR=/opt/local/include .. -->
<!-- <GMAKE>make</GMAKE> <- this doesn't actually work! -->
</machine>
<machine MACH="linux-generic">
<!-- Generic Linux workstation: no batch/module system; mirrors the "mac" entry above
     but passes -np to mpirun explicitly. -->
<DESC>Linux workstation or laptop</DESC>
<!-- "none" regex: never auto-detected by hostname; select this machine explicitly. -->
<NODENAME_REGEX>none</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>gnu</COMPILERS>
<MPILIBS>openmpi,mpich</MPILIBS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/projects/acme/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>$ENV{HOME}/projects/acme/cesm-inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>$ENV{HOME}/projects/acme/ptclm-data</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$ENV{HOME}/projects/acme/scratch/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$ENV{HOME}/projects/acme/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>$CCSMROOT/tools/cprnc/build/cprnc</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>none</BATCH_SYSTEM>
<SUPPORTED_BY>jayesh at mcs dot anl dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>4</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>2</MAX_MPITASKS_PER_NODE>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -np {{ total_tasks }}</arg>
</arguments>
</mpirun>
<module_system type="none"/>
<RUNDIR>$ENV{HOME}/projects/acme/scratch/$CASE/run</RUNDIR>
<EXEROOT>$ENV{HOME}/projects/acme/scratch/$CASE/bld</EXEROOT>
<!-- cmake -DCMAKE_Fortran_COMPILER=/opt/local/bin/mpif90-mpich-gcc48 -DHDF5_DIR=/opt/local -DNetcdf_INCLUDE_DIR=/opt/local/include .. -->
<!-- <GMAKE>make</GMAKE> <- this doesn't actually work! -->
</machine>
<machine MACH="melvin">
<!-- Sandia Jenkins testing workstation; uses the SEMS/ACME module environment. -->
<DESC>Linux workstation for Jenkins testing</DESC>
<NODENAME_REGEX>(melvin|watson|s999964|climate|penn|sems)</NODENAME_REGEX>
<OS>LINUX</OS>
<PROXY>sonproxy.sandia.gov:80</PROXY>
<COMPILERS>gnu,intel</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<SAVE_TIMING_DIR>/sems-data-store/ACME/timings</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>.*</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/acme/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/sems-data-store/ACME/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/sems-data-store/ACME/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/sems-data-store/ACME/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/sems-data-store/ACME/cprnc/build.new/cprnc</CCSM_CPRNC>
<GMAKE_J>32</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>none</BATCH_SYSTEM>
<SUPPORTED_BY>jgfouca at sandia dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>48</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>48</MAX_MPITASKS_PER_NODE>
<!-- OpenMPI launch with socket-aware mapping; overload-allowed permits binding more
     processes than hardware threads. -->
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -np {{ total_tasks }}</arg>
<arg name="tasks_per_node"> --map-by ppr:{{ tasks_per_numa }}:socket:PE=$ENV{OMP_NUM_THREADS} --bind-to hwthread:overload-allowed</arg>
</arguments>
</mpirun>
<!-- allow_error="false": module commands that fail abort the run. -->
<module_system type="module" allow_error="false">
<init_path lang="python">/usr/share/Modules/init/python.py</init_path>
<init_path lang="perl">/usr/share/Modules/init/perl.pm</init_path>
<init_path lang="sh">/usr/share/Modules/init/sh</init_path>
<init_path lang="csh">/usr/share/Modules/init/csh</init_path>
<cmd_path lang="python">/usr/bin/modulecmd python</cmd_path>
<cmd_path lang="perl">/usr/bin/modulecmd perl</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<modules>
<command name="purge"/>
<command name="load">sems-env</command>
<command name="load">acme-env</command>
<command name="load">sems-git</command>
<command name="load">acme-binutils</command>
<command name="load">sems-python/2.7.9</command>
<command name="load">sems-cmake/3.12.2</command>
</modules>
<modules compiler="gnu">
<command name="load">sems-gcc/7.3.0</command>
</modules>
<modules compiler="intel">
<command name="load">sems-intel/16.0.3</command>
</modules>
<modules mpilib="mpi-serial">
<command name="load">sems-netcdf/4.4.1/exo</command>
<command name="load">acme-pfunit/3.2.8/base</command>
</modules>
<modules mpilib="!mpi-serial">
<command name="load">acme-openmpi/2.1.5/acme</command>
<command name="load">acme-netcdf/4.4.1/acme</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<MAX_GB_OLD_TEST_DATA>1000</MAX_GB_OLD_TEST_DATA>
<!-- <GMAKE>make</GMAKE> <- this doesn't actually work! -->
<environment_variables>
<env name="NETCDFROOT">$ENV{SEMS_NETCDF_ROOT}</env>
<env name="OMP_STACKSIZE">64M</env>
<env name="OMP_PROC_BIND">spread</env>
<env name="OMP_PLACES">threads</env>
</environment_variables>
<environment_variables mpilib="!mpi-serial">
<env name="PNETCDFROOT">$ENV{SEMS_NETCDF_ROOT}</env>
</environment_variables>
</machine>
<machine MACH="snl-white">
<!-- Sandia "white" Power 8 + CUDA testbed; LSF batch, single MPI task per node. -->
<DESC>IBM Power 8 Testbed machine</DESC>
<NODENAME_REGEX>white</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>gnu</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/projects/e3sm/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>$ENV{HOME}/projects/e3sm/cesm-inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>$ENV{HOME}/projects/e3sm/ptclm-data</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$ENV{HOME}/projects/e3sm/scratch/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$ENV{HOME}/projects/e3sm/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>$CCSMROOT/tools/cprnc/build/cprnc</CCSM_CPRNC>
<GMAKE_J>32</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>lsf</BATCH_SYSTEM>
<SUPPORTED_BY>mdeakin at sandia dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>4</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>1</MAX_MPITASKS_PER_NODE>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments/>
</mpirun>
<!-- allow_error="true": module command failures are tolerated on this testbed. -->
<module_system type="module" allow_error="true">
<init_path lang="sh">/usr/share/Modules/init/sh</init_path>
<init_path lang="python">/usr/share/Modules/init/python.py</init_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="python">/usr/bin/modulecmd python</cmd_path>
<modules>
<command name="load">devpack/20181011/openmpi/2.1.2/gcc/7.2.0/cuda/9.2.88</command>
</modules>
</module_system>
<RUNDIR>$ENV{HOME}/projects/e3sm/scratch/$CASE/run</RUNDIR>
<EXEROOT>$ENV{HOME}/projects/e3sm/scratch/$CASE/bld</EXEROOT>
<environment_variables>
<!-- NETCDF_ROOT is expected from the devpack module loaded above - confirm it is set there. -->
<env name="NETCDF_C_PATH">$ENV{NETCDF_ROOT}</env>
<env name="NETCDF_FORTRAN_PATH">/ascldap/users/jgfouca/packages/netcdf-fortran-4.4.4-white</env>
<env name="E3SM_SRCROOT">$SRCROOT</env>
</environment_variables>
</machine>
<machine MACH="snl-blake">
<!-- Sandia "blake" Skylake testbed; SLURM batch, 48 tasks per node. -->
<DESC>Skylake Testbed machine</DESC>
<NODENAME_REGEX>blake</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel18</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/projects/e3sm/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>$ENV{HOME}/projects/e3sm/cesm-inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>$ENV{HOME}/projects/e3sm/ptclm-data</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$ENV{HOME}/projects/e3sm/scratch/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$ENV{HOME}/projects/e3sm/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>$CCSMROOT/tools/cprnc/build/cprnc</CCSM_CPRNC>
<GMAKE_J>48</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>mdeakin at sandia dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>48</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>48</MAX_MPITASKS_PER_NODE>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments/>
</mpirun>
<!-- allow_error="true": module command failures are tolerated on this testbed. -->
<module_system type="module" allow_error="true">
<init_path lang="sh">/usr/share/Modules/init/sh</init_path>
<init_path lang="python">/usr/share/Modules/init/python.py</init_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="python">module</cmd_path>
<modules>
<command name="load">zlib/1.2.11</command>
<command name="load">intel/compilers/18.1.163</command>
<command name="load">openmpi/2.1.2/intel/18.1.163</command>
<command name="load">hdf5/1.10.1/openmpi/2.1.2/intel/18.1.163</command>
<command name="load">netcdf-exo/4.4.1.1/openmpi/2.1.2/intel/18.1.163</command>
</modules>
</module_system>
<RUNDIR>$ENV{HOME}/projects/e3sm/scratch/$CASE/run</RUNDIR>
<EXEROOT>$ENV{HOME}/projects/e3sm/scratch/$CASE/bld</EXEROOT>
<environment_variables>
<!-- NETCDF_ROOT / NETCDFF_ROOT are expected from the modules loaded above - confirm. -->
<env name="NETCDF_C_PATH">$ENV{NETCDF_ROOT}</env>
<env name="NETCDF_FORTRAN_PATH">$ENV{NETCDFF_ROOT}</env>
</environment_variables>
</machine>
<machine MACH="anlworkstation">
<!-- ANL workstation: uses the "soft" (SoftEnv) environment system rather than modules. -->
<DESC>Linux workstation for ANL</DESC>
<NODENAME_REGEX>compute.*mcs.anl.gov</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>gnu</COMPILERS>
<MPILIBS>mpich</MPILIBS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/acme/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/home/climate1/acme/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/climate1/acme/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/home/climate1/acme/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/home/climate1/acme/cprnc/build/cprnc</CCSM_CPRNC>
<GMAKE>make</GMAKE>
<GMAKE_J>32</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>none</BATCH_SYSTEM>
<SUPPORTED_BY>jgfouca at sandia dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>32</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -l -np {{ total_tasks }}</arg>
</arguments>
</mpirun>
<!-- SoftEnv: packages are added with "soft add +name" rather than "module load". -->
<module_system type="soft">
<init_path lang="csh">/software/common/adm/packages/softenv-1.6.2/etc/softenv-load.csh</init_path>
<init_path lang="sh">/software/common/adm/packages/softenv-1.6.2/etc/softenv-load.sh</init_path>
<cmd_path lang="csh">source /software/common/adm/packages/softenv-1.6.2/etc/softenv-aliases.csh ; soft</cmd_path>
<cmd_path lang="sh">source /software/common/adm/packages/softenv-1.6.2/etc/softenv-aliases.sh ; soft</cmd_path>
<modules>
<command name="add">+cmake-3.12.4</command>
</modules>
<modules compiler="gnu">
<command name="add">+gcc-6.2.0</command>
<command name="add">+szip-2.1-gcc-6.2.0</command>
</modules>
<modules compiler="gnu" mpilib="mpi-serial">
<command name="add">+netcdf-4.4.1c-4.2cxx-4.4.4f-serial-gcc6.2.0</command>
</modules>
<modules compiler="gnu" mpilib="!mpi-serial">
<command name="add">+mpich-3.2-gcc-6.2.0</command>
<command name="add">+hdf5-1.8.16-gcc-6.2.0-mpich-3.2-parallel</command>
<command name="add">+netcdf-4.4.1c-4.2cxx-4.4.4f-parallel-gcc6.2.0-mpich-3.2</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<environment_variables>
<!-- Derive NETCDF_PATH from whichever ncdump the soft env put on PATH. -->
<env name="NETCDF_PATH">$SHELL{dirname $(dirname $(which ncdump))}</env>
</environment_variables>
<environment_variables mpilib="mpi-serial">
<!-- We currently don't have a soft env for serial hdf5 -->
<env name="LD_LIBRARY_PATH">/soft/apps/packages/climate/hdf5/1.8.16-serial/gcc-6.2.0/lib:$ENV{LD_LIBRARY_PATH}</env>
</environment_variables>
<environment_variables mpilib="!mpi-serial">
<env name="HDF5_PATH">$SHELL{dirname $(dirname $(which h5dump))}</env>
<!-- We currently don't have a soft env for pnetcdf 1.8.1 -->
<env name="PNETCDF_PATH">/soft/apps/packages/climate/pnetcdf/1.8.1/gcc-6.2.0</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE">
<env name="OMP_STACKSIZE">64M</env>
</environment_variables>
</machine>
<machine MACH="sandiatoss3">
<!-- Sandia TOSS3 clusters (skybridge/chama); SLURM batch, 16 tasks/node. -->
<DESC>SNL clust</DESC>
<NODENAME_REGEX>(skybridge|chama)</NODENAME_REGEX>
<OS>LINUX</OS>
<PROXY>wwwproxy.sandia.gov:80</PROXY>
<COMPILERS>intel</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<PROJECT>fy190158</PROJECT>
<SAVE_TIMING_DIR>/projects/ccsm/timings</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>.*</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>/gpfs1/$USER/acme_scratch/sandiatoss3</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/projects/ccsm/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/projects/ccsm/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/projects/ccsm/ccsm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/projects/ccsm/cprnc/build.toss3/cprnc_wrap</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_integration</TESTS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>jgfouca at sandia dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>16</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>16</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>mpiexec</executable>
<arguments>
<arg name="num_tasks"> --n {{ total_tasks }}</arg>
<arg name="tasks_per_node"> --map-by ppr:{{ tasks_per_numa }}:socket:PE=$ENV{OMP_NUM_THREADS} --bind-to core</arg>
</arguments>
</mpirun>
<!-- mpi-serial runs launch the executable directly (no mpiexec wrapper). -->
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<module_system type="module">
<init_path lang="python">/usr/share/lmod/lmod/init/python.py</init_path>
<init_path lang="perl">/usr/share/lmod/lmod/init/perl.pm</init_path>
<init_path lang="sh">/usr/share/lmod/lmod/init/sh</init_path>
<init_path lang="csh">/usr/share/lmod/lmod/init/csh</init_path>
<cmd_path lang="python">/usr/share/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="perl">/usr/share/lmod/lmod/libexec/lmod perl</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<modules>
<command name="purge"/>
<command name="load">sems-env</command>
<command name="load">sems-git</command>
<command name="load">sems-python/2.7.9</command>
<command name="load">sems-cmake/3.12.2</command>
<command name="load">gnu/4.9.2</command>
<command name="load">sems-intel/17.0.0</command>
</modules>
<modules mpilib="!mpi-serial">
<command name="load">sems-openmpi/1.10.5</command>
<command name="load">sems-netcdf/4.4.1/exo_parallel</command>
</modules>
<modules mpilib="mpi-serial">
<command name="load">sems-netcdf/4.4.1/exo</command>
</modules>
</module_system>
<!-- NOTE(review): RUNDIR lives under /gscratch while CIME_OUTPUT_ROOT is under /gpfs1 -
     confirm this split across filesystems is intentional. -->
<RUNDIR>/gscratch/$USER/acme_scratch/sandiatoss3/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<!-- complete path to a short term archiving directory -->
<!-- path to the cprnc tool used to compare netcdf history files in testing -->
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="NETCDFROOT">$ENV{SEMS_NETCDF_ROOT}</env>
<env name="NETCDF_INCLUDES">$ENV{SEMS_NETCDF_ROOT}/include</env>
<env name="NETCDF_LIBS">$ENV{SEMS_NETCDF_ROOT}/lib</env>
<env name="OMP_STACKSIZE">64M</env>
</environment_variables>
<environment_variables mpilib="!mpi-serial">
<env name="PNETCDFROOT">$ENV{SEMS_NETCDF_ROOT}</env>
</environment_variables>
</machine>
<machine MACH="ghost">
<!-- Sandia "ghost" cluster; SLURM batch, 36 tasks/node. Mirrors sandiatoss3's launch setup. -->
<DESC>SNL clust</DESC>
<NODENAME_REGEX>ghost-login</NODENAME_REGEX>
<OS>LINUX</OS>
<PROXY>wwwproxy.sandia.gov:80</PROXY>
<COMPILERS>intel</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<PROJECT>fy190158</PROJECT>
<CIME_OUTPUT_ROOT>/gscratch/$USER/acme_scratch/ghost</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/projects/ccsm/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/projects/ccsm/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/projects/ccsm/ccsm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/projects/ccsm/cprnc/build.toss3/cprnc_wrap</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_integration</TESTS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>jgfouca at sandia dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>36</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>36</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>mpiexec</executable>
<arguments>
<arg name="num_tasks"> --n {{ total_tasks }}</arg>
<arg name="tasks_per_node"> --map-by ppr:{{ tasks_per_numa }}:socket:PE=$ENV{OMP_NUM_THREADS} --bind-to core</arg>
</arguments>
</mpirun>
<!-- mpi-serial runs launch the executable directly (no mpiexec wrapper). -->
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<module_system type="module">
<init_path lang="python">/usr/share/lmod/lmod/init/python.py</init_path>
<init_path lang="perl">/usr/share/lmod/lmod/init/perl.pm</init_path>
<init_path lang="sh">/usr/share/lmod/lmod/init/sh</init_path>
<init_path lang="csh">/usr/share/lmod/lmod/init/csh</init_path>
<cmd_path lang="python">/usr/share/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="perl">/usr/share/lmod/lmod/libexec/lmod perl</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<modules>
<command name="purge"/>
<command name="load">sems-env</command>
<command name="load">sems-git</command>
<command name="load">sems-python/2.7.9</command>
<command name="load">sems-cmake</command>
<command name="load">gnu/4.9.2</command>
<command name="load">sems-intel/16.0.2</command>
<command name="load">mkl/16.0</command>
<command name="load">sems-netcdf/4.4.1/exo_parallel</command>
</modules>
<modules mpilib="!mpi-serial">
<command name="load">sems-openmpi/1.10.5</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<!-- complete path to a short term archiving directory -->
<!-- path to the cprnc tool used to compare netcdf history files in testing -->
<environment_variables>
<env name="NETCDFROOT">$ENV{SEMS_NETCDF_ROOT}</env>
<env name="NETCDF_INCLUDES">$ENV{SEMS_NETCDF_ROOT}/include</env>
<env name="NETCDF_LIBS">$ENV{SEMS_NETCDF_ROOT}/lib</env>
<env name="OMP_STACKSIZE">64M</env>
</environment_variables>
<environment_variables mpilib="!mpi-serial">
<env name="PNETCDFROOT">$ENV{SEMS_NETCDF_ROOT}</env>
</environment_variables>
</machine>
<machine MACH="blues">
<DESC>ANL/LCRC Linux Cluster</DESC>
<NODENAME_REGEX>blogin.*.lcrc.anl.gov</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>gnu,pgi,intel,nag</COMPILERS>
<MPILIBS>mvapich,mpich,openmpi</MPILIBS>
<PROJECT>ACME</PROJECT>
<CIME_OUTPUT_ROOT>/lcrc/project/$PROJECT/$USER/acme_scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/home/ccsm-data/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/ccsm-data/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/lcrc/project/ACME/$USER/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lcrc/group/acme/acme_baselines/blues/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/home/ccsm-data/tools/cprnc</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<TESTS>e3sm_integration</TESTS>
<NTEST_PARALLEL_JOBS>4</NTEST_PARALLEL_JOBS>
<BATCH_SYSTEM>pbs</BATCH_SYSTEM>
<SUPPORTED_BY>acme</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>16</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>16</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="mvapich">
<executable>mpiexec</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }} </arg>
</arguments>
</mpirun>
<mpirun mpilib="mpich">
<executable>mpiexec</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }} </arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<module_system type="soft">
<init_path lang="csh">/etc/profile.d/a_softenv.csh</init_path>
<init_path lang="sh">/etc/profile.d/a_softenv.sh</init_path>
<cmd_path lang="csh">soft</cmd_path>
<cmd_path lang="sh">soft</cmd_path>
<modules>
<command name="add">+cmake-2.8.12</command>
<command name="add">+python-2.7</command>
</modules>
<modules compiler="gnu">
<command name="add">+gcc-5.3.0</command>
<command name="add">+hdf5-1.10.0-gcc-5.3.0-serial</command>
<command name="add">+netcdf-c-4.4.0-f77-4.4.3-gcc-5.3.0-serial</command>
</modules>
<modules compiler="gnu-5.2">
<command name="add">+gcc-5.2</command>
<command name="add">+netcdf-4.3.3.1-gnu5.2-serial</command>
</modules>
<modules compiler="gnu" mpilib="mvapich">
<command name="add">+mvapich2-2.2b-gcc-5.3.0</command>
<command name="add">+pnetcdf-1.6.1-gcc-5.3.0-mvapich2-2.2b</command>
</modules>
<modules compiler="gnu-5.2" mpilib="mvapich">
<command name="add">+mvapich2-2.2b-gcc-5.2</command>
</modules>
<modules compiler="intel">
<command name="add">+intel-15.0</command>
<command name="add">+mkl-11.2.1</command>
</modules>
<modules compiler="intel" mpilib="mvapich">
<command name="add">+mvapich2-2.2b-intel-15.0</command>
<command name="add">+pnetcdf-1.6.1-mvapich2-2.2a-intel-15.0</command>
</modules>
<modules compiler="pgi">
<command name="add">+pgi-15.7</command>
<command name="add">+binutils-2.27</command>
<command name="add">+netcdf-c-4.4.1-f77-4.4.4-pgi-15.7-serial</command>
</modules>
<modules compiler="pgi" mpilib="mvapich">
<command name="add">+mvapich2-2.2-pgi-15.7</command>
<command name="add">+pnetcdf-1.7.0-pgi-15.7-mvapich2-2.2</command>
</modules>
<modules compiler="nag">
<command name="add">+nag-6.0</command>
<command name="add">+hdf5-1.8.12-serial-nag</command>
<command name="add">+netcdf-4.3.1-serial-nag</command>
</modules>
<modules compiler="nag" mpilib="mvapich">
<command name="add">+mpich3-3.1.4-nag-6.0</command>
<command name="add">+pnetcdf-1.6.1-mpich-3.1.4-nag-6.0</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="NETCDFROOT">$SHELL{dirname $(dirname $(which ncdump))}</env>
</environment_variables>
<environment_variables mpilib="!mpi-serial">
<env name="PNETCDFROOT">$SHELL{dirname $(dirname $(which pnetcdf_version))}</env>
</environment_variables>
<environment_variables>
<env name="OMP_STACKSIZE">64M</env>
</environment_variables>
</machine>
<machine MACH="anvil">
<DESC>ANL/LCRC Linux Cluster</DESC>
<NODENAME_REGEX>b51.*.lcrc.anl.gov</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,gnu,pgi</COMPILERS>
<MPILIBS>mvapich,openmpi</MPILIBS>
<PROJECT>condo</PROJECT>
<SAVE_TIMING_DIR>/lcrc/group/acme</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>.*</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>/lcrc/group/acme/$USER/acme_scratch/anvil</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/home/ccsm-data/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/ccsm-data/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/lcrc/group/acme/$USER/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lcrc/group/acme/acme_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/lcrc/group/acme/tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_integration</TESTS>
<NTEST_PARALLEL_JOBS>4</NTEST_PARALLEL_JOBS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>E3SM</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>36</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>36</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>srun</executable>
<arguments>
<arg name="num_tasks"> -l -N {{ num_nodes }} -n {{ total_tasks }} </arg>
<arg name="binding">--cpu_bind=cores</arg>
<arg name="thread_count">-c $SHELL{if [ FALSE = `./xmlquery --value SMP_PRESENT` ];then echo 1;else echo $OMP_NUM_THREADS;fi}</arg>
<arg name="placement">-m plane=$SHELL{if [ FALSE = `./xmlquery --value SMP_PRESENT` ];then echo 36;else echo 36/$OMP_NUM_THREADS|bc;fi}</arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<module_system type="soft">
<init_path lang="csh">/etc/profile.d/a_softenv.csh</init_path>
<init_path lang="sh">/etc/profile.d/a_softenv.sh</init_path>
<cmd_path lang="csh">soft</cmd_path>
<cmd_path lang="sh">soft</cmd_path>
<modules>
<command name="add">+cmake-3.12.3</command>
<command name="add">+python-2.7</command>
</modules>
<modules compiler="intel">
<command name="add">+gcc-5.3.0</command>
<command name="add">+intel-17.0.0</command>
<command name="add">+netcdf-c-4.4.1-f77-4.4.4-intel-17.0.0-serial</command>
</modules>
<modules compiler="intel" mpilib="mvapich">
<command name="add">+mvapich2-2.2-intel-17.0.0-acme</command>
<command name="add">+pnetcdf-1.7.0-intel-17.0.0-mvapich2-2.2-acme</command>
</modules>
<modules compiler="intel" mpilib="openmpi">
<command name="add">+openmpi-2.0.1-intel-17.0.0-acme</command>
<command name="add">+pnetcdf-1.7.0-intel-17.0.0-openmpi-2.0.1-acme</command>
</modules>
<modules compiler="gnu">
<command name="add">+gcc-5.3.0</command>
<command name="add">+netcdf-c-4.4.0-f77-4.4.3-gcc-5.3.0-serial</command>
</modules>
<modules compiler="gnu" mpilib="mvapich">
<command name="add">+mvapich2-2.2b-gcc-5.3.0-acme</command>
<command name="add">+pnetcdf-1.6.1-gcc-5.3.0-mvapich2-2.2b-acme</command>
</modules>
<modules compiler="gnu" mpilib="openmpi">
<command name="add">+openmpi-1.10.2-gcc-5.3.0-acme</command>
<command name="add">+pnetcdf-1.6.1-gcc-5.3.0-openmpi-1.10.2-acme</command>
</modules>
<modules compiler="pgi">
<command name="add">+pgi-16.3</command>
<command name="add">+netcdf-c-4.4.0-f77-4.4.3-pgi-16.3-serial</command>
</modules>
<modules compiler="pgi" mpilib="mvapich">
<command name="add">+mvapich2-2.2b-pgi-16.3-acme</command>
<command name="add">+pnetcdf-1.6.1-pgi-16.3-mvapich2-2.2b-acme</command>
</modules>
<modules compiler="pgi" mpilib="openmpi">
<command name="add">+openmpi-1.10.2-pgi-16.3-acme</command>
<command name="add">+pnetcdf-1.6.1-pgi-16.3-openmpi-1.10.2-acme</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<MAX_GB_OLD_TEST_DATA>1000</MAX_GB_OLD_TEST_DATA>
<environment_variables>
<env name="NETCDF_C_PATH">$SHELL{which nc-config | xargs dirname | xargs dirname}</env>
<env name="NETCDF_FORTRAN_PATH">$SHELL{which nf-config | xargs dirname | xargs dirname}</env>
</environment_variables>
<environment_variables mpilib="!mpi-serial">
<env name="PNETCDF_PATH">$SHELL{which pnetcdf_version | xargs dirname | xargs dirname}</env>
</environment_variables>
<environment_variables mpilib="mvapich">
<env name="MV2_ENABLE_AFFINITY">0</env>
<env name="MV2_SHOW_CPU_BINDING">1</env>
</environment_variables>
<environment_variables mpilib="mvapich" DEBUG="TRUE">
<env name="MV2_DEBUG_SHOW_BACKTRACE">1</env>
<env name="MV2_SHOW_ENV_INFO">2</env>
</environment_variables>
<environment_variables mpilib="impi" DEBUG="TRUE">
<env name="I_MPI_DEBUG">10</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE">
<env name="OMP_STACKSIZE">64M</env>
<env name="KMP_HOT_TEAMS_MODE">1</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" compiler="intel">
<env name="KMP_AFFINITY">granularity=thread,scatter</env>
<env name="KMP_HOT_TEAMS_MODE">1</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" compiler="!intel">
<env name="OMP_PROC_BIND">spread</env>
<env name="OMP_PLACES">threads</env>
</environment_variables>
</machine>
<machine MACH="bebop">
<DESC>ANL/LCRC Cluster, Cray CS400, 352-nodes Xeon Phi 7230 KNLs 64C/1.3GHz + 672-nodes Xeon E5-2695v4 Broadwells 36C/2.10GHz, Intel Omni-Path network, SLURM batch system, Lmod module environment.</DESC>
<NODENAME_REGEX>beboplogin.*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,gnu</COMPILERS>
<MPILIBS>impi,mpich,mvapich,openmpi</MPILIBS>
<PROJECT>acme</PROJECT>
<CIME_OUTPUT_ROOT>/lcrc/group/acme/$USER/acme_scratch/bebop</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/home/ccsm-data/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/ccsm-data/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lcrc/group/acme/acme_baselines/bebop/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/lcrc/group/acme/tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_integration</TESTS>
<NTEST_PARALLEL_JOBS>4</NTEST_PARALLEL_JOBS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>E3SM</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>36</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>36</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -l -n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<module_system type="module">
<init_path lang="sh">/home/software/spack-0.10.1/opt/spack/linux-centos7-x86_64/gcc-4.8.5/lmod-7.4.9-ic63herzfgw5u3na5mdtvp3nwxy6oj2z/lmod/lmod/init/sh</init_path>
<init_path lang="csh">/home/software/spack-0.10.1/opt/spack/linux-centos7-x86_64/gcc-4.8.5/lmod-7.4.9-ic63herzfgw5u3na5mdtvp3nwxy6oj2z/lmod/lmod/init/csh</init_path>
<init_path lang="python">/home/software/spack-0.10.1/opt/spack/linux-centos7-x86_64/gcc-4.8.5/lmod-7.4.9-ic63herzfgw5u3na5mdtvp3nwxy6oj2z/lmod/lmod/init/env_modules_python.py</init_path>
<cmd_path lang="python">/home/software/spack-0.10.1/opt/spack/linux-centos7-x86_64/gcc-4.8.5/lmod-7.4.9-ic63herzfgw5u3na5mdtvp3nwxy6oj2z/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<modules>
<command name="purge"/>
</modules>
<modules compiler="intel">
<command name="load">intel/17.0.4-74uvhji</command>
<command name="load">intel-mkl/2017.3.196-jyjmyut</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/7.1.0-4bgguyp</command>
</modules>
<modules compiler="intel" mpilib="impi">
<command name="load">intel-mpi/2017.3-dfphq6k</command>
<command name="load">parallel-netcdf/1.6.1</command>
</modules>
<modules compiler="intel" mpilib="mvapich">
<command name="load">mvapich2/2.2-n6lclff</command>
<command name="load">parallel-netcdf/1.6.1-mvapich2.2</command>
</modules>
<modules>
<command name="load">cmake</command>
<command name="load">netcdf/4.4.1.1-prsuusl</command>
<command name="load">netcdf-fortran/4.4.4-ojwazvy</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="NETCDF_C_PATH">$SHELL{which nc-config | xargs dirname | xargs dirname}</env>
<env name="NETCDF_FORTRAN_PATH">$SHELL{which nf-config | xargs dirname | xargs dirname}</env>
<env name="PATH">/lcrc/group/acme/soft/perl/5.26.0/bin:$ENV{PATH}</env>
</environment_variables>
<environment_variables mpilib="!mpi-serial">
<env name="PNETCDF_PATH">$SHELL{which pnetcdf_version | xargs dirname | xargs dirname}</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE">
<env name="OMP_STACKSIZE">128M</env>
<env name="OMP_PROC_BIND">spread</env>
<env name="OMP_PLACES">threads</env>
</environment_variables>
<environment_variables mpilib="impi">
<env name="I_MPI_FABRICS">shm:tmi</env>
</environment_variables>
</machine>
<machine MACH="cetus">
<DESC>ANL IBM BG/Q, os is BGQ, 16 cores/node, batch system is cobalt</DESC>
<NODENAME_REGEX>cetus</NODENAME_REGEX>
<OS>BGQ</OS>
<COMPILERS>ibm</COMPILERS>
<MPILIBS>ibm</MPILIBS>
<PROJECT>ClimateEnergy_2</PROJECT>
<CHARGE_ACCOUNT>ClimateEnergy</CHARGE_ACCOUNT>
<CIME_OUTPUT_ROOT>/projects/$PROJECT/$USER</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/projects/ccsm/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/projects/ccsm/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/projects/$PROJECT/$USER/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/projects/ccsm/ccsm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/projects/ccsm/tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>cobalt</BATCH_SYSTEM>
<SUPPORTED_BY>jayesh -at- mcs.anl.gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>4</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>/usr/bin/runjob</executable>
<arguments>
<arg name="label">--label short</arg>
<arg name="tasks_per_node">--ranks-per-node $MAX_MPITASKS_PER_NODE</arg>
<arg name="num_tasks">--np {{ total_tasks }}</arg>
<arg name="locargs">--block $COBALT_PARTNAME $LOCARGS</arg>
<arg name="bgq_smp_vars">$ENV{BGQ_SMP_VARS}</arg>
<arg name="stacksize">$ENV{BGQ_STACKSIZE}</arg>
</arguments>
</mpirun>
<module_system type="soft">
<init_path lang="csh">/etc/profile.d/00softenv.csh</init_path>
<init_path lang="sh">/etc/profile.d/00softenv.sh</init_path>
<cmd_path lang="csh">soft</cmd_path>
<cmd_path lang="sh">soft</cmd_path>
<modules>
<command name="add">+mpiwrapper-xl</command>
<command name="add">@ibm-compilers-2016-05</command>
<command name="add">+cmake</command>
<command name="add">+python</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="MPI_TYPE_MAX">10000</env>
<env name="BGQ_SMP_VARS"> </env>
<env name="BGQ_STACKSIZE"> </env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE">
<env name="BGQ_SMP_VARS">--envs BG_THREADLAYOUT=1 XL_BG_SPREADLAYOUT=YES OMP_DYNAMIC=FALSE OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS}</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" MAX_MPITASKS_PER_NODE="!16">
<env name="BGQ_STACKSIZE">--envs OMP_STACKSIZE=64M</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" MAX_MPITASKS_PER_NODE="16">
<env name="BGQ_STACKSIZE">--envs OMP_STACKSIZE=16M</env>
</environment_variables>
</machine>
<machine MACH="syrah">
<DESC>LLNL Linux Cluster, Linux (intel), 16 pes/node, batch system is Slurm</DESC>
<OS>LINUX</OS>
<COMPILERS>intel</COMPILERS>
<MPILIBS>mpich</MPILIBS>
<CIME_OUTPUT_ROOT>/p/lscratchh/$USER</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/usr/gdata/climdat/ccsm3data/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/usr/gdata/climdat/ccsm3data/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/p/lscratchh/$CCSMUSER/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/p/lscratchh/$CCSMUSER/ccsm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/p/lscratchd/ma21/ccsm3data/tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<BATCH_SYSTEM>lc_slurm</BATCH_SYSTEM>
<SUPPORTED_BY>donahue5 -at- llnl.gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>16</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>16</MAX_MPITASKS_PER_NODE>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<mpirun mpilib="default">
<executable>srun</executable>
</mpirun>
<module_system type="module">
<init_path lang="python">/usr/share/lmod/lmod/init/env_modules_python.py</init_path>
<init_path lang="perl">/usr/share/lmod/lmod/init/perl</init_path>
<init_path lang="sh">/usr/share/lmod/lmod/init/sh</init_path>
<init_path lang="csh">/usr/share/lmod/lmod/init/csh</init_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="python">/usr/share/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="perl">/usr/share/lmod/lmod/libexec/lmod perl</cmd_path>
<modules compiler="intel">
<command name="load">python</command>
<command name="load">git</command>
<command name="load">intel/19.0.4</command>
<command name="load">mvapich2/2.3</command>
<command name="load">cmake/3.14.5</command>
<command name="load">netcdf-fortran/4.4.4</command>
<command name="load">pnetcdf/1.9.0</command>
</modules>
</module_system>
<RUNDIR>/p/lscratchh/$CCSMUSER/ACME/$CASE/run</RUNDIR>
<EXEROOT>/p/lscratchh/$CCSMUSER/$CASE/bld</EXEROOT>
<environment_variables compiler="intel">
<env name="NETCDFROOT">/usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-18.0.1/</env>
<env name="NETCDF_PATH">/usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-18.0.1/</env>
</environment_variables>
<environment_variables compiler="intel" mpilib="!mpi-serial">
<env name="PNETCDFROOT">/usr/tce/packages/pnetcdf/pnetcdf-1.9.0-intel-18.0.1-mvapich2-2.2/</env>
</environment_variables>
</machine>
<machine MACH="quartz">
<DESC>LLNL Linux Cluster, Linux (intel), 36 pes/node, batch system is Slurm</DESC>
<OS>LINUX</OS>
<COMPILERS>intel</COMPILERS>
<MPILIBS>mpich</MPILIBS>
<CIME_OUTPUT_ROOT>/p/lscratchh/$USER</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/usr/gdata/climdat/ccsm3data/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/usr/gdata/climdat/ccsm3data/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/p/lscratchh/$CCSMUSER/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/p/lscratchh/$CCSMUSER/ccsm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/p/lscratchd/ma21/ccsm3data/tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<BATCH_SYSTEM>lc_slurm</BATCH_SYSTEM>
<SUPPORTED_BY>donahue5 -at- llnl.gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>36</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>36</MAX_MPITASKS_PER_NODE>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<mpirun mpilib="default">
<executable>srun</executable>
</mpirun>
<module_system type="module">
<init_path lang="python">/usr/share/lmod/lmod/init/env_modules_python.py</init_path>
<init_path lang="perl">/usr/share/lmod/lmod/init/perl</init_path>
<init_path lang="sh">/usr/share/lmod/lmod/init/sh</init_path>
<init_path lang="csh">/usr/share/lmod/lmod/init/csh</init_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="python">/usr/share/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="perl">/usr/share/lmod/lmod/libexec/lmod perl</cmd_path>
<modules compiler="intel">
<command name="load">python</command>
<command name="load">git</command>
<command name="load">intel/19.0.4</command>
<command name="load">mvapich2/2.3</command>
<command name="load">cmake/3.14.5</command>
<command name="load">netcdf-fortran/4.4.4</command>
<command name="load">pnetcdf/1.9.0</command>
</modules>
</module_system>
<RUNDIR>/p/lscratchh/$CCSMUSER/ACME/$CASE/run</RUNDIR>
<EXEROOT>/p/lscratchh/$CCSMUSER/$CASE/bld</EXEROOT>
<environment_variables compiler="intel">
<env name="NETCDFROOT">/usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-18.0.1/</env>
<env name="NETCDF_PATH">/usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-18.0.1/</env>
</environment_variables>
<environment_variables compiler="intel" mpilib="!mpi-serial">
<env name="PNETCDFROOT">/usr/tce/packages/pnetcdf/pnetcdf-1.9.0-intel-18.0.1-mvapich2-2.2/</env>
</environment_variables>
</machine>
<machine MACH="mira">
<DESC>ANL IBM BG/Q, os is BGQ, 16 cores/node, batch system is cobalt</DESC>
<NODENAME_REGEX>mira.*</NODENAME_REGEX>
<OS>BGQ</OS>
<COMPILERS>ibm</COMPILERS>
<MPILIBS>ibm</MPILIBS>
<PROJECT>ClimateEnergy_2</PROJECT>
<SAVE_TIMING_DIR>/projects/$PROJECT</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>ClimateEnergy_2</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>/projects/$PROJECT/$USER</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/projects/ccsm/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/projects/ccsm/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/projects/$PROJECT/$USER/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/projects/ccsm/ccsm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/projects/ccsm/tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>cobalt</BATCH_SYSTEM>
<SUPPORTED_BY>mickelso -at- mcs.anl.gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>4</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>/usr/bin/runjob</executable>
<arguments>
<arg name="label">--label short</arg>
<arg name="tasks_per_node">--ranks-per-node $MAX_MPITASKS_PER_NODE</arg>
<arg name="num_tasks">--np {{ total_tasks }}</arg>
<arg name="locargs">--block $COBALT_PARTNAME $LOCARGS</arg>
<arg name="bgq_smp_vars">$ENV{BGQ_SMP_VARS}</arg>
<arg name="stacksize">$ENV{BGQ_STACKSIZE}</arg>
</arguments>
</mpirun>
<module_system type="soft">
<init_path lang="csh">/etc/profile.d/00softenv.csh</init_path>
<init_path lang="sh">/etc/profile.d/00softenv.sh</init_path>
<cmd_path lang="csh">soft</cmd_path>
<cmd_path lang="sh">soft</cmd_path>
<modules>
<command name="add">+mpiwrapper-xl</command>
<command name="add">@ibm-compilers-2016-05</command>
<command name="add">+cmake</command>
<command name="add">+python</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="MPI_TYPE_MAX">10000</env>
<env name="BGQ_SMP_VARS"> </env>
<env name="BGQ_STACKSIZE"> </env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE">
<env name="BGQ_SMP_VARS">--envs BG_THREADLAYOUT=1 XL_BG_SPREADLAYOUT=YES OMP_DYNAMIC=FALSE OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS}</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" MAX_MPITASKS_PER_NODE="!16">
<env name="BGQ_STACKSIZE">--envs OMP_STACKSIZE=64M</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" MAX_MPITASKS_PER_NODE="16">
<env name="BGQ_STACKSIZE">--envs OMP_STACKSIZE=16M</env>
</environment_variables>
</machine>
<machine MACH="theta">
<DESC>ALCF Cray XC40 KNL, os is CNL, 64 pes/node, batch system is cobalt</DESC>
<NODENAME_REGEX>theta.*</NODENAME_REGEX>
<OS>CNL</OS>
<COMPILERS>intel,gnu,cray</COMPILERS>
<MPILIBS>mpt</MPILIBS>
<SAVE_TIMING_DIR>/projects/$PROJECT</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>ClimateEnergy_3,OceanClimate_2</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>/projects/$PROJECT/$USER</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/projects/ccsm/acme/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/projects/ccsm/acme/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/projects/$PROJECT/acme/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/projects/ccsm/acme/tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<NTEST_PARALLEL_JOBS>4</NTEST_PARALLEL_JOBS>
<BATCH_SYSTEM>cobalt_theta</BATCH_SYSTEM>
<SUPPORTED_BY>E3SM</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>aprun</executable>
<arguments>
<arg name="num_tasks">-n {{ total_tasks }}</arg>
<arg name="tasks_per_node">-N $SHELL{if [ `./xmlquery --value MAX_MPITASKS_PER_NODE` -gt `./xmlquery --value TOTAL_TASKS` ];then echo `./xmlquery --value TOTAL_TASKS`;else echo `./xmlquery --value MAX_MPITASKS_PER_NODE`;fi;}</arg>
<arg name="hyperthreading">--cc depth -d $SHELL{echo `./xmlquery --value MAX_TASKS_PER_NODE`/`./xmlquery --value MAX_MPITASKS_PER_NODE`|bc} -j $SHELL{if [ 64 -ge `./xmlquery --value MAX_TASKS_PER_NODE` ];then echo 1;else echo `./xmlquery --value MAX_TASKS_PER_NODE`/64|bc;fi;}</arg>
<arg name="env_vars">$ENV{SMP_VARS} $ENV{labeling}</arg>
</arguments>
</mpirun>
<module_system type="module">
<init_path lang="perl">/opt/modules/default/init/perl.pm</init_path>
<init_path lang="python">/opt/modules/default/init/python.py</init_path>
<init_path lang="sh">/opt/modules/default/init/sh</init_path>
<init_path lang="csh">/opt/modules/default/init/csh</init_path>
<cmd_path lang="perl">/opt/modules/default/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/opt/modules/default/bin/modulecmd python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<modules>
<command name="rm">craype-mic-knl</command>
<command name="rm">PrgEnv-intel</command>
<command name="rm">PrgEnv-cray</command>
<command name="rm">PrgEnv-gnu</command>
<command name="rm">intel</command>
<command name="rm">cce</command>
<command name="rm">cray-mpich</command>
<command name="rm">cray-parallel-netcdf</command>
<command name="rm">cray-hdf5-parallel</command>
<command name="rm">cray-hdf5</command>
<command name="rm">cray-netcdf</command>
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="rm">cray-libsci</command>
<command name="rm">craype</command>
<command name="load">craype/2.5.12</command>
<command name="load">cmake/3.11.4</command>
</modules>
<modules compiler="intel">
<command name="load">intel/18.0.0.128</command>
<command name="load">PrgEnv-intel/6.0.4</command>
</modules>
<modules compiler="cray">
<command name="load">cce/8.6.2</command>
<command name="load">PrgEnv-cray/6.0.4</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/7.3.0</command>
<command name="load">PrgEnv-gnu/6.0.4</command>
</modules>
<modules compiler="!intel">
<command name="switch">cray-libsci/17.09.1</command>
</modules>
<modules>
<command name="load">craype-mic-knl</command>
<command name="load">cray-mpich/7.6.2</command>
</modules>
<modules mpilib="mpt">
<command name="load">cray-netcdf/4.4.1.1.3</command>
<command name="load">cray-parallel-netcdf/1.8.1.3</command>
</modules>
<modules mpilib="mpi-serial">
<command name="load">cray-netcdf/4.4.1.1.3</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<environment_variables>
<env name="MPICH_ENV_DISPLAY">1</env>
<env name="MPICH_VERSION_DISPLAY">1</env>
<!--env name="MPICH_CPUMASK_DISPLAY">1</env-->
<env name="MPAS_TOOL_DIR">/projects/ccsm/acme/tools/mpas</env>
<env name="HDF5_DISABLE_VERSION_CHECK">2</env>
<env name="labeling"> </env>
<env name="SMP_VARS"> </env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" compiler="intel">
<env name="SMP_VARS">-e OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS} -e OMP_STACKSIZE=128M -e KMP_AFFINITY=granularity=thread,scatter</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" compiler="!intel">
<env name="SMP_VARS">-e OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS} -e OMP_STACKSIZE=128M -e OMP_PROC_BIND=spread -e OMP_PLACES=threads</env>
</environment_variables>
<environment_variables DEBUG="TRUE">
<env name="labeling">-e PMI_LABEL_ERROUT=1</env>
</environment_variables>
</machine>
<machine MACH="jlse">
<DESC>ANL experimental/evaluation cluster, batch system is cobalt</DESC>
<NODENAME_REGEX>jlse.*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,gnu</COMPILERS>
<MPILIBS>mpich</MPILIBS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/acme/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/home/azamat/acme/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/azamat/acme/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$ENV{HOME}/acme/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/home/azamat/acme/tools/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>acme_developer</TESTS>
<BATCH_SYSTEM>cobalt_theta</BATCH_SYSTEM>
<SUPPORTED_BY>e3sm</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>128</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -n $TOTALPES</arg>
</arguments>
</mpirun>
<module_system type="soft">
<init_path lang="sh">/etc/bashrc</init_path>
<cmd_path lang="sh">source</cmd_path>
<modules>
<command name="ignore">/soft/compilers/intel/compilers_and_libraries/linux/bin/compilervars.sh intel64</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<environment_variables>
<env name="MPICH_ENV_DISPLAY">1</env>
<env name="MPICH_VERSION_DISPLAY">1</env>
<env name="MPICH_CPUMASK_DISPLAY">1</env>
<env name="MPICH_MEMORY_REPORT">1</env>
<env name="PATH">/home/azamat/perl5/bin:$ENV{PATH}</env>
<env name="PERL5LIB">/home/azamat/perl5/lib/perl5</env>
<env name="PERL_LOCAL_LIB_ROOT">/home/azamat/perl5</env>
<env name="PERL_MB_OPT">"--install_base \"/home/azamat/perl5\""</env>
<env name="PERL_MM_OPT">"INSTALL_BASE=/home/azamat/perl5"</env>
</environment_variables>
<environment_variables compiler="intel">
<env name="NETCDF_PATH">/home/azamat/soft/netcdf/4.3.3.1c-4.2cxx-4.4.2f/intel18</env>
<env name="PNETCDF_PATH">/home/azamat/soft/pnetcdf/1.6.1/intel18</env>
<env name="I_MPI_DEBUG">10</env>
<env name="I_MPI_PIN_CELL">core</env>
</environment_variables>
<environment_variables compiler="!intel">
<env name="NETCDF_PATH">/home/azamat/soft/netcdf/4.3.3.1c-4.2cxx-4.4.2f/gnu-arm</env>
<env name="PNETCDF_PATH">/home/azamat/soft/pnetcdf/1.6.1/gnu-arm</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" compiler="intel">
<env name="KMP_AFFINITY">verbose,granularity=thread,scatter</env>
<env name="OMP_STACKSIZE">256M</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE" compiler="!intel">
<env name="OMP_PROC_BIND">spread</env>
<env name="OMP_PLACES">threads</env>
<env name="OMP_STACKSIZE">256M</env>
</environment_variables>
</machine>
<machine MACH="sooty">
<!-- PNNL "sooty" cluster: Intel/PGI compilers, MVAPICH2 under SLURM, 8 tasks/node. -->
<DESC>PNL cluster, OS is Linux, batch system is SLURM</DESC>
<NODENAME_REGEX>sooty</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,pgi</COMPILERS>
<MPILIBS>mvapich2</MPILIBS>
<CIME_OUTPUT_ROOT>/lustre/$USER/cime_output_root</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/lustre/climate/csmdata/</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/lustre/climate/csmdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/lustre/$USER/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lustre/climate/acme_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/lustre/climate/acme_baselines/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>balwinder.singh -at- pnnl.gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>8</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>8</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<mpirun mpilib="mvapich2">
<executable>srun</executable>
<arguments>
<arg name="mpi">--mpi=none</arg>
<arg name="num_tasks">--ntasks={{ total_tasks }}</arg>
<arg name="cpu_bind">--cpu_bind=sockets --cpu_bind=verbose</arg>
<arg name="kill-on-bad-exit">--kill-on-bad-exit</arg>
</arguments>
</mpirun>
<module_system type="module">
<init_path lang="perl">/share/apps/modules/Modules/3.2.10/init/perl.pm</init_path>
<init_path lang="python">/share/apps/modules/Modules/3.2.10/init/python.py</init_path>
<init_path lang="csh">/etc/profile.d/modules.csh</init_path>
<init_path lang="sh">/etc/profile.d/modules.sh</init_path>
<cmd_path lang="perl">/share/apps/modules/Modules/3.2.10/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/share/apps/modules/Modules/3.2.10/bin/modulecmd python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<modules>
<command name="purge"/>
</modules>
<modules>
<command name="load">perl/5.20.0</command>
<command name="load">cmake/3.3.0</command>
<command name="load">python/2.7.8</command>
<command name="load">svn/1.8.13</command>
</modules>
<modules compiler="intel">
<command name="load">intel/15.0.1</command>
<command name="load">mkl/15.0.1</command>
</modules>
<modules compiler="pgi">
<command name="load">pgi/14.10</command>
</modules>
<modules mpilib="mvapich2">
<command name="load">mvapich2/2.1</command>
</modules>
<modules>
<command name="load">netcdf/4.3.2</command>
</modules>
</module_system>
<RUNDIR>/lustre/$USER/csmruns/$CASE/run</RUNDIR>
<EXEROOT>/lustre/$USER/csmruns/$CASE/bld</EXEROOT>
<environment_variables>
<!-- FIX: value previously had a trailing space ("$ENV{MKLROOT} ") which would
     embed stray whitespace in a path-valued environment variable -->
<env name="MKLROOT">$ENV{MKLROOT}</env>
<env name="NETCDF_PATH">$ENV{NETCDF_LIB}/../</env>
<env name="OMP_STACKSIZE">64M</env>
</environment_variables>
</machine>
<machine MACH="cascade">
<!-- PNNL "cascade" Intel KNC cluster: intel compiler only, Intel MPI or MVAPICH2
     under SLURM, 16 tasks/node.  Login nodes are detected via "glogin". -->
<DESC>PNNL Intel KNC cluster, OS is Linux, batch system is SLURM</DESC>
<NODENAME_REGEX>glogin</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel</COMPILERS>
<MPILIBS>impi,mvapich2</MPILIBS>
<CIME_OUTPUT_ROOT>/dtemp/$PROJECT/$USER</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/dtemp/st49401/sing201/acme/inputdata/</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/dtemp/st49401/sing201/acme/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<!-- NOTE(review): unlike most machines in this file, BASELINE_ROOT has no
     $COMPILER suffix here; confirm baselines are not compiler-separated on cascade -->
<BASELINE_ROOT>$CIME_OUTPUT_ROOT/acme/acme_baselines</BASELINE_ROOT>
<CCSM_CPRNC>$CIME_OUTPUT_ROOT/acme/acme_baselines/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>balwinder.singh -at- pnnl.gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>16</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>16</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<mpirun mpilib="impi">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -np {{ total_tasks }}</arg>
</arguments>
</mpirun>
<!-- MVAPICH2 jobs are launched through SLURM's srun rather than mpirun -->
<mpirun mpilib="mvapich2">
<executable>srun</executable>
<arguments>
<arg name="mpi">--mpi=none</arg>
<arg name="num_tasks">--ntasks={{ total_tasks }}</arg>
<arg name="cpu_bind">--cpu_bind=sockets --cpu_bind=verbose</arg>
<arg name="kill-on-bad-exit">--kill-on-bad-exit</arg>
</arguments>
</mpirun>
<module_system type="module">
<init_path lang="python">/opt/lmod/7.8.4/init/env_modules_python.py</init_path>
<init_path lang="csh">/etc/profile.d/modules.csh</init_path>
<init_path lang="sh">/etc/profile.d/modules.sh</init_path>
<cmd_path lang="python">/opt/lmod/7.8.4/libexec/lmod python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<modules>
<command name="purge"/>
</modules>
<modules>
<command name="load">python/2.7.9</command>
</modules>
<modules compiler="intel">
<command name="load">intel/ips_18</command>
<command name="load">mkl/14.0</command>
</modules>
<modules mpilib="impi">
<command name="load">impi/4.1.2.040</command>
</modules>
<modules mpilib="mvapich2">
<command name="load">mvapich2/1.9</command>
</modules>
<modules>
<command name="load">netcdf/4.3.0</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/csmruns/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/csmruns/$CASE/bld</EXEROOT>
<environment_variables>
<env name="OMP_STACKSIZE">64M</env>
<!-- NETCDF_HOME is derived from the NETCDF_ROOT set by the netcdf module -->
<env name="NETCDF_HOME">$ENV{NETCDF_ROOT}</env>
</environment_variables>
<environment_variables compiler="intel">
<env name="MKL_PATH">$ENV{MLIBHOME}</env>
<env name="COMPILER">intel</env>
</environment_variables>
</machine>
<machine MACH="constance">
<!-- PNNL "constance" Haswell cluster: intel/pgi/nag compilers with several MPI
     stacks under SLURM, 24 tasks/node. -->
<DESC>PNL Haswell cluster, OS is Linux, batch system is SLURM</DESC>
<NODENAME_REGEX>constance</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,pgi,nag</COMPILERS>
<MPILIBS>mvapich2,openmpi,intelmpi,mvapich</MPILIBS>
<CIME_OUTPUT_ROOT>/pic/scratch/$USER</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/pic/projects/climate/csmdata/</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/pic/projects/climate/csmdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/pic/scratch/$USER/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/pic/projects/climate/acme_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/pic/projects/climate/acme_baselines/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>balwinder.singh -at- pnnl.gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>24</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>24</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<!-- both mvapich2 and mvapich launch through srun; intelmpi/openmpi use mpirun -->
<mpirun mpilib="mvapich2">
<executable>srun</executable>
<arguments>
<arg name="mpi">--mpi=none</arg>
<arg name="num_tasks">--ntasks={{ total_tasks }}</arg>
<arg name="cpu_bind">--cpu_bind=sockets --cpu_bind=verbose</arg>
<arg name="kill-on-bad-exit">--kill-on-bad-exit</arg>
</arguments>
</mpirun>
<mpirun mpilib="mvapich">
<executable>srun</executable>
<arguments>
<arg name="num_tasks">--ntasks={{ total_tasks }}</arg>
<arg name="cpu_bind">--cpu_bind=sockets --cpu_bind=verbose</arg>
<arg name="kill-on-bad-exit">--kill-on-bad-exit</arg>
</arguments>
</mpirun>
<mpirun mpilib="intelmpi">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<mpirun mpilib="openmpi">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<module_system type="module">
<init_path lang="perl">/share/apps/modules/Modules/3.2.10/init/perl.pm</init_path>
<init_path lang="python">/share/apps/modules/Modules/3.2.10/init/python.py</init_path>
<init_path lang="csh">/etc/profile.d/modules.csh</init_path>
<init_path lang="sh">/etc/profile.d/modules.sh</init_path>
<cmd_path lang="perl">/share/apps/modules/Modules/3.2.10/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/share/apps/modules/Modules/3.2.10/bin/modulecmd python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<modules>
<command name="purge"/>
</modules>
<modules>
<command name="load">perl/5.20.0</command>
<command name="load">cmake/3.3.0</command>
<command name="load">python/2.7.8</command>
</modules>
<modules compiler="intel">
<command name="load">intel/15.0.1</command>
<command name="load">mkl/15.0.1</command>
</modules>
<modules compiler="pgi">
<command name="load">pgi/14.10</command>
</modules>
<modules compiler="nag">
<command name="load">nag/6.0</command>
<command name="load">mkl/15.0.1</command>
</modules>
<!-- NOTE(review): the mpilib="mvapich" block loads the mvapich2/2.1 module;
     confirm this aliasing of mvapich onto mvapich2 is intentional -->
<modules mpilib="mvapich">
<command name="load">mvapich2/2.1</command>
</modules>
<modules mpilib="mvapich2" compiler="intel">
<command name="load">mvapich2/2.1</command>
</modules>
<modules mpilib="mvapich2" compiler="pgi">
<command name="load">mvapich2/2.1</command>
</modules>
<modules mpilib="mvapich2" compiler="nag">
<command name="load">mvapich2/2.3b</command>
</modules>
<modules mpilib="intelmpi">
<command name="load">intelmpi/5.0.1.035</command>
</modules>
<modules mpilib="openmpi">
<command name="load">openmpi/1.8.3</command>
</modules>
<!-- netcdf version differs per compiler: 4.3.2 for intel/pgi, 4.4.1.1 for nag -->
<modules compiler="intel">
<command name="load">netcdf/4.3.2</command>
</modules>
<modules compiler="pgi">
<command name="load">netcdf/4.3.2</command>
</modules>
<modules compiler="nag">
<command name="load">netcdf/4.4.1.1</command>
</modules>
</module_system>
<RUNDIR>/pic/scratch/$USER/csmruns/$CASE/run</RUNDIR>
<EXEROOT>/pic/scratch/$USER/csmruns/$CASE/bld</EXEROOT>
<environment_variables>
<env name="OMP_STACKSIZE">64M</env>
<env name="NETCDF_HOME">$ENV{NETCDF_LIB}/../</env>
</environment_variables>
<environment_variables compiler="intel">
<env name="MKL_PATH">$ENV{MLIB_LIB}</env>
</environment_variables>
<environment_variables compiler="nag">
<env name="MKL_PATH">$ENV{MLIB_LIB}</env>
</environment_variables>
</machine>
<machine MACH="compy">
<!-- PNNL "compy" Skylake cluster: intel/pgi compilers, Intel MPI or MVAPICH2
     under SLURM, 40 tasks/node. -->
<DESC>PNL E3SM Intel Xeon Gold 6148(Skylake) nodes, OS is Linux, SLURM</DESC>
<NODENAME_REGEX>compy</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,pgi</COMPILERS>
<MPILIBS>impi,mvapich2</MPILIBS>
<SAVE_TIMING_DIR>/compyfs</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>.*</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>/compyfs/$USER/e3sm_scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/compyfs/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/compyfs/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/compyfs/$USER/e3sm_scratch/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/compyfs/e3sm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/compyfs/e3sm_baselines/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_integration</TESTS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>bibi.mathew -at- pnnl.gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>40</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>40</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<mpirun mpilib="mvapich2">
<executable>srun</executable>
<arguments>
<arg name="mpi">--mpi=none</arg>
<arg name="num_tasks">--ntasks={{ total_tasks }} --nodes={{ num_nodes }}</arg>
<arg name="kill-on-bad-exit">--kill-on-bad-exit</arg>
<!-- "40" in plane= is the per-node task count; keep in sync with MAX_TASKS_PER_NODE -->
<arg name="cpu_bind">-l --cpu_bind=cores -c $ENV{OMP_NUM_THREADS} -m plane=$SHELL{echo 40/$OMP_NUM_THREADS|bc}</arg>
</arguments>
</mpirun>
<mpirun mpilib="impi">
<executable>srun</executable>
<arguments>
<arg name="mpi">--mpi=pmi2</arg>
<arg name="num_tasks">--ntasks={{ total_tasks }} --nodes={{ num_nodes }}</arg>
<arg name="kill-on-bad-exit">--kill-on-bad-exit</arg>
<!-- "40" in plane= is the per-node task count; keep in sync with MAX_TASKS_PER_NODE -->
<arg name="cpu_bind">-l --cpu_bind=cores -c $ENV{OMP_NUM_THREADS} -m plane=$SHELL{echo 40/$OMP_NUM_THREADS|bc}</arg>
</arguments>
</mpirun>
<module_system type="module">
<init_path lang="perl">/share/apps/modules/init/perl.pm</init_path>
<init_path lang="python">/share/apps/modules/init/python.py</init_path>
<init_path lang="csh">/etc/profile.d/modules.csh</init_path>
<init_path lang="sh">/etc/profile.d/modules.sh</init_path>
<!-- FIX: removed stray leading space before the perl modulecmd path, which was
     inconsistent with every other cmd_path entry in this file -->
<cmd_path lang="perl">/share/apps/modules/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/share/apps/modules/bin/modulecmd python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<modules>
<command name="purge"/>
</modules>
<modules>
<command name="load">cmake/3.11.4</command>
</modules>
<modules compiler="intel">
<command name="load">intel/19.0.5</command>
</modules>
<modules compiler="pgi">
<command name="load">pgi/18.10</command>
</modules>
<modules mpilib="mvapich2">
<command name="load">mvapich2/2.3.1</command>
</modules>
<!-- Intel MPI build differs by compiler -->
<modules mpilib="impi" compiler="intel">
<command name="load">intelmpi/2019u4</command>
</modules>
<modules mpilib="impi" compiler="pgi">
<command name="load">intelmpi/2019u3</command>
</modules>
<modules>
<command name="load">netcdf/4.6.3</command>
<command name="load">pnetcdf/1.9.0</command>
<command name="load">mkl/2019u5</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<TEST_TPUT_TOLERANCE>0.05</TEST_TPUT_TOLERANCE>
<MAX_GB_OLD_TEST_DATA>1000</MAX_GB_OLD_TEST_DATA>
<environment_variables>
<env name="NETCDF_HOME">$ENV{NETCDF_ROOT}/</env>
<env name="MKL_PATH">$ENV{MKLROOT}</env>
</environment_variables>
<environment_variables mpilib="mvapich2">
<env name="MV2_ENABLE_AFFINITY">0</env>
<env name="MV2_SHOW_CPU_BINDING">1</env>
</environment_variables>
<environment_variables mpilib="impi">
<env name="I_MPI_ADJUST_ALLREDUCE">1</env>
</environment_variables>
<environment_variables mpilib="impi" DEBUG="TRUE">
<env name="I_MPI_DEBUG">10</env>
</environment_variables>
<environment_variables SMP_PRESENT="TRUE">
<env name="OMP_STACKSIZE">64M</env>
<env name="OMP_PLACES">cores</env>
</environment_variables>
</machine>
<machine MACH="oic5">
<!-- ORNL "oic5": gnu compiler, mpich/openmpi under PBS, 32 tasks/node.
     No module system (type="none"); the mpirun executable is an absolute path. -->
<DESC>ORNL XK6, os is Linux, 32 pes/node, batch system is PBS</DESC>
<NODENAME_REGEX>oic5</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>gnu</COMPILERS>
<MPILIBS>mpich,openmpi</MPILIBS>
<CIME_OUTPUT_ROOT>/home/$USER/models/ACME</CIME_OUTPUT_ROOT>
<!-- NOTE(review): input data lives under a specific user's home directory -->
<DIN_LOC_ROOT>/home/zdr/models/ccsm_inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/zdr/models/ccsm_inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/home/$USER/models/ACME/run/archive/$CASE</DOUT_S_ROOT>
<GMAKE_J>32</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>pbs</BATCH_SYSTEM>
<SUPPORTED_BY>dmricciuto</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>32</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<mpirun mpilib="mpich">
<executable>/projects/cesm/devtools/mpich-3.0.4-gcc4.8.1/bin/mpirun</executable>
<arguments>
<arg name="num_tasks"> -np {{ total_tasks }}</arg>
<arg name="machine_file">--hostfile $ENV{PBS_NODEFILE}</arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable> </executable>
</mpirun>
<module_system type="none" />
<RUNDIR>/home/$USER/models/ACME/run/$CASE/run</RUNDIR>
<EXEROOT>/home/$USER/models/ACME/run/$CASE/bld</EXEROOT>
</machine>
<machine MACH="cades">
<!-- ORNL CADES-CCSI condo ("or-condo"): gnu/intel compilers, OpenMPI under PBS. -->
<DESC>OR-CONDO, CADES-CCSI, os is Linux, 16 pes/nodes, batch system is PBS</DESC>
<NODENAME_REGEX>or-condo</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>gnu,intel</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<CIME_OUTPUT_ROOT>/lustre/or-hydra/cades-ccsi/scratch/$USER</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/lustre/or-hydra/cades-ccsi/proj-shared/project_acme/ACME_inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/lustre/or-hydra/cades-ccsi/proj-shared/project_acme/ACME_inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lustre/or-hydra/cades-ccsi/proj-shared/project_acme/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/lustre/or-hydra/cades-ccsi/proj-shared/tools/cprnc.orcondo</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>pbs</BATCH_SYSTEM>
<SUPPORTED_BY>yinj -at- ornl.gov</SUPPORTED_BY>
<!-- NOTE(review): DESC says 16 pes/node but task limits below are 32 — confirm -->
<MAX_TASKS_PER_NODE>32</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<mpirun mpilib="openmpi" compiler="gnu">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -np {{ total_tasks }}</arg>
<arg name="machine_file">--hostfile $ENV{PBS_NODEFILE}</arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable> </executable>
</mpirun>
<module_system type="module">
<init_path lang="sh">/usr/share/Modules/init/sh</init_path>
<init_path lang="csh">/usr/share/Modules/init/csh</init_path>
<init_path lang="perl">/usr/share/Modules/init/perl.pm</init_path>
<init_path lang="python">/usr/share/Modules/init/python.py</init_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="perl">/usr/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/usr/bin/modulecmd python</cmd_path>
<modules>
<command name="purge"/>
</modules>
<modules compiler="gnu">
<command name="load">PE-gnu</command>
</modules>
<modules>
<command name="load">mkl/2017</command>
<command name="load">/lustre/or-hydra/cades-ccsi/proj-shared/tools/cmake/3.6.1</command>
<command name="load">python/2.7.12</command>
<command name="load">/lustre/or-hydra/cades-ccsi/proj-shared/tools/nco/4.6.4</command>
<command name="load">hdf5-parallel/1.8.17</command>
<command name="load">netcdf-hdf5parallel/4.3.3.1</command>
<command name="load">pnetcdf/1.9.0</command>
</modules>
</module_system>
<!-- customize these fields as appropriate for your system (max tasks) and
desired layout (change '${group}/${USER}' to your
preferred location). -->
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<!-- for CLM-PFLOTRAN coupling, the PETSC_PATH must be defined specifically upon machines -->
<environment_variables compiler="gnu" mpilib="openmpi">
<env name="PETSC_PATH">/software/user_tools/current/cades-ccsi/petsc4pf/openmpi-1.10-gcc-5.3</env>
</environment_variables>
</machine>
<machine MACH="titan">
<!-- ORNL "titan" Cray XK6 (CNL): pgi/pgiacc/intel/cray compilers over cray-mpich,
     PBS batch, 16 tasks/node.  NODE_FAIL_REGEX lets CIME detect failed nodes
     from job output and retry with spare nodes (ALLOCATE_SPARE_NODES=TRUE). -->
<DESC>ORNL XK6, os is CNL, 16 pes/node, batch system is PBS</DESC>
<NODENAME_REGEX>titan</NODENAME_REGEX>
<NODE_FAIL_REGEX>Received node event ec_node</NODE_FAIL_REGEX>
<OS>CNL</OS>
<COMPILERS>pgi,pgiacc,intel,cray</COMPILERS>
<MPILIBS>mpich</MPILIBS>
<PROJECT>cli115</PROJECT>
<SAVE_TIMING_DIR>$ENV{PROJWORK}/$PROJECT</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>cli106,cli115,cli127,cli133,csc190</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/acme_scratch/$PROJECT</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/lustre/atlas1/cli900/world-shared/cesm/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/lustre/atlas1/cli900/world-shared/cesm/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$ENV{MEMBERWORK}/$PROJECT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lustre/atlas1/cli115/world-shared/E3SM/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/lustre/atlas1/cli900/world-shared/cesm/tools/cprnc/cprnc.titan</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<NTEST_PARALLEL_JOBS>4</NTEST_PARALLEL_JOBS>
<BATCH_SYSTEM>pbs</BATCH_SYSTEM>
<ALLOCATE_SPARE_NODES>TRUE</ALLOCATE_SPARE_NODES>
<SUPPORTED_BY>E3SM</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>16</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>16</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>aprun</executable>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable>aprun</executable>
</mpirun>
<module_system type="module">
<!-- list of init_path elements, one per supported language e.g. sh, perl, python-->
<init_path lang="sh">/opt/modules/default/init/sh</init_path>
<init_path lang="csh">/opt/modules/default/init/csh</init_path>
<init_path lang="python">/opt/modules/default/init/python.py</init_path>
<init_path lang="perl">/opt/modules/default/init/perl.pm</init_path>
<!-- list of cmd_path elements, one for every supported language, e.g. sh, perl, python -->
<cmd_path lang="perl">/opt/modules/default/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/opt/modules/default/bin/modulecmd python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<!-- List of modules elements, executing commands
if compiler and mpilib condition applies -->
<!-- Always execute -->
<modules>
<command name="load">python/2.7.9</command>
<command name="unload">subversion</command>
<command name="load">subversion/1.9.3</command>
<command name="unload">cmake</command>
<command name="load">cmake3/3.6.0</command>
</modules>
<modules compiler="pgiacc"> <!-- changing pgi_acc to pgiacc -->
<command name="rm">PrgEnv-cray</command>
<command name="rm">PrgEnv-gnu</command>
<command name="rm">PrgEnv-intel</command>
<command name="rm">PrgEnv-pathscale</command>
<command name="load">PrgEnv-pgi</command>
<command name="switch">pgi pgi/17.5.0</command>
<command name="rm">cray-mpich</command>
<command name="rm">cray-libsci</command>
<command name="rm">atp</command>
<command name="rm">esmf</command>
<command name="rm">cudatoolkit</command>
<command name="load">cray-mpich/7.6.3</command>
<command name="load">cray-libsci/16.11.1</command>
<command name="load">atp/2.1.1</command>
<command name="load">esmf/5.2.0rp2</command>
<command name="load">cudatoolkit</command>
</modules>
<!-- same as pgiacc minus cudatoolkit -->
<modules compiler="pgi">
<command name="rm">PrgEnv-cray</command>
<command name="rm">PrgEnv-gnu</command>
<command name="rm">PrgEnv-intel</command>
<command name="rm">PrgEnv-pathscale</command>
<command name="load">PrgEnv-pgi</command>
<command name="switch">pgi pgi/17.5.0</command>
<command name="rm">cray-mpich</command>
<command name="rm">cray-libsci</command>
<command name="rm">atp</command>
<command name="rm">esmf</command>
<command name="load">cray-mpich/7.6.3</command>
<command name="load">cray-libsci/16.11.1</command>
<command name="load">atp/2.1.1</command>
<command name="load">esmf/5.2.0rp2</command>
</modules>
<modules compiler="intel">
<command name="rm">PrgEnv-pgi</command>
<command name="rm">PrgEnv-cray</command>
<command name="rm">PrgEnv-gnu</command>
<command name="rm">PrgEnv-pathscale</command>
<command name="load">PrgEnv-intel</command>
<command name="rm">intel</command>
<command name="rm">cray-libsci</command>
<command name="rm">cray-mpich</command>
<command name="rm">atp</command>
<command name="load">intel/18.0.1.163</command>
<command name="load">cray-mpich/7.6.3</command>
<command name="load">atp/2.1.1</command>
</modules>
<modules compiler="cray">
<command name="rm">PrgEnv-pgi</command>
<command name="rm">PrgEnv-gnu</command>
<command name="rm">PrgEnv-intel</command>
<command name="rm">PrgEnv-pathscale</command>
<command name="load">PrgEnv-cray</command>
<command name="rm">cce</command>
<command name="rm">cray-mpich</command>
<command name="load">cce/8.6.4</command>
<command name="load">cray-mpich/7.6.3</command>
</modules>
<!-- mpi lib settings -->
<modules mpilib="mpi-serial">
<command name="rm">cray-netcdf</command>
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="load">cray-netcdf/4.4.1.1.3</command>
</modules>
<modules mpilib="!mpi-serial">
<command name="rm">cray-netcdf</command>
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="load">cray-netcdf/4.4.1.1.3</command>
<command name="load">cray-parallel-netcdf/1.8.1.3</command>
</modules>
</module_system>
<RUNDIR>$ENV{PROJWORK}/$PROJECT/$USER/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<!-- presumably the allowed fractional throughput deviation before a
     performance test is flagged — TODO confirm against CIME docs -->
<TEST_TPUT_TOLERANCE>0.1</TEST_TPUT_TOLERANCE>
<!-- Default -->
<environment_variables>
<env name="COMPILER">$COMPILER</env>
<env name="MPILIB">$MPILIB</env>
<env name="MPICH_ENV_DISPLAY">1</env>
<env name="MPICH_VERSION_DISPLAY">1</env>
<!--env name="MPICH_CPUMASK_DISPLAY">1</env-->
<env name="MPSTKZ">128M</env>
<env name="OMP_STACKSIZE">128M</env>
</environment_variables>
<!-- Set if compiler and mpilib -->
<environment_variables compiler="pgiacc">
<!-- NOTE(wjs, 2015-03-12) The following line is needed for bit-for-bit reproducibility -->
<env name="CRAY_CPU_TARGET">istanbul</env>
<env name="CRAY_CUDA_MPS">1</env>
</environment_variables>
<environment_variables compiler="intel">
<env name="CRAYPE_LINK_TYPE">dynamic</env>
</environment_variables>
</machine>
<machine MACH="eos">
<!-- ORNL "eos" Cray XC30 (CNL): intel compiler over cray-mpich, PBS batch.
     32 hardware threads per node but only 16 MPI tasks per node. -->
<DESC>ORNL XC30, os is CNL, 16 pes/node, batch system is PBS</DESC>
<NODENAME_REGEX>eos</NODENAME_REGEX>
<OS>CNL</OS>
<COMPILERS>intel</COMPILERS>
<MPILIBS>mpich</MPILIBS>
<SAVE_TIMING_DIR>$ENV{PROJWORK}/$PROJECT</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>cli115,cli127,cli106,csc190</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/acme_scratch/$PROJECT</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/lustre/atlas1/cli900/world-shared/cesm/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/lustre/atlas1/cli900/world-shared/cesm/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$ENV{MEMBERWORK}/$PROJECT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lustre/atlas1/cli900/world-shared/cesm/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/lustre/atlas1/cli900/world-shared/cesm/tools/cprnc/cprnc.eos</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>pbs</BATCH_SYSTEM>
<SUPPORTED_BY>E3SM</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>32</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>16</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="mpich">
<executable>aprun</executable>
<arguments>
<arg name="hyperthreading" default="2"> -j {{ hyperthreading }}</arg>
<arg name="tasks_per_numa"> -S {{ tasks_per_numa }}</arg>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
<arg name="tasks_per_node"> -N $MAX_MPITASKS_PER_NODE</arg>
<arg name="thread_count"> -d $ENV{OMP_NUM_THREADS}</arg>
<arg name="numa_node"> -cc numa_node</arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<module_system type="module">
<init_path lang="sh">$MODULESHOME/init/sh</init_path>
<init_path lang="csh">$MODULESHOME/init/csh</init_path>
<init_path lang="perl">$MODULESHOME/init/perl.pm</init_path>
<init_path lang="python">$MODULESHOME/init/python.py</init_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="perl">$MODULESHOME/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">$MODULESHOME/bin/modulecmd python</cmd_path>
<modules>
<command name="rm">intel</command>
<command name="rm">cray</command>
<command name="rm">cray-parallel-netcdf</command>
<command name="rm">cray-libsci</command>
<command name="rm">cray-netcdf</command>
<command name="rm">cray-netcdf-hdf5parallel</command>
<command name="rm">netcdf</command>
</modules>
<modules compiler="intel">
<command name="load">intel/18.0.1.163</command>
<command name="load">papi</command>
</modules>
<!-- NOTE(review): cray/gnu module blocks exist below although COMPILERS above
     lists only intel — confirm whether they are dead configuration -->
<modules compiler="cray">
<command name="load">PrgEnv-cray</command>
<command name="switch">cce cce/8.1.9</command>
<command name="load">cray-libsci/12.1.00</command>
</modules>
<modules compiler="gnu">
<command name="load">PrgEnv-gnu</command>
<command name="switch">gcc gcc/4.8.0</command>
<command name="load">cray-libsci/12.1.00</command>
</modules>
<modules mpilib="mpi-serial">
<command name="load">cray-netcdf/4.3.2</command>
</modules>
<modules mpilib="!mpi-serial">
<command name="load">cray-netcdf-hdf5parallel/4.3.3.1</command>
<command name="load">cray-parallel-netcdf/1.6.1</command>
</modules>
<modules>
<command name="load">cmake3/3.2.3</command>
<command name="load">python/2.7.9</command>
</modules>
</module_system>
<RUNDIR>$ENV{MEMBERWORK}/$PROJECT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<environment_variables>
<env name="MPICH_ENV_DISPLAY">1</env>
<env name="MPICH_VERSION_DISPLAY">1</env>
<!-- This increases the stack size, which is necessary
for CICE to run threaded on this machine -->
<env name="OMP_STACKSIZE">64M</env>
</environment_variables>
</machine>
<machine MACH="grizzly">
<!-- LANL "grizzly" cluster: gnu/intel compilers, mvapich/openmpi under SLURM. -->
<DESC>LANL Linux Cluster, 36 pes/node, batch system slurm</DESC>
<!-- FIX: escape literal dots in the hostname pattern; the previous
     "gr-fe.*.lanl.gov" let "." match any character, accepting unintended hosts -->
<NODENAME_REGEX>gr-fe.*\.lanl\.gov</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>gnu,intel</COMPILERS>
<MPILIBS>mvapich,openmpi</MPILIBS>
<PROJECT>climateacme</PROJECT>
<CIME_OUTPUT_ROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/input_data</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/input_data/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/input_data/ccsm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/turquoise/usr/projects/climate/SHARED_CLIMATE/software/wolf/cprnc/v0.40/cprnc</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>luke.vanroekel @ gmail.com</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>36</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<!-- mvapich launches via srun; openmpi and the default use mpirun -->
<mpirun mpilib="mvapich">
<executable>srun</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<mpirun mpilib="openmpi">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<module_system type="module">
<init_path lang="perl">/usr/share/Modules/init/perl.pm</init_path>
<init_path lang="python">/usr/share/Modules/init/python.py</init_path>
<init_path lang="sh">/etc/profile.d/z00_lmod.sh</init_path>
<init_path lang="csh">/etc/profile.d/z00_lmod.csh</init_path>
<cmd_path lang="perl">/usr/share/lmod/lmod/libexec/lmod perl</cmd_path>
<cmd_path lang="python">/usr/share/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<modules>
<command name="purge"/>
<command name="use">/usr/projects/climate/SHARED_CLIMATE/modulefiles/all</command>
<command name="load">python/anaconda-2.7-climate</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/5.3.0</command>
</modules>
<modules compiler="intel">
<command name="load">intel/17.0.1</command>
</modules>
<modules mpilib="openmpi">
<command name="load">openmpi/1.10.5</command>
</modules>
<modules mpilib="mvapich">
<command name="load">mvapich2/2.2</command>
</modules>
<modules>
<command name="load">netcdf/4.4.1</command>
</modules>
<modules>
<command name="load">parallel-netcdf/1.5.0</command>
</modules>
<modules>
<command name="load">mkl</command>
</modules>
</module_system>
<RUNDIR>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/cases/$CASE/run</RUNDIR>
<EXEROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/cases/$CASE/bld</EXEROOT>
<environment_variables>
<env name="PNETCDF_HINTS">romio_ds_write=disable;romio_ds_read=disable;romio_cb_write=enable;romio_cb_read=enable</env>
</environment_variables>
<!-- gnu builds still link against Intel MKL -->
<environment_variables compiler="gnu">
<env name="MKLROOT">/opt/intel/17.0/mkl</env>
</environment_variables>
</machine>
<machine MACH="badger">
<!-- LANL "badger" cluster: gnu/intel compilers, mvapich/openmpi under SLURM. -->
<DESC>LANL Linux Cluster, 36 pes/node, batch system slurm</DESC>
<!-- FIX: escape literal dots in the hostname pattern; the previous
     "ba-fe.*.lanl.gov" let "." match any character, accepting unintended hosts -->
<NODENAME_REGEX>ba-fe.*\.lanl\.gov</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>gnu,intel</COMPILERS>
<MPILIBS>mvapich,openmpi</MPILIBS>
<PROJECT>climateacme</PROJECT>
<CIME_OUTPUT_ROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/input_data</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/input_data/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/input_data/ccsm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/turquoise/usr/projects/climate/SHARED_CLIMATE/software/wolf/cprnc/v0.40/cprnc</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>e3sm</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>36</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>32</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<!-- mvapich launches via srun; openmpi and the default use mpirun -->
<mpirun mpilib="mvapich">
<executable>srun</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<mpirun mpilib="openmpi">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<module_system type="module">
<init_path lang="perl">/usr/share/Modules/init/perl.pm</init_path>
<init_path lang="python">/usr/share/Modules/init/python.py</init_path>
<init_path lang="sh">/etc/profile.d/z00_lmod.sh</init_path>
<init_path lang="csh">/etc/profile.d/z00_lmod.csh</init_path>
<cmd_path lang="perl">/usr/share/lmod/lmod/libexec/lmod perl</cmd_path>
<cmd_path lang="python">/usr/share/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<modules>
<command name="purge"/>
<command name="use">/usr/projects/climate/SHARED_CLIMATE/modulefiles/all</command>
<command name="load">python/anaconda-2.7-climate</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/6.4.0</command>
</modules>
<modules compiler="intel">
<command name="load">intel/17.0.4</command>
</modules>
<modules mpilib="openmpi">
<command name="load">openmpi/2.1.2</command>
</modules>
<modules mpilib="mvapich">
<command name="load">mvapich2/2.2</command>
</modules>
<modules>
<command name="load">netcdf/4.4.1.1</command>
</modules>
<modules>
<command name="load">parallel-netcdf/1.8.1</command>
</modules>
<modules>
<command name="load">mkl</command>
</modules>
</module_system>
<RUNDIR>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/cases/$CASE/run</RUNDIR>
<EXEROOT>/lustre/scratch3/turquoise/$ENV{USER}/E3SM/cases/$CASE/bld</EXEROOT>
<environment_variables>
<env name="PNETCDF_HINTS">romio_ds_write=disable;romio_ds_read=disable;romio_cb_write=enable;romio_cb_read=enable</env>
</environment_variables>
<!-- gnu builds still link against Intel MKL -->
<environment_variables compiler="gnu">
<env name="MKLROOT">/opt/intel/17.0/mkl</env>
</environment_variables>
</machine>
<machine MACH="mesabi">
<!-- UMN Mesabi: PBS batch cluster, intel compiler with openmpi. Several paths are USERDEFINED placeholders that a user must fill in. -->
<DESC>Mesabi batch queue</DESC>
<OS>LINUX</OS>
<COMPILERS>intel</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<CIME_OUTPUT_ROOT>/home/reichpb/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/home/reichpb/shared/cesm_inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/reichpb/shared/cesm_inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>USERDEFINED_optional_run</DOUT_S_ROOT>
<BASELINE_ROOT>USERDEFINED_optional_run/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>USERDEFINED_optional_test</CCSM_CPRNC>
<GMAKE_J>2</GMAKE_J>
<BATCH_SYSTEM>pbs</BATCH_SYSTEM>
<SUPPORTED_BY>chen1718 at umn dot edu</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>24</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>24</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<!-- NOTE(review): aprun is the Cray ALPS launcher, yet MPILIBS lists only openmpi; confirm this launcher actually exists on Mesabi. -->
<mpirun mpilib="default">
<executable>aprun</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
<arg name="tasks_per_numa"> -S {{ tasks_per_numa }}</arg>
<arg name="tasks_per_node"> -N $MAX_MPITASKS_PER_NODE</arg>
<arg name="thread_count"> -d $ENV{OMP_NUM_THREADS}</arg>
</arguments>
</mpirun>
<!-- No module system: the login environment must already provide compilers and libraries. -->
<module_system type="none"/>
<RUNDIR>$CASEROOT/run</RUNDIR>
<!-- complete path to the run directory -->
<EXEROOT>$CASEROOT/exedir</EXEROOT>
<!-- complete path to the build directory -->
<!-- complete path to the inputdata directory -->
<!-- path to the optional forcing data for CLM (for CRUNCEP forcing) -->
<!--<DOUT_S>FALSE</DOUT_S>-->
<!-- logical for short term archiving -->
<!-- complete path to a short term archiving directory -->
<!-- complete path to a long term archiving directory -->
<!-- where the cesm testing scripts write and read baseline results -->
<!-- path to the cprnc tool used to compare netcdf history files in testing -->
</machine>
<machine MACH="itasca">
<!-- UMN Itasca: PBS batch cluster, intel compiler with openmpi; same placeholder layout as the mesabi entry but 8 tasks/node. -->
<DESC>Itasca batch queue</DESC>
<OS>LINUX</OS>
<COMPILERS>intel</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<CIME_OUTPUT_ROOT>/home/reichpb/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/home/reichpb/shared/cesm_inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/reichpb/shared/cesm_inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>USERDEFINED_optional_run</DOUT_S_ROOT>
<BASELINE_ROOT>USERDEFINED_optional_run/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>USERDEFINED_optional_test</CCSM_CPRNC>
<GMAKE_J>2</GMAKE_J>
<BATCH_SYSTEM>pbs</BATCH_SYSTEM>
<SUPPORTED_BY>chen1718 at umn dot edu</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>8</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>8</MAX_MPITASKS_PER_NODE>
<!-- NOTE(review): aprun is the Cray ALPS launcher, yet MPILIBS lists only openmpi; confirm this launcher actually exists on Itasca. -->
<mpirun mpilib="default">
<executable>aprun</executable>
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }}</arg>
<arg name="tasks_per_numa"> -S {{ tasks_per_numa }}</arg>
<arg name="tasks_per_node"> -N $MAX_MPITASKS_PER_NODE</arg>
<arg name="thread_count"> -d $ENV{OMP_NUM_THREADS}</arg>
</arguments>
</mpirun>
<!-- No module system: the login environment must already provide compilers and libraries. -->
<module_system type="none"/>
<RUNDIR>$CASEROOT/run</RUNDIR>
<!-- complete path to the run directory -->
<EXEROOT>$CASEROOT/exedir</EXEROOT>
<!-- complete path to the build directory -->
<!-- complete path to the inputdata directory -->
<!-- path to the optional forcing data for CLM (for CRUNCEP forcing) -->
<!--<DOUT_S>FALSE</DOUT_S>-->
<!-- logical for short term archiving -->
<!-- complete path to a short term archiving directory -->
<!-- complete path to a long term archiving directory -->
<!-- where the cesm testing scripts write and read baseline results -->
<!-- path to the cprnc tool used to compare netcdf history files in testing -->
</machine>
<machine MACH="lawrencium-lr2">
<!-- LBL Lawrencium LR2 partition: Slurm batch, intel/gnu compilers with openmpi.
     Fixed: DESC previously said "LR6 cluster" (copy-paste from the lawrencium-lr6 entry). -->
<DESC>Lawrencium LR2 cluster at LBL, OS is Linux (intel), batch system is SLURM</DESC>
<!-- NOTE(review): "n000*" matches "n00" followed by zero or more "0"; presumably a glob-style pattern was intended. Same pattern is shared by all three lawrencium entries. -->
<NODENAME_REGEX>n000*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,gnu</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<CHARGE_ACCOUNT>ac_acme</CHARGE_ACCOUNT>
<CIME_OUTPUT_ROOT>/global/scratch/$ENV{USER}</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/global/scratch/$ENV{USER}/cesm_input_datasets/</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/global/scratch/$ENV{USER}/cesm_input_datasets/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/cesm_archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$CIME_OUTPUT_ROOT/cesm_baselines/$COMPILER</BASELINE_ROOT>
<!-- NOTE(review): the leading "/" before $CIME_OUTPUT_ROOT yields a double slash since that root is already absolute; harmless on POSIX but probably unintended. -->
<CCSM_CPRNC>/$CIME_OUTPUT_ROOT/cesm_tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>gbisht at lbl dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>12</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>12</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<!-- Serial and parallel runs both launch through mpirun with identical arguments. -->
<mpirun mpilib="mpi-serial">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-np {{ total_tasks }}</arg>
<arg name="tasks_per_node"> -npernode $MAX_MPITASKS_PER_NODE</arg>
</arguments>
</mpirun>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-np {{ total_tasks }}</arg>
<arg name="tasks_per_node"> -npernode $MAX_MPITASKS_PER_NODE</arg>
</arguments>
</mpirun>
<!-- Environment Modules setup; serial (-s) vs parallel (-p) netcdf variants are selected per mpilib. -->
<module_system type="module">
<init_path lang="sh">/etc/profile.d/modules.sh</init_path>
<init_path lang="csh">/etc/profile.d/modules.csh</init_path>
<init_path lang="perl">/usr/Modules/init/perl.pm</init_path>
<init_path lang="python">/usr/Modules/python.py</init_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="perl">/usr/Modules/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/usr/Modules/bin/modulecmd python</cmd_path>
<modules>
<command name="purge"/>
<command name="load">cmake</command>
<command name="load">perl xml-libxml switch python/2.7</command>
</modules>
<modules compiler="intel">
<command name="load">intel/2016.4.072</command>
<command name="load">mkl</command>
</modules>
<modules compiler="intel" mpilib="mpi-serial">
<command name="load">netcdf/4.4.1.1-intel-s</command>
</modules>
<modules compiler="intel" mpilib="!mpi-serial">
<command name="load">openmpi</command>
<command name="load">netcdf/4.4.1.1-intel-p</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/6.3.0</command>
<command name="load">lapack/3.8.0-gcc</command>
</modules>
<modules compiler="gnu" mpilib="mpi-serial">
<!-- NOTE(review): "netcdf/5.4.1.1-gcc-s" looks like a typo for 4.4.1.1 (the parallel variant below is 4.4.1.1-gcc-p); confirm the module name on the system before changing. -->
<command name="load">netcdf/5.4.1.1-gcc-s</command>
<command name="unload">openmpi/2.0.2-gcc</command>
</modules>
<modules compiler="gnu" mpilib="!mpi-serial">
<command name="load">openmpi/3.0.1-gcc</command>
<command name="load">netcdf/4.4.1.1-gcc-p</command>
<command name="unload">openmpi/2.0.2-gcc</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
</machine>
<machine MACH="lawrencium-lr3">
<!-- LBL Lawrencium LR3 partition: Slurm batch, intel/gnu compilers with openmpi.
     Fixed: DESC previously said "LR6 cluster" (copy-paste from the lawrencium-lr6 entry). -->
<DESC>Lawrencium LR3 cluster at LBL, OS is Linux (intel), batch system is SLURM</DESC>
<!-- NOTE(review): "n000*" matches "n00" followed by zero or more "0"; presumably a glob-style pattern was intended. Same pattern is shared by all three lawrencium entries. -->
<NODENAME_REGEX>n000*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,gnu</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<CHARGE_ACCOUNT>ac_acme</CHARGE_ACCOUNT>
<CIME_OUTPUT_ROOT>/global/scratch/$ENV{USER}</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/global/scratch/$ENV{USER}/cesm_input_datasets/</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/global/scratch/$ENV{USER}/cesm_input_datasets/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/cesm_archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$CIME_OUTPUT_ROOT/cesm_baselines/$COMPILER</BASELINE_ROOT>
<!-- NOTE(review): the leading "/" before $CIME_OUTPUT_ROOT yields a double slash since that root is already absolute; harmless on POSIX but probably unintended. -->
<CCSM_CPRNC>/$CIME_OUTPUT_ROOT/cesm_tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>gbisht at lbl dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>12</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>12</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<!-- Serial and parallel runs both launch through mpirun with identical arguments. -->
<mpirun mpilib="mpi-serial">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-np {{ total_tasks }}</arg>
<arg name="tasks_per_node"> -npernode $MAX_MPITASKS_PER_NODE</arg>
</arguments>
</mpirun>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-np {{ total_tasks }}</arg>
<arg name="tasks_per_node"> -npernode $MAX_MPITASKS_PER_NODE</arg>
</arguments>
</mpirun>
<!-- Environment Modules setup; serial (-s) vs parallel (-p) netcdf variants are selected per mpilib. -->
<module_system type="module">
<init_path lang="sh">/etc/profile.d/modules.sh</init_path>
<init_path lang="csh">/etc/profile.d/modules.csh</init_path>
<init_path lang="perl">/usr/Modules/init/perl.pm</init_path>
<init_path lang="python">/usr/Modules/python.py</init_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="perl">/usr/Modules/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/usr/Modules/bin/modulecmd python</cmd_path>
<modules>
<command name="purge"/>
<command name="load">cmake</command>
<command name="load">perl xml-libxml switch python/2.7</command>
</modules>
<modules compiler="intel">
<command name="load">intel/2016.4.072</command>
<command name="load">mkl</command>
</modules>
<modules compiler="intel" mpilib="mpi-serial">
<command name="load">netcdf/4.4.1.1-intel-s</command>
</modules>
<modules compiler="intel" mpilib="!mpi-serial">
<command name="load">openmpi</command>
<command name="load">netcdf/4.4.1.1-intel-p</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/6.3.0</command>
<command name="load">lapack/3.8.0-gcc</command>
</modules>
<modules compiler="gnu" mpilib="mpi-serial">
<!-- NOTE(review): "netcdf/5.4.1.1-gcc-s" looks like a typo for 4.4.1.1 (the parallel variant below is 4.4.1.1-gcc-p); confirm the module name on the system before changing. -->
<command name="load">netcdf/5.4.1.1-gcc-s</command>
<command name="unload">openmpi/2.0.2-gcc</command>
</modules>
<modules compiler="gnu" mpilib="!mpi-serial">
<command name="load">openmpi/3.0.1-gcc</command>
<command name="load">netcdf/4.4.1.1-gcc-p</command>
<command name="unload">openmpi/2.0.2-gcc</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
</machine>
<machine MACH="lawrencium-lr6">
<!-- LBL Lawrencium LR6 partition: Slurm batch, intel/gnu compilers with openmpi.
     This entry is identical to lawrencium-lr2/lr3 apart from the MACH name. -->
<DESC>Lawrencium LR6 cluster at LBL, OS is Linux (intel), batch system is SLURM</DESC>
<!-- NOTE(review): "n000*" matches "n00" followed by zero or more "0"; presumably a glob-style pattern was intended. -->
<NODENAME_REGEX>n000*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>intel,gnu</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<CHARGE_ACCOUNT>ac_acme</CHARGE_ACCOUNT>
<CIME_OUTPUT_ROOT>/global/scratch/$ENV{USER}</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/global/scratch/$ENV{USER}/cesm_input_datasets/</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/global/scratch/$ENV{USER}/cesm_input_datasets/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>$CIME_OUTPUT_ROOT/cesm_archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>$CIME_OUTPUT_ROOT/cesm_baselines/$COMPILER</BASELINE_ROOT>
<!-- NOTE(review): the leading "/" before $CIME_OUTPUT_ROOT yields a double slash since that root is already absolute; harmless on POSIX but probably unintended. -->
<CCSM_CPRNC>/$CIME_OUTPUT_ROOT/cesm_tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>4</GMAKE_J>
<BATCH_SYSTEM>slurm</BATCH_SYSTEM>
<SUPPORTED_BY>gbisht at lbl dot gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>12</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>12</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<!-- Serial and parallel runs both launch through mpirun with identical arguments. -->
<mpirun mpilib="mpi-serial">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-np {{ total_tasks }}</arg>
<arg name="tasks_per_node"> -npernode $MAX_MPITASKS_PER_NODE</arg>
</arguments>
</mpirun>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-np {{ total_tasks }}</arg>
<arg name="tasks_per_node"> -npernode $MAX_MPITASKS_PER_NODE</arg>
</arguments>
</mpirun>
<!-- Environment Modules setup; serial (-s) vs parallel (-p) netcdf variants are selected per mpilib. -->
<module_system type="module">
<init_path lang="sh">/etc/profile.d/modules.sh</init_path>
<init_path lang="csh">/etc/profile.d/modules.csh</init_path>
<init_path lang="perl">/usr/Modules/init/perl.pm</init_path>
<init_path lang="python">/usr/Modules/python.py</init_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="perl">/usr/Modules/bin/modulecmd perl</cmd_path>
<cmd_path lang="python">/usr/Modules/bin/modulecmd python</cmd_path>
<modules>
<command name="purge"/>
<command name="load">cmake</command>
<command name="load">perl xml-libxml switch python/2.7</command>
</modules>
<modules compiler="intel">
<command name="load">intel/2016.4.072</command>
<command name="load">mkl</command>
</modules>
<modules compiler="intel" mpilib="mpi-serial">
<command name="load">netcdf/4.4.1.1-intel-s</command>
</modules>
<modules compiler="intel" mpilib="!mpi-serial">
<command name="load">openmpi</command>
<command name="load">netcdf/4.4.1.1-intel-p</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/6.3.0</command>
<command name="load">lapack/3.8.0-gcc</command>
</modules>
<modules compiler="gnu" mpilib="mpi-serial">
<!-- NOTE(review): "netcdf/5.4.1.1-gcc-s" looks like a typo for 4.4.1.1 (the parallel variant below is 4.4.1.1-gcc-p); confirm the module name on the system. -->
<command name="load">netcdf/5.4.1.1-gcc-s</command>
<command name="unload">openmpi/2.0.2-gcc</command>
</modules>
<modules compiler="gnu" mpilib="!mpi-serial">
<command name="load">openmpi/3.0.1-gcc</command>
<command name="load">netcdf/4.4.1.1-gcc-p</command>
<command name="unload">openmpi/2.0.2-gcc</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
</machine>
<machine MACH="eddi">
<!-- LBL "eddi" workstation: small 4-core development box, no batch system, gnu compiler with openmpi. -->
<DESC>small developer workhorse at lbl climate sciences</DESC>
<OS>LINUX</OS>
<COMPILERS>gnu</COMPILERS>
<MPILIBS>openmpi</MPILIBS>
<PROJECT>ngeet</PROJECT>
<CIME_OUTPUT_ROOT>/home/lbleco/acme/</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/home/lbleco/cesm/cesm_input_datasets/</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/lbleco/cesm/cesm_input_datasets/atm/datm7/</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/home/lbleco/acme/cesm_archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/home/lbleco/acme/cesm_baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/home/lbleco/cesm/cesm_tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>1</GMAKE_J>
<BATCH_SYSTEM>none</BATCH_SYSTEM>
<SUPPORTED_BY>rgknox at lbl gov</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>4</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>4</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<!-- mpi-serial builds run the executable directly (empty launcher); parallel runs use mpirun. -->
<mpirun mpilib="mpi-serial">
<executable/>
</mpirun>
<mpirun mpilib="default">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">-np {{ total_tasks }}</arg>
<arg name="tasks_per_node"> -npernode $MAX_MPITASKS_PER_NODE</arg>
</arguments>
</mpirun>
<!-- No module system: the login environment must already provide compilers and libraries. -->
<module_system type="none"/>
</machine>
<machine MACH="summitdev">
<!-- ORNL Summitdev: pre-Summit POWER8 + P100 testbed, LSF batch, ibm/pgi compilers with spectrum-mpi.
     MPI is launched through a site helper script rather than jsrun directly. -->
<DESC>ORNL pre-Summit testbed. Node: 2x POWER8 + 4x Tesla P100, 20 cores/node, 8 HW threads/core.</DESC>
<!-- NOTE(review): "summitdev-*" as a regex matches "summitdev" plus zero or more hyphens; "summitdev-.*" was presumably intended. -->
<NODENAME_REGEX>summitdev-*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>ibm,pgi,pgiacc</COMPILERS>
<MPILIBS>spectrum-mpi,mpi-serial</MPILIBS>
<PROJECT>csc249</PROJECT>
<CHARGE_ACCOUNT>CSC249ADSE15</CHARGE_ACCOUNT>
<SAVE_TIMING_DIR>/lustre/atlas/proj-shared/$PROJECT</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>cli115,cli127,cli106,csc190</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>$ENV{HOME}/acme_scratch/$PROJECT</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/lustre/atlas1/cli900/world-shared/cesm/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/lustre/atlas1/cli900/world-shared/cesm/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/lustre/atlas/scratch/$ENV{USER}/$PROJECT/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/lustre/atlas1/cli900/world-shared/cesm/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/lustre/atlas1/cli900/world-shared/cesm/tools/cprnc/cprnc</CCSM_CPRNC>
<GMAKE_J>32</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<BATCH_SYSTEM>lsf</BATCH_SYSTEM>
<SUPPORTED_BY>acme</SUPPORTED_BY>
<!-- 160 hardware threads usable per node (SMT), 80 MPI ranks max. -->
<MAX_TASKS_PER_NODE>160</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>80</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="spectrum-mpi">
<!-- Shared helper script wraps the native launcher; the direct jsrun alternative is kept commented out below. -->
<executable>/lustre/atlas/world-shared/cli900/helper_scripts/mpirun.summitdev</executable>
<!-- <executable>jsrun</executable> -->
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }} -N $MAX_MPITASKS_PER_NODE</arg>
<!-- <arg name="num_tasks" > -n ALL_HOSTS -a $MAX_MPITASKS_PER_NODE </arg> -->
<!-- <arg name="thread_count"> -c $ENV{OMP_NUM_THREADS} -E OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS}</arg> -->
</arguments>
</mpirun>
<!-- allow_error="true": failures from individual module commands do not abort case setup. -->
<module_system type="module" allow_error="true">
<!-- list of init_path elements, one per supported language e.g. sh, perl, python-->
<init_path lang="sh">/sw/summitdev/lmod/7.4.0/rhel7.2_gnu4.8.5/lmod/7.4/init/sh</init_path>
<init_path lang="csh">/sw/summitdev/lmod/7.4.0/rhel7.2_gnu4.8.5/lmod/7.4/init/csh</init_path>
<init_path lang="python">/sw/summitdev/lmod/7.4.0/rhel7.2_gnu4.8.5/lmod/7.4/init/env_modules_python.py</init_path>
<init_path lang="perl">/sw/summitdev/lmod/7.4.0/rhel7.2_gnu4.8.5/lmod/7.4/init/perl</init_path>
<!-- list of cmd_path elements, one for every supported language, e.g. sh, perl, python -->
<cmd_path lang="perl">module</cmd_path>
<cmd_path lang="python">/sw/summitdev/lmod/7.4.0/rhel7.2_gnu4.8.5/lmod/lmod/libexec/lmod python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<!-- Always execute -->
<!-- NOTE(review): the "ls" commands just list loaded modules; presumably diagnostics left in on purpose given allow_error. -->
<modules>
<command name="ls"/>
<command name="purge"/>
<command name="ls"/>
<command name="load">DefApps</command>
<command name="load">python/3.5.2</command>
<command name="load">subversion/1.9.3</command>
<command name="load">git/2.13.0</command>
<command name="load">cmake/3.6.1</command>
<command name="load">essl/5.5.0-20161110</command>
<command name="load">netlib-lapack/3.6.1</command>
</modules>
<!-- List of modules elements, executing commands if compiler and mpilib condition applies -->
<modules compiler="pgi">
<command name="rm">xl</command>
<command name="load">pgi/17.9</command>
<command name="load">spectrum-mpi/10.1.0.4-20170915</command>
<command name="ls"/>
</modules>
<modules compiler="ibm">
<command name="rm">pgi</command>
<command name="load">xl/20170914-beta</command>
<command name="load">spectrum-mpi/10.1.0.4-20170915</command>
<command name="ls"/>
</modules>
<!-- mpi lib settings -->
<modules mpilib="mpi-serial">
<command name="load">netcdf/4.4.1</command>
<command name="load">netcdf-fortran/4.4.4</command>
</modules>
<!-- Sometimes,same versions of libraries are not available for different compilers, hence the split below -->
<modules compiler="ibm" mpilib="!mpi-serial">
<command name="load">netcdf/4.4.1</command>
<command name="load">netcdf-fortran/4.4.4</command>
<command name="load">parallel-netcdf/1.7.0</command>
<command name="load">hdf5/1.10.0-patch1</command>
</modules>
<modules compiler="pgi" mpilib="!mpi-serial">
<command name="load">netcdf/4.4.1</command>
<command name="load">netcdf-fortran/4.4.4</command>
<command name="load">parallel-netcdf/1.7.0</command>
<command name="load">hdf5/1.10.0-patch1</command>
</modules>
</module_system>
<RUNDIR>/lustre/atlas/scratch/$ENV{USER}/$PROJECT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<!-- Useful jsrun options:
-n (hyphen-hyphen)nrs Number of resource sets
-a (hyphen-hyphen)tasks_per_rs Number of tasks per resource set
-c (hyphen-hyphen)cpu_per_rs Number of CPUs per resource set. Threads per rs.
-g (hyphen-hyphen)gpu_per_rs Number of GPUs per resource set
-r (hyphen-hyphen)rs_per_host Number of resource sets per host
<arg name="num_tasks" > -n ALL_HOSTS -p {{ total_tasks }} </arg>
Old options: <arg name="binding_core"> map-by core:PE=$ENV{OMP_NUM_THREADS} bind-to core </arg>
<arg name="show-binding"> report-bindings </arg>
<arg name="show-processmap"> display-map </arg>
-->
<!-- Default -->
<!-- Library paths are taken from the OLCF_* variables exported by the loaded modules. -->
<environment_variables>
<env name="COMPILER">$COMPILER</env>
<env name="MPILIB">$MPILIB</env>
<env name="OMP_STACKSIZE">128M</env>
<env name="NETCDF_C_PATH">$ENV{OLCF_NETCDF_ROOT}</env>
<env name="NETCDF_FORTRAN_PATH">$ENV{OLCF_NETCDF_FORTRAN_ROOT}</env>
<env name="HDF5_PATH">$ENV{OLCF_HDF5_ROOT}</env>
<env name="ESSL_PATH">$ENV{OLCF_ESSL_ROOT}</env>
<env name="NETLIB_LAPACK_PATH">$ENV{OLCF_NETLIB_LAPACK_ROOT}</env>
</environment_variables>
<!-- <environment_variables compiler="ibm"> -->
<!-- <env name="NETCDF_FORTRAN_PATH">/lustre/atlas/proj-shared/cli115/summitdev/soft/netcdf/fortran-4.4.4-xl-20170914-beta</env> -->
<!-- </environment_variables> -->
<environment_variables mpilib="!mpi-serial">
<env name="PNETCDF_PATH">$ENV{OLCF_PARALLEL_NETCDF_ROOT}</env>
</environment_variables>
</machine>
<machine MACH="summit">
<!-- ORNL Summit: POWER9 + V100, LSF batch, ibm/pgi/gnu compilers with spectrum-mpi.
     MPI is launched through a site helper script rather than jsrun directly. -->
<DESC>ORNL Summit. Node: 2x POWER9 + 6x Volta V100, 22 cores/socket, 4 HW threads/core.</DESC>
<NODENAME_REGEX>.*summit.*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>ibm,pgi,pgiacc,gnu</COMPILERS>
<MPILIBS>spectrum-mpi,mpi-serial</MPILIBS>
<PROJECT>cli115</PROJECT>
<CHARGE_ACCOUNT>cli115</CHARGE_ACCOUNT>
<SAVE_TIMING_DIR>/gpfs/alpine/proj-shared/$PROJECT</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>cli115,cli127</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>/gpfs/alpine/$PROJECT/proj-shared/$ENV{USER}/e3sm_scratch</CIME_OUTPUT_ROOT>
<!-- In case you wish to try HOME for building to check for filesystem issues, uncomment following -->
<!-- You may have to change RUNDIR below to use scratch file sustem. -->
<!-- <CIME_OUTPUT_ROOT>$ENV{HOME}/e3sm_scratch/$PROJECT</CIME_OUTPUT_ROOT> -->
<DIN_LOC_ROOT>/gpfs/alpine/cli115/world-shared/e3sm/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/gpfs/alpine/cli115/world-shared/e3sm/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/gpfs/alpine/$PROJECT/proj-shared/$ENV{USER}/archive/$CASE</DOUT_S_ROOT>
<BASELINE_ROOT>/gpfs/alpine/cli115/world-shared/e3sm/baselines/$COMPILER</BASELINE_ROOT>
<CCSM_CPRNC>/gpfs/alpine/cli115/world-shared/e3sm/tools/cprnc.summit/cprnc</CCSM_CPRNC>
<GMAKE_J>8</GMAKE_J>
<TESTS>e3sm_developer</TESTS>
<NTEST_PARALLEL_JOBS>4</NTEST_PARALLEL_JOBS>
<BATCH_SYSTEM>lsf</BATCH_SYSTEM>
<SUPPORTED_BY>e3sm</SUPPORTED_BY>
<!-- NOTE(review): the comment further below cites 168 = 42 cores x 4 (smt4), but the limit here is 84 (42 x 2); confirm which SMT mode is intended. -->
<MAX_TASKS_PER_NODE>84</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>84</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
<mpirun mpilib="spectrum-mpi">
<!-- Use a helper script to tweak jsrun options -->
<executable>/gpfs/alpine/world-shared/cli115/mpirun.summit</executable>
<!-- <executable>jsrun</executable> -->
<arguments>
<arg name="num_tasks"> -n {{ total_tasks }} -N {{ tasks_per_node }}</arg>
</arguments>
</mpirun>
<!-- allow_error="true": failures from individual module commands do not abort case setup. -->
<module_system type="module" allow_error="true">
<!-- list of init_path elements, one per supported language e.g. sh, perl, python-->
<init_path lang="sh">/sw/summit/lmod/7.7.10/rhel7.3_gnu4.8.5/lmod/lmod/init/sh</init_path>
<init_path lang="csh">/sw/summit/lmod/7.7.10/rhel7.3_gnu4.8.5/lmod/lmod/init/csh</init_path>
<init_path lang="python">/sw/summit/lmod/7.7.10/rhel7.3_gnu4.8.5/lmod/lmod/init/env_modules_python.py</init_path>
<init_path lang="perl">/sw/summit/lmod/7.7.10/rhel7.3_gnu4.8.5/lmod/lmod/init/perl</init_path>
<!-- list of cmd_path elements, one for every supported language, e.g. sh, perl, python -->
<cmd_path lang="perl">module</cmd_path>
<cmd_path lang="python">/sw/summit/lmod/7.7.10/rhel7.3_gnu4.8.5/lmod/7.7.10/libexec/lmod python</cmd_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<!-- Always execute -->
<modules>
<command name="purge"/>
<command name="ls"/>
<command name="load">DefApps</command>
<command name="load">python/3.5.2</command>
<command name="load">subversion/1.9.3</command>
<command name="load">git/2.13.0</command>
<command name="load">cmake/3.13.4</command>
<command name="load">essl/6.1.0-2</command>
<command name="load">netlib-lapack/3.8.0</command>
</modules>
<!-- List of modules elements, executing commands if compiler and mpilib condition applies -->
<!-- "pgi.*" matches both the pgi and pgiacc compiler names. -->
<modules compiler="pgi.*">
<command name="load">pgi/19.4</command>
</modules>
<modules compiler="ibm">
<command name="load">xl/16.1.1-3</command>
</modules>
<modules compiler="gnu">
<command name="load">gcc/6.4.0</command>
</modules>
<modules>
<command name="load">netcdf/4.6.1</command>
<command name="load">netcdf-fortran/4.4.4</command>
</modules>
<!-- mpi lib settings -->
<!-- Sometimes,same versions of libraries are not available for different compilers, hence the split below -->
<modules compiler="ibm" mpilib="!mpi-serial">
<command name="load">spectrum-mpi/10.3.0.1-20190611</command>
</modules>
<modules compiler="pgi.*" mpilib="!mpi-serial">
<command name="load">spectrum-mpi/10.3.0.1-20190611</command>
</modules>
<modules compiler="gnu" mpilib="!mpi-serial">
<command name="load">spectrum-mpi/10.3.0.1-20190611</command>
</modules>
<modules>
<command name="load">parallel-netcdf/1.8.1</command>
<command name="load">hdf5/1.10.3</command>
</modules>
</module_system>
<!-- <RUNDIR>/gpfs/alpine/$PROJECT/proj-shared/$ENV{USER}/e3sm_scratch/$CASE/run</RUNDIR> -->
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
<EXEROOT>$CIME_OUTPUT_ROOT/$CASE/bld</EXEROOT>
<!-- Ref: https://www.olcf.ornl.gov/for-users/system-user-guides/summit/ -->
<!-- 1 core/socket not available for application, so 168 = 42cores*4 in smt4 mode -->
<!-- Useful jsrun options:
-n (hyphen-hyphen)nrs Number of resource sets
-a (hyphen-hyphen)tasks_per_rs Number of tasks per resource set
-c (hyphen-hyphen)cpu_per_rs Number of CPUs per resource set. Threads per rs.
-g (hyphen-hyphen)gpu_per_rs Number of GPUs per resource set
-r (hyphen-hyphen)rs_per_host Number of resource sets per host
-->
<!-- Default -->
<!-- Library paths are taken from the OLCF_* variables exported by the loaded modules. -->
<environment_variables>
<env name="COMPILER">$COMPILER</env>
<env name="MPILIB">$MPILIB</env>
<env name="OMP_STACKSIZE">128M</env>
<env name="NETCDF_C_PATH">$ENV{OLCF_NETCDF_ROOT}</env>
<env name="NETCDF_FORTRAN_PATH">$ENV{OLCF_NETCDF_FORTRAN_ROOT}</env>
<env name="NETCDF_PATH">$ENV{OLCF_NETCDF_FORTRAN_ROOT}</env>
<env name="NETCDFF">$ENV{OLCF_NETCDF_FORTRAN_ROOT}</env>
<env name="ESSL_PATH">$ENV{OLCF_ESSL_ROOT}</env>
<env name="NETLIB_LAPACK_PATH">$ENV{OLCF_NETLIB_LAPACK_ROOT}</env>
</environment_variables>
<!-- Only forwarded when threading is enabled for the case. -->
<environment_variables SMP_PRESENT="TRUE">
<env name="OMP_NUM_THREADS">$ENV{OMP_NUM_THREADS}</env>
</environment_variables>
<environment_variables mpilib="!mpi-serial">
<env name="HDF5_PATH">$ENV{OLCF_HDF5_ROOT}</env>
<env name="PNETCDF_PATH">$ENV{OLCF_PARALLEL_NETCDF_ROOT}</env>
</environment_variables>
</machine>
<!-- Command suffix for every run: the model executable plus redirection of stdout/stderr into a per-job (LID) log file.
     NOTE(review): the raw ">>", "2>" and "&" below are not well-formed XML as shown; in the actual file these are presumably entity-escaped (extraction artifact). -->
<default_run_suffix>
<default_run_exe>${EXEROOT}/e3sm.exe </default_run_exe>
<default_run_misc_suffix> >> e3sm.log.$LID 2>&1 </default_run_misc_suffix>
</default_run_suffix>
</config_machines>
E3SM XML settings for Parallel Input/Output (PIO) library.
<?xml version="1.0"?>
<config_pio version="1.0">
<!--- uncomment and fill in relevant sections
<entry id="PIO_CONFIG_OPTS">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="PIO_ASYNC_INTERFACE">
<values>
<value></value>
</values>
</entry>
-->
<!-- Spacing between MPI ranks used for I/O; defaults to one I/O task per node, with machine/grid overrides. -->
<entry id="PIO_STRIDE">
<values>
<value>$MAX_MPITASKS_PER_NODE</value>
<value mach="yellowstone" grid="a%ne120.+oi%gx1">60</value>
<value mach="mira|cetus">128</value>
<!-- NOTE(review): -99 presumably tells PIO to derive the stride from the task count; confirm against PIO documentation. -->
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">-99</value>
</values>
</entry>
<!-- First MPI rank participating in I/O. -->
<entry id="PIO_ROOT">
<values>
<value>0</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="PIO_NUMTASKS">
<values>
<value></value>
</values>
</entry>
-->
<!-- I/O backend: pnetcdf by default; the listed machines and all mpi-serial builds fall back to serial netcdf
     (presumably where a working parallel-netcdf is unavailable; confirm per machine). -->
<entry id="PIO_TYPENAME">
<values>
<value>pnetcdf</value>
<value mach="userdefined">netcdf</value>
<value mach="melvin">netcdf</value>
<value mach="eastwind">netcdf</value>
<value mach="constance">netcdf</value>
<value mach="cascade">netcdf</value>
<value mach="sooty">netcdf</value>
<value mach="pleiades.*">netcdf</value>
<value mach="hobart" compiler="pgi">netcdf</value>
<value mach="oic5">netcdf</value>
<value mach="lawrencium-lr2">netcdf</value>
<value mach="lawrencium-lr3">netcdf</value>
<value mach="lawrencium-lr6">netcdf</value>
<value mach="cades">netcdf</value>
<value mach="grizzly">netcdf</value>
<value mach="badger">netcdf</value>
<value mpilib="mpi-serial">netcdf</value>
<value mach="bebop" mpilib="impi" compset=".*CAM5.+MPAS.*">netcdf</value>
</values>
</entry>
<!-- Using the BOX rearranger as the default -->
<entry id="PIO_REARRANGER">
<values>
<value>1</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="PIO_DEBUG_LEVEL">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="PIO_BLOCKSIZE">
<values>
<value></value>
</values>
</entry>
-->
<!-- Cap the PIO buffer size on these Sandia machines. -->
<entry id="PIO_BUFFER_SIZE_LIMIT">
<values>
<!-- Fixed: machine selector attribute is lowercase "mach" everywhere else in this file; the previous uppercase "MACH" is inconsistent and would not match the lowercase selector. -->
<value mach="sandiatoss3">1</value>
<value mach="ghost">1</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="OCN_PIO_STRIDE">
<values>
<value grid="a%ne120.+oi%gx1">60</value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="OCN_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- Ocean-component override: 16 PIO tasks for the ne120 CAM5+MPASO case on mira. -->
<entry id="OCN_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">16</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="OCN_PIO_TYPENAME">
<values>
<value></value>
</values>
</entry>
-->
<!-- <entry id="LND_PIO_REARRANGER">
<values>
<value compset="_CLM40" >2</value>
<value compset="_CLM45" >1</value>
<value compset="_CLM50" >1</value>
</values>
</entry> -->
<!--- uncomment and fill in relevant sections
<entry id="LND_PIO_STRIDE">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="LND_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- Land-component override: 16 PIO tasks for the ne120 CAM5+MPASO case on mira. -->
<entry id="LND_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">16</value>
</values>
</entry>
<!-- Land-component backend override: serial netcdf for the CLM45+MALI%SIA compsets on bebop. -->
<entry id="LND_PIO_TYPENAME">
<values>
<value mach="bebop" compset=".*CLM45.+MALI%SIA.*">netcdf</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="ROF_PIO_STRIDE">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="ROF_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- River-runoff-component override: 16 PIO tasks for the ne120 CAM5+MPASO case on mira. -->
<entry id="ROF_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">16</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="ROF_PIO_TYPENAME">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="ICE_PIO_STRIDE">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="ICE_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- Sea-ice-component override: 16 PIO tasks for the ne120 CAM5+MPASO case on mira. -->
<entry id="ICE_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">16</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="ICE_PIO_TYPENAME">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="ATM_PIO_STRIDE">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="ATM_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- Atmosphere-component override: 16 PIO tasks for the ne120 CAM5+MPASO case on mira. -->
<entry id="ATM_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">16</value>
</values>
</entry>
<!-- Atmosphere backend override: serial netcdf for data-atmosphere (DATM) compsets. -->
<entry id="ATM_PIO_TYPENAME">
<values>
<value compset="DATM">netcdf</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="CPL_PIO_STRIDE">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="CPL_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- Number of PIO tasks for the coupler (CPL).
     16 tasks on machine "mira" for ne120 atmosphere grids with a
     CAM5+MPASO compset (same selector as the other 16-task overrides here). -->
<entry id="CPL_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">16</value>
</values>
</entry>
<!-- PIO I/O library/format for the coupler (CPL).
     Plain "netcdf" on machine "mira" for ne120 atmosphere grids; note this
     selector has no compset restriction, unlike the NUMTASKS entries above. -->
<entry id="CPL_PIO_TYPENAME">
<values>
<value mach="mira" grid="a%ne120">netcdf</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="GLC_PIO_STRIDE">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="GLC_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- Number of PIO tasks for the land-ice (GLC) component.
     Only 1 task (vs 16 for ATM/LND/ICE/ROF/CPL under the same selector) on
     machine "mira" for ne120 atmosphere grids with a CAM5+MPASO compset.
     NOTE(review): the lower count presumably reflects smaller GLC I/O
     volume - confirm with the E3SM performance configuration. -->
<entry id="GLC_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">1</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="GLC_PIO_TYPENAME">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="WAV_PIO_STRIDE">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="WAV_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- Number of PIO tasks for the wave (WAV) component.
     1 task on machine "mira" for ne120 atmosphere grids with a CAM5+MPASO
     compset (matches the GLC and ESP single-task overrides in this file). -->
<entry id="WAV_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">1</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="WAV_PIO_TYPENAME">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="ESP_PIO_STRIDE">
<values>
<value></value>
</values>
</entry>
-->
<!--- uncomment and fill in relevant sections
<entry id="ESP_PIO_ROOT">
<values>
<value></value>
</values>
</entry>
-->
<!-- Number of PIO tasks for the external system processing (ESP) component.
     1 task on machine "mira" for ne120 atmosphere grids with a CAM5+MPASO
     compset (matches the GLC and WAV single-task overrides in this file).
     NOTE(review): ESP expansion assumed from CIME component naming - confirm. -->
<entry id="ESP_PIO_NUMTASKS">
<values>
<value mach="mira" grid="a%ne120" compset=".*CAM5.+MPASO.*">1</value>
</values>
</entry>
<!--- uncomment and fill in relevant sections
<entry id="ESP_PIO_TYPENAME">
<values>
<value></value>
</values>
</entry>
-->
</config_pio>