CESM: Difference between revisions

From Alliance Doc
Jump to navigation Jump to search
(update remaining config_machine.xml files)
(add descriptive introduction)
 
(5 intermediate revisions by one other user not shown)
Line 1: Line 1:
{{draft}}
{{draft}}
"The [https://www.cesm.ucar.edu/ Community Earth System Model] is a fully coupled global climate model developed in collaboration with colleagues in the research community. CESM provides state of the art computer simulations of Earth's past, present, and future climate states."


=Porting and Validating=
=Porting and Validating=
Line 5: Line 7:
The below configuration files and commands are designed for a local installation of CESM 2.1.5:
The below configuration files and commands are designed for a local installation of CESM 2.1.5:


Before making the adaptations as described below, please [https://www.cesm.ucar.edu/models/cesm2/download download CESM 2.1.5 from the CESM developers].
Before making the adaptations as described below, please [https://www.cesm.ucar.edu/models/cesm2/download download CESM 2.1.5 from the CESM developers] into your local directory.


==Local machine file==
==Local machine file==
Line 27: Line 29:
     <OS>LINUX</OS>
     <OS>LINUX</OS>
     <COMPILERS>intel,gnu</COMPILERS>
     <COMPILERS>intel,gnu</COMPILERS>
     <MPILIBS>openmpi,intelmpi</MPILIBS>
     <MPILIBS>openmpi</MPILIBS>


     <PROJECT>def-EDIT_THIS</PROJECT>
     <PROJECT>def-EDIT_THIS</PROJECT>
Line 44: Line 46:
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>


     <mpirun mpilib="intelmpi">
     <mpirun mpilib="openmpi">
       <executable>mpirun</executable>
       <executable>mpirun</executable>
       <arguments>
       <arguments>
Line 69: Line 71:
<command name="load">cmake/3.27.7</command>
<command name="load">cmake/3.27.7</command>
       </modules>
       </modules>
       <modules mpilib="intelmpi">
       <modules mpilib="openmpi">
         <command name="load">openmpi/4.1.5</command>
         <command name="load">openmpi/4.1.5</command>
         <command name="load">hdf5-mpi/1.14.2</command>
         <command name="load">hdf5-mpi/1.14.2</command>
Line 95: Line 97:
     </resource_limits>
     </resource_limits>
   </machine>
   </machine>
  <default_run_suffix>
    <default_run_exe>${EXEROOT}/cesm.exe </default_run_exe>
    <default_run_misc_suffix> >> cesm.log.$LID 2>&amp;1 </default_run_misc_suffix>
  </default_run_suffix>
</config_machines>
</config_machines>
}}
}}
Line 118: Line 115:
     <OS>LINUX</OS>
     <OS>LINUX</OS>
     <COMPILERS>intel,gnu</COMPILERS>
     <COMPILERS>intel,gnu</COMPILERS>
     <MPILIBS>openmpi,intelmpi</MPILIBS>
     <MPILIBS>openmpi</MPILIBS>


     <PROJECT>def-EDIT_THIS</PROJECT>
     <PROJECT>def-EDIT_THIS</PROJECT>
Line 135: Line 132:
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>


     <mpirun mpilib="intelmpi">
     <mpirun mpilib="openmpi">
       <executable>mpirun</executable>
       <executable>mpirun</executable>
       <arguments>
       <arguments>
Line 159: Line 156:
<command name="load">cmake/3.27.7</command>
<command name="load">cmake/3.27.7</command>
       </modules>
       </modules>
       <modules mpilib="intelmpi">
       <modules mpilib="openmpi">
         <command name="load">openmpi/4.1.5</command>
         <command name="load">openmpi/4.1.5</command>
         <command name="load">hdf5-mpi/1.14.2</command>
         <command name="load">hdf5-mpi/1.14.2</command>
Line 185: Line 182:
     </resource_limits>
     </resource_limits>
   </machine>
   </machine>
  <default_run_suffix>
    <default_run_exe>${EXEROOT}/cesm.exe </default_run_exe>
    <default_run_misc_suffix> >> cesm.log.$LID 2>&amp;1 </default_run_misc_suffix>
  </default_run_suffix>
</config_machines>
</config_machines>
}}
}}
Line 208: Line 200:
     <OS>LINUX</OS>
     <OS>LINUX</OS>
     <COMPILERS>intel,gnu</COMPILERS>
     <COMPILERS>intel,gnu</COMPILERS>
     <MPILIBS>openmpi,intelmpi</MPILIBS>
     <MPILIBS>openmpi</MPILIBS>


     <PROJECT>def-EDIT_THIS</PROJECT>
     <PROJECT>def-EDIT_THIS</PROJECT>
Line 225: Line 217:
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>


     <mpirun mpilib="intelmpi">
     <mpirun mpilib="openmpi">
       <executable>mpirun</executable>
       <executable>mpirun</executable>
       <arguments>
       <arguments>
Line 249: Line 241:
<command name="load">cmake/3.27.7</command>
<command name="load">cmake/3.27.7</command>
       </modules>
       </modules>
       <modules mpilib="intelmpi">
       <modules mpilib="openmpi">
         <command name="load">openmpi/4.1.5</command>
         <command name="load">openmpi/4.1.5</command>
         <command name="load">hdf5-mpi/1.14.2</command>
         <command name="load">hdf5-mpi/1.14.2</command>
Line 275: Line 267:
     </resource_limits>
     </resource_limits>
   </machine>
   </machine>
  <default_run_suffix>
    <default_run_exe>${EXEROOT}/cesm.exe </default_run_exe>
    <default_run_misc_suffix> >> cesm.log.$LID 2>&amp;1 </default_run_misc_suffix>
  </default_run_suffix>
</config_machines>
</config_machines>
}}
}}
Line 298: Line 285:
     <OS>LINUX</OS>
     <OS>LINUX</OS>
     <COMPILERS>intel,gnu</COMPILERS>
     <COMPILERS>intel,gnu</COMPILERS>
     <MPILIBS>openmpi,intelmpi</MPILIBS>
     <MPILIBS>openmpi</MPILIBS>


     <PROJECT>def-EDIT_THIS</PROJECT>
     <PROJECT>def-EDIT_THIS</PROJECT>
Line 315: Line 302:
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>


     <mpirun mpilib="intelmpi">
     <mpirun mpilib="openmpi">
       <executable>mpirun</executable>
       <executable>mpirun</executable>
       <arguments>
       <arguments>
Line 339: Line 326:
<command name="load">cmake/3.27.7</command>
<command name="load">cmake/3.27.7</command>
       </modules>
       </modules>
       <modules mpilib="intelmpi">
       <modules mpilib="openmpi">
         <command name="load">openmpi/4.1.5</command>
         <command name="load">openmpi/4.1.5</command>
         <command name="load">hdf5-mpi/1.14.2</command>
         <command name="load">hdf5-mpi/1.14.2</command>
Line 365: Line 352:
     </resource_limits>
     </resource_limits>
   </machine>
   </machine>
  <default_run_suffix>
    <default_run_exe>${EXEROOT}/cesm.exe </default_run_exe>
    <default_run_misc_suffix> >> cesm.log.$LID 2>&amp;1 </default_run_misc_suffix>
  </default_run_suffix>
</config_machines>
</config_machines>
}}
}}
Line 388: Line 370:
     <OS>LINUX</OS>
     <OS>LINUX</OS>
     <COMPILERS>intel,gnu</COMPILERS>
     <COMPILERS>intel,gnu</COMPILERS>
     <MPILIBS>openmpi,intelmpi</MPILIBS>
     <MPILIBS>openmpi</MPILIBS>


     <PROJECT>def-EDIT_THIS</PROJECT>
     <PROJECT>def-EDIT_THIS</PROJECT>
Line 405: Line 387:
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
     <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>


     <mpirun mpilib="intelmpi">
     <mpirun mpilib="openmpi">
       <executable>mpirun</executable>
       <executable>mpirun</executable>
       <arguments>
       <arguments>
Line 429: Line 411:
<command name="load">cmake/3.27.7</command>
<command name="load">cmake/3.27.7</command>
       </modules>
       </modules>
       <modules mpilib="intelmpi">
       <modules mpilib="openmpi">
         <command name="load">openmpi/4.1.5</command>
         <command name="load">openmpi/4.1.5</command>
         <command name="load">hdf5-mpi/1.14.2</command>
         <command name="load">hdf5-mpi/1.14.2</command>
Line 455: Line 437:
     </resource_limits>
     </resource_limits>
   </machine>
   </machine>
  <default_run_suffix>
    <default_run_exe>${EXEROOT}/cesm.exe </default_run_exe>
    <default_run_misc_suffix> >> cesm.log.$LID 2>&amp;1 </default_run_misc_suffix>
  </default_run_suffix>
</config_machines>
</config_machines>
}}
}}
Line 470: Line 447:
{{Command2
{{Command2
|prompt=[name@server ~]$
|prompt=[name@server ~]$
|xmllint --noout --schema $EBROOTCESM/cime/config/xml_schemas/config_machines.xsd ~/.cime/config_machines.xml
|xmllint --noout --schema /path/to/CESM/cime/config/xml_schemas/config_machines.xsd ~/.cime/config_machines.xml
|result=
|result=
/home/name/.cime/config_machines.xml validates
/home/name/.cime/config_machines.xml validates
Line 490: Line 467:
{{Command2
{{Command2
|prompt=[name@server ~]$
|prompt=[name@server ~]$
|less $EBROOTCESM/cime/config/xml_schemas/config_machines_template.xml
|less /path/to/CESM/cime/config/xml_schemas/config_machines_template.xml
}}
}}
</li>
</li>
Line 528: Line 505:
{{Command2
{{Command2
|prompt=[name@server ~]$
|prompt=[name@server ~]$
|xmllint --noout --schema $EBROOTCESM/cime/config/xml_schemas/config_batch.xsd ~/.cime/config_batch.xml
|xmllint --noout --schema /path/to/CESM/cime/config/xml_schemas/config_batch.xsd ~/.cime/config_batch.xml
|result=
|result=
/home/name/.cime/config_batch.xml validates
/home/name/.cime/config_batch.xml validates
Line 536: Line 513:
<li>Check the documentation for additional <b>[https://esmci.github.io/cime/versions/maint-5.6/html/xml_files/cesm.html#cimeroot-config-cesm-machines configuration parameters and examples]</b>.</li>
<li>Check the documentation for additional <b>[https://esmci.github.io/cime/versions/maint-5.6/html/xml_files/cesm.html#cimeroot-config-cesm-machines configuration parameters and examples]</b>.</li>
</ul>
</ul>
==Local compilers file==
<ul>
<li>Create and edit the file <code>~/.cime/config_compilers.xml</code> from the following minimal content per cluster:
<tabs>
<tab name="Beluga">
{{File
  |name=~/.cime/config_compilers.xml
  |lang="xml"
  |contents=
<?xml version="1.0"?>
<compiler MACH="beluga">
  <CPPDEFS>
    <!-- these flags enable nano timers -->
    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
  </CPPDEFS>
  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
  <SLIBS>
    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
  </SLIBS>
</compiler>
}}
</tab>
<tab name="Cedar">
{{File
  |name=~/.cime/config_compilers.xml
  |lang="xml"
  |contents=
<?xml version="1.0"?>
<compiler MACH="cedar">
  <CPPDEFS>
    <!-- these flags enable nano timers -->
    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
  </CPPDEFS>
  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
  <SLIBS>
    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
  </SLIBS>
</compiler>
}}
</tab>
<tab name="Graham">
{{File
  |name=~/.cime/config_compilers.xml
  |lang="xml"
  |contents=
<?xml version="1.0"?>
<compiler MACH="graham">
  <CPPDEFS>
    <!-- these flags enable nano timers -->
    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
  </CPPDEFS>
  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
  <SLIBS>
    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
  </SLIBS>
</compiler>
}}
</tab>
<tab name="Narval">
{{File
  |name=~/.cime/config_compilers.xml
  |lang="xml"
  |contents=
<?xml version="1.0"?>
<compiler MACH="narval">
  <CPPDEFS>
    <!-- these flags enable nano timers -->
    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
  </CPPDEFS>
  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
  <SLIBS>
    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
  </SLIBS>
</compiler>
}}
</tab>
<tab name="Niagara">
{{File
  |name=~/.cime/config_compilers.xml
  |lang="xml"
  |contents=
<?xml version="1.0"?>
<compiler MACH="niagara">
  <CPPDEFS>
    <!-- these flags enable nano timers -->
    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
  </CPPDEFS>
  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
  <SLIBS>
    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
  </SLIBS>
</compiler>
}}
</tab>
</tabs>
==Checkout externals==
Before your first use of CESM, you may check out the individual model components by running the checkout_externals script.
{{Command2
|prompt=[name@server ~]$
|/path/to/CESM/manage_externals/checkout_externals
}}
You may need to accept a certificate from the CESM repository to download input files.


==Creating a test case==
==Creating a test case==
Line 541: Line 641:
{{Command2
{{Command2
|prompt=[name@server ~]$
|prompt=[name@server ~]$
|create_newcase --case test_case --compset IHistClm50Bgc --res f19_g17
|/path/to/CESM/cime/scripts/create_newcase --case test_case --compset IHistClm50Bgc --res f19_g17
}}
}}



Latest revision as of 14:14, 10 October 2024


This article is a draft

This is not a complete article: This is a draft, a work in progress that is intended to be published into an article, which may or may not be ready for inclusion in the main wiki. It should not necessarily be considered factual or authoritative.




"The Community Earth System Model is a fully coupled global climate model developed in collaboration with colleagues in the research community. CESM provides state of the art computer simulations of Earth's past, present, and future climate states."

Porting and Validating[edit]

The below configuration files and commands are designed for a local installation of CESM 2.1.5:

Before making the adaptations as described below, please download CESM 2.1.5 from the CESM developers into your local directory.

Local machine file[edit]

  • Create and edit the file ~/.cime/config_machines.xml from the following minimal content per cluster:
    File : ~/.cime/config_machines.xml

    <?xml version="1.0"?>
    
    <config_machines version="2.0">
      <machine MACH="beluga">
        <DESC>https://docs.alliancecan.ca/wiki/Béluga/en</DESC>
        <NODENAME_REGEX>b[cegl].*.int.ets1.calculquebec.ca</NODENAME_REGEX>
    
        <OS>LINUX</OS>
        <COMPILERS>intel,gnu</COMPILERS>
        <MPILIBS>openmpi</MPILIBS>
    
        <PROJECT>def-EDIT_THIS</PROJECT>
        <CHARGE_ACCOUNT>def-EDIT_THIS</CHARGE_ACCOUNT>
    
        <CIME_OUTPUT_ROOT>/scratch/$USER/cesm/output</CIME_OUTPUT_ROOT>
        <DIN_LOC_ROOT>/scratch/$USER/cesm/inputdata</DIN_LOC_ROOT>
        <DIN_LOC_ROOT_CLMFORC>${DIN_LOC_ROOT}/atm/datm7</DIN_LOC_ROOT_CLMFORC>
        <DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/case</DOUT_S_ROOT>
        <GMAKE>make</GMAKE>
        <GMAKE_J>8</GMAKE_J>
        <BATCH_SYSTEM>slurm</BATCH_SYSTEM>
        <SUPPORTED_BY>support@tech.alliancecan.ca</SUPPORTED_BY>
        <MAX_TASKS_PER_NODE>40</MAX_TASKS_PER_NODE>
        <MAX_MPITASKS_PER_NODE>40</MAX_MPITASKS_PER_NODE>
        <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
    
        <mpirun mpilib="openmpi">
          <executable>mpirun</executable>
          <arguments>
        <arg name="anum_tasks"> -np {{ total_tasks }}</arg>
          </arguments>
        </mpirun>
        <module_system type="module" allow_error="true">
          <init_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/perl</init_path>
          <init_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/env_modules_python.py</init_path>
          <init_path lang="csh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/csh</init_path>
          <init_path lang="sh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/sh</init_path>
          <cmd_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod perl</cmd_path>
          <cmd_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod python</cmd_path>
          <cmd_path lang="csh">module</cmd_path>
          <cmd_path lang="sh">module</cmd_path>
          <modules>
          <command name="purge"/>
     	<command name="load">StdEnv/2023</command>
          </modules>
          <modules compiler="intel">
    	<command name="load">intel/2023.2.1</command>
    	<command name="load">git-annex/10.20231129</command>
    	<command name="load">cmake/3.27.7</command>
          </modules>
          <modules mpilib="openmpi">
            <command name="load">openmpi/4.1.5</command>
            <command name="load">hdf5-mpi/1.14.2</command>
            <command name="load">netcdf-c++4-mpi/4.3.1</command>
            <command name="load">netcdf-fortran-mpi/4.6.1</command>
            <command name="load">netcdf-mpi/4.9.2</command>
    	<command name="load">xml-libxml/2.0208</command>
    	<command name="load">flexiblas/3.3.1</command>
          </modules>
        </module_system>
        <environment_variables>
                <env name="NETCDF_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/pnetcdf/1.12.3</env>
                <env name="NETCDF_FORTRAN_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-fortran-mpi/4.6.1/</env>
                <env name="NETCDF_C_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-c++4-mpi/4.3.1/</env>
                <env name="NETLIB_LAPACK_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v3/Core/imkl/2023.2.0/mkl/2023.2.0/</env>
    	    <env name="OMP_STACKSIZE">256M</env>
                <env name="I_MPI_CC">icc</env>
                <env name="I_MPI_FC">ifort</env>
                <env name="I_MPI_F77">ifort</env>
                <env name="I_MPI_F90">ifort</env>
                <env name="I_MPI_CXX">icpc</env>
        </environment_variables>
        <resource_limits>
          <resource name="RLIMIT_STACK">300000000</resource>
        </resource_limits>
      </machine>
    </config_machines>
    


    File : ~/.cime/config_machines.xml

    <?xml version="1.0"?>
    
    <config_machines version="2.0">
      <machine MACH="cedar">
        <DESC>https://docs.alliancecan.ca/wiki/Cedar</DESC>
        <NODENAME_REGEX>c[de].*.computecanada.ca</NODENAME_REGEX>
    
        <OS>LINUX</OS>
        <COMPILERS>intel,gnu</COMPILERS>
        <MPILIBS>openmpi</MPILIBS>
    
        <PROJECT>def-EDIT_THIS</PROJECT>
        <CHARGE_ACCOUNT>def-EDIT_THIS</CHARGE_ACCOUNT>
    
        <CIME_OUTPUT_ROOT>/scratch/$USER/cesm/output</CIME_OUTPUT_ROOT>
        <DIN_LOC_ROOT>/scratch/$USER/cesm/inputdata</DIN_LOC_ROOT>
        <DIN_LOC_ROOT_CLMFORC>${DIN_LOC_ROOT}/atm/datm7</DIN_LOC_ROOT_CLMFORC>
        <DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/case</DOUT_S_ROOT>
        <GMAKE>make</GMAKE>
        <GMAKE_J>8</GMAKE_J>
        <BATCH_SYSTEM>slurm</BATCH_SYSTEM>
        <SUPPORTED_BY>support@tech.alliancecan.ca</SUPPORTED_BY>
        <MAX_TASKS_PER_NODE>48</MAX_TASKS_PER_NODE>
        <MAX_MPITASKS_PER_NODE>48</MAX_MPITASKS_PER_NODE>
        <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
    
        <mpirun mpilib="openmpi">
          <executable>mpirun</executable>
          <arguments>
        <arg name="anum_tasks"> -np {{ total_tasks }}</arg>
          </arguments>
        </mpirun>
        <module_system type="module" allow_error="true">
          <init_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/perl</init_path>
          <init_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/env_modules_python.py</init_path>
          <init_path lang="csh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/csh</init_path>
          <init_path lang="sh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/sh</init_path>
          <cmd_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod perl</cmd_path>
          <cmd_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod python</cmd_path>
          <cmd_path lang="csh">module</cmd_path>
          <cmd_path lang="sh">module</cmd_path>
          <modules>
          <command name="purge"/>
     	<command name="load">StdEnv/2023</command>
          </modules>
          <modules compiler="intel">
    	<command name="load">intel/2023.2.1</command>
    	<command name="load">git-annex/10.20231129</command>
    	<command name="load">cmake/3.27.7</command>
          </modules>
          <modules mpilib="openmpi">
            <command name="load">openmpi/4.1.5</command>
            <command name="load">hdf5-mpi/1.14.2</command>
            <command name="load">netcdf-c++4-mpi/4.3.1</command>
            <command name="load">netcdf-fortran-mpi/4.6.1</command>
            <command name="load">netcdf-mpi/4.9.2</command>
    	<command name="load">xml-libxml/2.0208</command>
    	<command name="load">flexiblas/3.3.1</command>
          </modules>
        </module_system>
        <environment_variables>
                <env name="NETCDF_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/pnetcdf/1.12.3</env>
                <env name="NETCDF_FORTRAN_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-fortran-mpi/4.6.1/</env>
                <env name="NETCDF_C_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-c++4-mpi/4.3.1/</env>
                <env name="NETLIB_LAPACK_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v3/Core/imkl/2023.2.0/mkl/2023.2.0/</env>
    	    <env name="OMP_STACKSIZE">256M</env>
                <env name="I_MPI_CC">icc</env>
                <env name="I_MPI_FC">ifort</env>
                <env name="I_MPI_F77">ifort</env>
                <env name="I_MPI_F90">ifort</env>
                <env name="I_MPI_CXX">icpc</env>
        </environment_variables>
        <resource_limits>
          <resource name="RLIMIT_STACK">300000000</resource>
        </resource_limits>
      </machine>
    </config_machines>
    


    File : ~/.cime/config_machines.xml

    <?xml version="1.0"?>
    
    <config_machines version="2.0">
      <machine MACH="graham">
        <DESC>https://docs.alliancecan.ca/wiki/Graham</DESC>
        <NODENAME_REGEX>gra.*</NODENAME_REGEX>
    
        <OS>LINUX</OS>
        <COMPILERS>intel,gnu</COMPILERS>
        <MPILIBS>openmpi</MPILIBS>
    
        <PROJECT>def-EDIT_THIS</PROJECT>
        <CHARGE_ACCOUNT>def-EDIT_THIS</CHARGE_ACCOUNT>
    
        <CIME_OUTPUT_ROOT>/scratch/$USER/cesm/output</CIME_OUTPUT_ROOT>
        <DIN_LOC_ROOT>/scratch/$USER/cesm/inputdata</DIN_LOC_ROOT>
        <DIN_LOC_ROOT_CLMFORC>${DIN_LOC_ROOT}/atm/datm7</DIN_LOC_ROOT_CLMFORC>
        <DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/case</DOUT_S_ROOT>
        <GMAKE>make</GMAKE>
        <GMAKE_J>8</GMAKE_J>
        <BATCH_SYSTEM>slurm</BATCH_SYSTEM>
        <SUPPORTED_BY>support@tech.alliancecan.ca</SUPPORTED_BY>
        <MAX_TASKS_PER_NODE>44</MAX_TASKS_PER_NODE>
        <MAX_MPITASKS_PER_NODE>44</MAX_MPITASKS_PER_NODE>
        <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
    
        <mpirun mpilib="openmpi">
          <executable>mpirun</executable>
          <arguments>
        <arg name="anum_tasks"> -np {{ total_tasks }}</arg>
          </arguments>
        </mpirun>
        <module_system type="module" allow_error="true">
          <init_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/perl</init_path>
          <init_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/env_modules_python.py</init_path>
          <init_path lang="csh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/csh</init_path>
          <init_path lang="sh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/sh</init_path>
          <cmd_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod perl</cmd_path>
          <cmd_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod python</cmd_path>
          <cmd_path lang="csh">module</cmd_path>
          <cmd_path lang="sh">module</cmd_path>
          <modules>
          <command name="purge"/>
     	<command name="load">StdEnv/2023</command>
          </modules>
          <modules compiler="intel">
    	<command name="load">intel/2023.2.1</command>
    	<command name="load">git-annex/10.20231129</command>
    	<command name="load">cmake/3.27.7</command>
          </modules>
          <modules mpilib="openmpi">
            <command name="load">openmpi/4.1.5</command>
            <command name="load">hdf5-mpi/1.14.2</command>
            <command name="load">netcdf-c++4-mpi/4.3.1</command>
            <command name="load">netcdf-fortran-mpi/4.6.1</command>
            <command name="load">netcdf-mpi/4.9.2</command>
    	<command name="load">xml-libxml/2.0208</command>
    	<command name="load">flexiblas/3.3.1</command>
          </modules>
        </module_system>
        <environment_variables>
                <env name="NETCDF_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/pnetcdf/1.12.3</env>
                <env name="NETCDF_FORTRAN_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-fortran-mpi/4.6.1/</env>
                <env name="NETCDF_C_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-c++4-mpi/4.3.1/</env>
                <env name="NETLIB_LAPACK_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v3/Core/imkl/2023.2.0/mkl/2023.2.0/</env>
    	    <env name="OMP_STACKSIZE">256M</env>
                <env name="I_MPI_CC">icc</env>
                <env name="I_MPI_FC">ifort</env>
                <env name="I_MPI_F77">ifort</env>
                <env name="I_MPI_F90">ifort</env>
                <env name="I_MPI_CXX">icpc</env>
        </environment_variables>
        <resource_limits>
          <resource name="RLIMIT_STACK">300000000</resource>
        </resource_limits>
      </machine>
    </config_machines>
    


    File : ~/.cime/config_machines.xml

    <?xml version="1.0"?>
    
    <config_machines version="2.0">
      <machine MACH="narval">
        <DESC>https://docs.alliancecan.ca/wiki/Narval/en</DESC>
        <NODENAME_REGEX>n[acgl].*.narval.calcul.quebec</NODENAME_REGEX>
    
        <OS>LINUX</OS>
        <COMPILERS>intel,gnu</COMPILERS>
        <MPILIBS>openmpi</MPILIBS>
    
        <PROJECT>def-EDIT_THIS</PROJECT>
        <CHARGE_ACCOUNT>def-EDIT_THIS</CHARGE_ACCOUNT>
    
        <CIME_OUTPUT_ROOT>/scratch/$USER/cesm/output</CIME_OUTPUT_ROOT>
        <DIN_LOC_ROOT>/scratch/$USER/cesm/inputdata</DIN_LOC_ROOT>
        <DIN_LOC_ROOT_CLMFORC>${DIN_LOC_ROOT}/atm/datm7</DIN_LOC_ROOT_CLMFORC>
        <DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/case</DOUT_S_ROOT>
        <GMAKE>make</GMAKE>
        <GMAKE_J>8</GMAKE_J>
        <BATCH_SYSTEM>slurm</BATCH_SYSTEM>
        <SUPPORTED_BY>support@tech.alliancecan.ca</SUPPORTED_BY>
        <MAX_TASKS_PER_NODE>64</MAX_TASKS_PER_NODE>
        <MAX_MPITASKS_PER_NODE>64</MAX_MPITASKS_PER_NODE>
        <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
    
        <mpirun mpilib="openmpi">
          <executable>mpirun</executable>
          <arguments>
        <arg name="anum_tasks"> -np {{ total_tasks }}</arg>
          </arguments>
        </mpirun>
        <module_system type="module" allow_error="true">
          <init_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/perl</init_path>
          <init_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/env_modules_python.py</init_path>
          <init_path lang="csh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/csh</init_path>
          <init_path lang="sh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/sh</init_path>
          <cmd_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod perl</cmd_path>
          <cmd_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod python</cmd_path>
          <cmd_path lang="csh">module</cmd_path>
          <cmd_path lang="sh">module</cmd_path>
          <modules>
          <command name="purge"/>
     	<command name="load">StdEnv/2023</command>
          </modules>
          <modules compiler="intel">
    	<command name="load">intel/2023.2.1</command>
    	<command name="load">git-annex/10.20231129</command>
    	<command name="load">cmake/3.27.7</command>
          </modules>
          <modules mpilib="openmpi">
            <command name="load">openmpi/4.1.5</command>
            <command name="load">hdf5-mpi/1.14.2</command>
            <command name="load">netcdf-c++4-mpi/4.3.1</command>
            <command name="load">netcdf-fortran-mpi/4.6.1</command>
            <command name="load">netcdf-mpi/4.9.2</command>
    	<command name="load">xml-libxml/2.0208</command>
    	<command name="load">flexiblas/3.3.1</command>
          </modules>
        </module_system>
        <environment_variables>
                <env name="NETCDF_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/pnetcdf/1.12.3</env>
                <env name="NETCDF_FORTRAN_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-fortran-mpi/4.6.1/</env>
                <env name="NETCDF_C_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-c++4-mpi/4.3.1/</env>
                <env name="NETLIB_LAPACK_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v3/Core/imkl/2023.2.0/mkl/2023.2.0/</env>
    	    <env name="OMP_STACKSIZE">256M</env>
                <env name="I_MPI_CC">icc</env>
                <env name="I_MPI_FC">ifort</env>
                <env name="I_MPI_F77">ifort</env>
                <env name="I_MPI_F90">ifort</env>
                <env name="I_MPI_CXX">icpc</env>
        </environment_variables>
        <resource_limits>
          <resource name="RLIMIT_STACK">300000000</resource>
        </resource_limits>
      </machine>
    </config_machines>
    


    File : ~/.cime/config_machines.xml

    <?xml version="1.0"?>
    
    <config_machines version="2.0">
      <machine MACH="niagara">
        <DESC>https://docs.alliancecan.ca/wiki/Niagara</DESC>
        <NODENAME_REGEX>nia.*.scinet.local</NODENAME_REGEX>
    
        <OS>LINUX</OS>
        <COMPILERS>intel,gnu</COMPILERS>
        <MPILIBS>openmpi</MPILIBS>
    
        <PROJECT>def-EDIT_THIS</PROJECT>
        <CHARGE_ACCOUNT>def-EDIT_THIS</CHARGE_ACCOUNT>
    
        <CIME_OUTPUT_ROOT>/scratch/$USER/cesm/output</CIME_OUTPUT_ROOT>
        <DIN_LOC_ROOT>/scratch/$USER/cesm/inputdata</DIN_LOC_ROOT>
        <DIN_LOC_ROOT_CLMFORC>${DIN_LOC_ROOT}/atm/datm7</DIN_LOC_ROOT_CLMFORC>
        <DOUT_S_ROOT>$CIME_OUTPUT_ROOT/archive/case</DOUT_S_ROOT>
        <GMAKE>make</GMAKE>
        <GMAKE_J>8</GMAKE_J>
        <BATCH_SYSTEM>slurm</BATCH_SYSTEM>
        <SUPPORTED_BY>support@tech.alliancecan.ca</SUPPORTED_BY>
        <MAX_TASKS_PER_NODE>40</MAX_TASKS_PER_NODE>
        <MAX_MPITASKS_PER_NODE>40</MAX_MPITASKS_PER_NODE>
        <PROJECT_REQUIRED>TRUE</PROJECT_REQUIRED>
    
        <mpirun mpilib="openmpi">
          <executable>mpirun</executable>
          <arguments>
            <arg name="anum_tasks"> -np [[:Template:Total tasks]]</arg>
          </arguments>
        </mpirun>
        <module_system type="module" allow_error="true">
          <init_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/perl</init_path>
          <init_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/env_modules_python.py</init_path>
          <init_path lang="csh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/csh</init_path>
          <init_path lang="sh">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/init/sh</init_path>
          <cmd_path lang="perl">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod perl</cmd_path>
          <cmd_path lang="python">/cvmfs/soft.computecanada.ca/custom/software/lmod/lmod/libexec/lmod python</cmd_path>
          <cmd_path lang="csh">module</cmd_path>
          <cmd_path lang="sh">module</cmd_path>
          <modules>
          <command name="purge"/>
     	<command name="load">StdEnv/2023</command>
          </modules>
          <modules compiler="intel">
    	<command name="load">intel/2023.2.1</command>
    	<command name="load">git-annex/10.20231129</command>
    	<command name="load">cmake/3.27.7</command>
          </modules>
          <modules mpilib="openmpi">
            <command name="load">openmpi/4.1.5</command>
            <command name="load">hdf5-mpi/1.14.2</command>
            <command name="load">netcdf-c++4-mpi/4.3.1</command>
            <command name="load">netcdf-fortran-mpi/4.6.1</command>
            <command name="load">netcdf-mpi/4.9.2</command>
    	<command name="load">xml-libxml/2.0208</command>
    	<command name="load">flexiblas/3.3.1</command>
          </modules>
        </module_system>
        <environment_variables>
                <env name="NETCDF_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/pnetcdf/1.12.3</env>
                <env name="NETCDF_FORTRAN_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-fortran-mpi/4.6.1/</env>
                <env name="NETCDF_C_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v4/MPI/intel2023/openmpi4/netcdf-c++4-mpi/4.3.1/</env>
                <env name="NETLIB_LAPACK_PATH">/cvmfs/soft.computecanada.ca/easybuild/software/2023/x86-64-v3/Core/imkl/2023.2.0/mkl/2023.2.0/</env>
    	    <env name="OMP_STACKSIZE">256M</env>
                <env name="I_MPI_CC">icc</env>
                <env name="I_MPI_FC">ifort</env>
                <env name="I_MPI_F77">ifort</env>
                <env name="I_MPI_F90">ifort</env>
                <env name="I_MPI_CXX">icpc</env>
        </environment_variables>
        <resource_limits>
          <resource name="RLIMIT_STACK">300000000</resource>
        </resource_limits>
      </machine>
    </config_machines>
    


  • Validate your XML machine file with the following commands:
    [name@server ~]$ xmllint --noout --schema /path/to/CESM/cime/config/xml_schemas/config_machines.xsd ~/.cime/config_machines.xml
    /home/name/.cime/config_machines.xml validates
    
    [name@beluga ~]$ query_config --machines current
    beluga (current) : https://docs.alliancecan.ca/wiki/Béluga/en
          os              LINUX
          compilers       intel,gnu
          pes/node        40
          max_tasks/node  40
    
  • Check the official template for additional parameters:
    [name@server ~]$ less /path/to/CESM/cime/config/xml_schemas/config_machines_template.xml
    

Local batch file[edit]

  • Create and edit the file ~/.cime/config_batch.xml from the following minimal content:
    File : ~/.cime/config_batch.xml

    <?xml version="1.0"?>
    
    <config_batch version="2.1">
      <batch_system type="slurm">
        <batch_submit>sbatch</batch_submit>
        <submit_args>
          <arg flag="--time" name="$JOB_WALLCLOCK_TIME"/>
          <arg flag="--account" name="$PROJECT"/>
        </submit_args>
        <directives>
          <directive>--mem=0</directive>
        </directives>
        <queues>
          <queue default="true">default</queue>
        </queues>
      </batch_system>
    </config_batch>
    


  • Validate your XML batch file with the following commands:
    [name@server ~]$ xmllint --noout --schema /path/to/CESM/cime/config/xml_schemas/config_batch.xsd ~/.cime/config_batch.xml
    /home/name/.cime/config_batch.xml validates
    
  • Check the documentation for additional configuration parameters and examples.

Local compilers file[edit]

  • Create and edit the file ~/.cime/config_compilers.xml from the following minimal content per cluster:
    File : ~/.cime/config_compilers.xml

    <?xml version="1.0"?>
    
    	<compiler MACH="beluga">
    	  <CPPDEFS>
    	    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
    	  </CPPDEFS>
    	  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
    	  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
    	  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
    	  <SLIBS>
    	    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
    	  </SLIBS>
    	</compiler>
    


    File : ~/.cime/config_compilers.xml

    <?xml version="1.0"?>
    
    	<compiler MACH="cedar">
    	  <CPPDEFS>
    	    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
    	  </CPPDEFS>
    	  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
    	  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
    	  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
    	  <SLIBS>
    	    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
    	  </SLIBS>
    	</compiler>
    


    File : ~/.cime/config_compilers.xml

    <?xml version="1.0"?>
    
    	<compiler MACH="graham">
    	  <CPPDEFS>
    	    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
    	  </CPPDEFS>
    	  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
    	  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
    	  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
    	  <SLIBS>
    	    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
    	  </SLIBS>
    	</compiler>
    


    File : ~/.cime/config_compilers.xml

    <?xml version="1.0"?>
    
    	<compiler MACH="narval">
    	  <CPPDEFS>
    	    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
    	  </CPPDEFS>
    	  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
    	  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
    	  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
    	  <SLIBS>
    	    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
    	  </SLIBS>
    	</compiler>
    


    File : ~/.cime/config_compilers.xml

    <?xml version="1.0"?>
    
    	<compiler MACH="niagara">
    	  <CPPDEFS>
    	    <append MODEL="gptl"> -DHAVE_NANOTIME -DBIT64 -DHAVE_VPRINTF -DHAVE_BACKTRACE -DHAVE_SLASHPROC -DHAVE_COMM_F2C -DHAVE_TIMES -DHAVE_GETTIMEOFDAY </append>
    	  </CPPDEFS>
    	  <NETCDF_PATH>$ENV{NETCDF_FORTRAN_ROOT}</NETCDF_PATH>
    	  <PIO_FILESYSTEM_HINTS>lustre</PIO_FILESYSTEM_HINTS>
    	  <PNETCDF_PATH>$ENV{PARALLEL_NETCDF_ROOT}</PNETCDF_PATH>
    	  <SLIBS>
    	    <append>-L$(NETCDF_PATH)/lib -lnetcdff -L$(NETCDF_C_ROOT)/lib -lnetcdf -L$(NETLIB_LAPACK_PATH)/lib/intel64 -lmkl -ldl </append>
    	  </SLIBS>
    	</compiler>
    


    Checkout externals[edit]

    Before your first use of CESM, check out the individual model components by running the checkout_externals script.

    [name@server ~]$ /path/to/CESM/manage_externals/checkout_externals
    

    You may need to accept a certificate from the CESM repository to download input files.


    Creating a test case[edit]

    The following command assumes the default model cesm and the current machine:

    [name@server ~]$ /path/to/CESM/cime/scripts/create_newcase --case test_case --compset IHistClm50Bgc --res f19_g17
    


    Reference[edit]