diff --git a/config/cesm/machines/config_batch.xml b/config/cesm/machines/config_batch.xml
index 9d91e2d9b6f..896e37cc9cf 100644
--- a/config/cesm/machines/config_batch.xml
+++ b/config/cesm/machines/config_batch.xml
@@ -343,39 +343,6 @@
-
- sbatch
-
-
-
-
-
-
- -C haswell
-
-
- regular
-
-
-
-
-
- sbatch
-
-
-
-
-
-
- -C knl,quad,cache
- -S 2
-
-
- regular
-
-
-
-
sbatch
@@ -620,6 +587,24 @@
+
+ sbatch
+
+
+
+
+
+
+ --constraint=cpu
+
+
+ regular
+ debug
+
+
+
+
+
@@ -718,7 +703,7 @@
- -R "span[ptile={{ tasks_per_node }}]"
+ -R "span[ptile={{ tasks_per_node }}]"
p_short
diff --git a/config/cesm/machines/config_compilers.xml b/config/cesm/machines/config_compilers.xml
index fae1ab4f95f..73d44b42306 100644
--- a/config/cesm/machines/config_compilers.xml
+++ b/config/cesm/machines/config_compilers.xml
@@ -755,6 +755,7 @@ using a fortran linker.
-lnetcdff -lnetcdf
+
-march=core-avx2 -no-fma
@@ -764,6 +765,22 @@ using a fortran linker.
+
+
+ -march=core-avx2 -no-fma
+
+
+ -march=core-avx2 -no-fma -qno-opt-dynamic-align -fp-model precise -std=gnu99
+ -std=gnu89
+
+
+ -L$(NETCDF_PATH)/lib -lnetcdff -lnetcdf
+ -L$(PNETCDF_PATH)/lib -lpnetcdf
+
+
+
+
+
-O2
diff --git a/config/cesm/machines/config_machines.xml b/config/cesm/machines/config_machines.xml
index a34f7f63fb9..287e5152267 100644
--- a/config/cesm/machines/config_machines.xml
+++ b/config/cesm/machines/config_machines.xml
@@ -851,213 +851,116 @@ This allows using a different mpirun command to launch unit tests
-
-
-
- NERSC XC40 Haswell, os is CNL, 32 pes/node, batch system is Slurm
- cori
- CNL
- intel,gnu,cray
- mpt
- $ENV{SCRATCH}
- /project/projectdirs/ccsm1/inputdata
- /project/projectdirs/ccsm1/inputdata/atm/datm7
+
+ Perlmutter CPU-only nodes at NERSC. Phase 2 only: each node has 2 AMD EPYC 7713 64-core (Milan) CPUs and 512GB of memory. Batch system is Slurm
+
+ Linux
+ intel
+ mpich
+ $ENV{PSCRATCH}
+ /global/cfs/cdirs/ccsm1/inputdata
+ /global/cfs/cdirs/ccsm1/inputdata/atm/datm7
$CIME_OUTPUT_ROOT/archive/$CASE
- /project/projectdirs/ccsm1/ccsm_baselines
- /project/projectdirs/ccsm1/tools/cprnc.corip1/cprnc
+ /global/cfs/cdirs/ccsm1/ccsm_baselines
+ /global/cfs/cdirs/ccsm1/tools/cprnc.perlmutter/cprnc
8
slurm
cseg
- 64
- 32
+ 256
+ 128
srun
--label
- -n {{ total_tasks }}
- -c {{ srun_binding }}
+ -n {{ total_tasks }} -N {{ num_nodes }}
+ -c $SHELL{echo 256/`./xmlquery --value MAX_MPITASKS_PER_NODE`|bc}
+ $SHELL{if [ 128 -ge `./xmlquery --value MAX_MPITASKS_PER_NODE` ]; then echo "--cpu_bind=cores"; else echo "--cpu_bind=threads";fi;}
+ -m plane=$SHELL{echo `./xmlquery --value MAX_MPITASKS_PER_NODE`}
-
- /opt/modules/default/init/perl.pm
- /opt/modules/default/init/python.py
- /opt/modules/default/init/sh
- /opt/modules/default/init/csh
- /opt/modules/default/bin/modulecmd perl
- /opt/modules/default/bin/modulecmd python
+
+ /usr/share/lmod/8.3.1/init/perl
+ /usr/share/lmod/8.3.1/init/python
+ /usr/share/lmod/8.3.1/init/sh
+ /usr/share/lmod/8.3.1/init/csh
+ /usr/share/lmod/lmod/libexec/lmod perl
+ /usr/share/lmod/lmod/libexec/lmod python
module
module
-
- PrgEnv-intel
- PrgEnv-cray
- PrgEnv-gnu
- intel
- cce
- cray-parallel-netcdf
- cray-parallel-hdf5
- pmi
- cray-libsci
- cray-mpich2
- cray-mpich
- cray-netcdf
- cray-hdf5
- cray-netcdf-hdf5parallel
- craype-sandybridge
- craype-ivybridge
- craype
-
-
- PrgEnv-intel
- intel intel/19.0.3.199
- /global/project/projectdirs/ccsm1/modulefiles/cori
-
-
- esmf/7.1.0r-defio-intel18.0.1.163-mpi-O-cori-haswell
-
-
- esmf/7.1.0r-netcdf-intel18.0.1.163-mpiuni-O-haswell
+
+ cray-hdf5-parallel
+ cray-netcdf-hdf5parallel
+ cray-parallel-netcdf
+ cray-netcdf
+ cray-hdf5
+ PrgEnv-gnu
+ PrgEnv-intel
+ PrgEnv-nvidia
+ PrgEnv-cray
+ PrgEnv-aocc
+ intel
+ intel-oneapi
+ nvidia
+ aocc
+ cudatoolkit
+ climate-utils
+ craype-accel-nvidia80
+ craype-accel-host
+ perftools-base
+ perftools
+ darshan
-
- PrgEnv-cray
- cce cce/12.0.3
-
- PrgEnv-gnu
- gcc gcc/8.3.0
-
-
- cray-memkind
- craype craype/2.6.2
-
-
- cray-libsci/20.09.1
-
-
- cray-mpich/7.7.19
-
-
- cray-hdf5/1.12.1.1
- cray-netcdf/4.8.1.1
-
-
- cray-hdf5-parallel/1.12.1.1
- cray-netcdf-hdf5parallel/4.8.1.1
- cray-parallel-netcdf/1.12.2.1
-
-
- cmake/3.22.1
-
-
-
- 256M
- spread
- threads
-
-
-
-
-
-
- NERSC XC* KNL, os is CNL, 68 pes/node, batch system is Slurm
- CNL
- intel,gnu,cray
- mpt
- $ENV{SCRATCH}
- /project/projectdirs/ccsm1/inputdata
- /project/projectdirs/ccsm1/inputdata/atm/datm7
- $CIME_OUTPUT_ROOT/archive/$CASE
- /project/projectdirs/ccsm1/ccsm_baselines
- /project/projectdirs/ccsm1/tools/cprnc.corip1/cprnc
- 8
- slurm
- cseg
- 256
- 64
- 68
-
- srun
-
- --label
- -n {{ total_tasks }}
- -c {{ srun_binding }} --cpu_bind=cores
-
-
-
- /opt/modules/default/init/perl.pm
- /opt/modules/default/init/python.py
- /opt/modules/default/init/sh
- /opt/modules/default/init/csh
- /opt/modules/default/bin/modulecmd perl
- /opt/modules/default/bin/modulecmd python
- module
- module
-
- craype-mic-knl
- craype-haswell
- PrgEnv-intel
- PrgEnv-cray
- PrgEnv-gnu
- intel
- cce
- cray-parallel-netcdf
- cray-parallel-hdf5
- pmi
- cray-libsci
- cray-mpich2
- cray-mpich
- cray-netcdf
- cray-hdf5
- cray-netcdf-hdf5parallel
+ PrgEnv-gnu/8.3.3
+ gcc/11.2.0
+ cray-libsci/23.02.1.1
- PrgEnv-intel
- intel intel/19.1.2.254
- /global/project/projectdirs/ccsm1/modulefiles/cori
-
-
- esmf/7.1.0r-defio-intel18.0.1.163-mpi-O-cori-knl
-
-
- esmf/7.1.0r-netcdf-intel18.0.1.163-mpiuni-O-knl
+ PrgEnv-intel/8.3.3
+ intel/2023.1.0
-
- PrgEnv-cray
- cce cce/8.6.5
+
+ PrgEnv-nvidia
+ nvidia/22.7
+ cray-libsci/23.02.1.1
-
- PrgEnv-gnu
- gcc gcc/8.3.0
-
-
- cray-memkind
- craype craype/2.7.10
- craype-mic-knl
-
-
- cray-libsci/20.09.1
+
+
+ PrgEnv-aocc
+ aocc/4.0.0
+ cray-libsci/23.02.1.1
+
- cray-mpich/7.7.19
-
-
- cray-hdf5/1.12.1.1
- cray-netcdf/4.8.1.1
-
-
- cray-netcdf-hdf5parallel/4.8.1.1
- cray-hdf5-parallel/1.12.1.1
- cray-parallel-netcdf/1.12.2.1
+ craype-accel-host
+ craype/2.7.20
+ cray-mpich/8.1.25
+ cray-hdf5-parallel/1.12.2.3
+ cray-netcdf-hdf5parallel/4.9.0.3
+ cray-parallel-netcdf/1.12.3.3
+ cmake/3.24.3
- 256M
+ 1
+ 1
+ 128M
spread
threads
+ FALSE
+ /global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
+ software
+ MPI_Bcast
+ $ENV{CRAY_NETCDF_HDF5PARALLEL_PREFIX}
+ $ENV{CRAY_PARALLEL_NETCDF_PREFIX}
+
+ -1
+