From 9aa3cc88bd5aa11f12a13d92697de2e1d8bedede Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 17:29:20 +0000
Subject: [PATCH 01/54] try reporting CI status as a workflow action

---
 .github/workflows/orion_status.yaml | 57 +++++++++++++++++++++++++++++
 1 file changed, 57 insertions(+)
 create mode 100644 .github/workflows/orion_status.yaml

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
new file mode 100644
index 000000000..38eb189b2
--- /dev/null
+++ b/.github/workflows/orion_status.yaml
@@ -0,0 +1,57 @@
+name: OrionStatus
+
+on:
+  workflow_dispatch:
+    pull_request:
+    types: [labeled]
+
+jobs:
+
+  getlabels:
+    runs-on: ubuntu-latest
+    outputs:
+      labels: ${{ steps.id.outputs.labels }}
+    steps:
+      - name: Get Label Steps
+        id: id
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          OWNER: ${{ github.repository_owner }}
+          REPO_NAME: ${{ github.event.repository.name }}
+          PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
+        run: |
+          LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
+          LABELS=$(echo "$LABELS1" | tr '\n' ' ')
+          echo "labels=$LABELS" >> $GITHUB_OUTPUT
+
+  Ready:
+    if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready')
+    runs-on: ubuntu-latest
+    needs:
+     - getlabels
+
+    steps:
+    - name: Ready
+      run: echo "Ready"
+
+  Building:
+    if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Building')
+    runs-on: ubuntu-latest
+    needs:
+     - getlabels
+     - Ready
+
+    steps:
+    - name: Building
+      run: echo "Building"
+
+  Passed:
+    if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed')
+    runs-on: ubuntu-latest
+    needs:
+     - getlabels
+     - Building
+
+    steps:
+    - name: Passed
+      run: echo "Passed"
-- 
GitLab


From 92f2306f340ef297f469451df9ac323df5e8260f Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 18:20:45 +0000
Subject: [PATCH 02/54] added PR number as input

---
 .github/workflows/orion_status.yaml | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 38eb189b2..a673d5093 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -2,8 +2,11 @@ name: OrionStatus
 
 on:
   workflow_dispatch:
-    pull_request:
-    types: [labeled]
+    inputs:
+      pr_number:
+        description: PR number
+        type: string
+  pull_request: [labeled]
 
 jobs:
 
@@ -18,7 +21,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           OWNER: ${{ github.repository_owner }}
           REPO_NAME: ${{ github.event.repository.name }}
-          PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
+          PULL_REQUEST_NUMBER: ${{ inputs.pr_number }}
         run: |
           LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
           LABELS=$(echo "$LABELS1" | tr '\n' ' ')
-- 
GitLab


From 8c4b2f0246bf0f1709f317ac349d5a43e48ab53b Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terrence.mcguinness@cox.net>
Date: Wed, 18 Oct 2023 18:24:37 +0000
Subject: [PATCH 03/54] Update orion_status.yaml

put types: [labeled] on its own line
---
 .github/workflows/orion_status.yaml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index a673d5093..210e41fb1 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -6,7 +6,8 @@ on:
       pr_number:
         description: PR number
         type: string
-  pull_request: [labeled]
+  pull_request:
+     types: [labeled]
 
 jobs:
 
-- 
GitLab


From 74aa7347c606d6a9edcd842f27af8f5c8fb85901 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 18:37:54 +0000
Subject: [PATCH 04/54] take out pull_request trigger from on:

---
 .github/workflows/orion_status.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index a673d5093..80bb6861c 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -6,7 +6,6 @@ on:
       pr_number:
         description: PR number
         type: string
-  pull_request: [labeled]
 
 jobs:
 
-- 
GitLab


From 9b8d071e4d6a550d027719e09d06814673f6992f Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 19:07:17 +0000
Subject: [PATCH 05/54] replace label if: conditions with until/do polling loops

---
 .github/workflows/orion_status.yaml | 21 +++++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 80bb6861c..b92c27031 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -27,17 +27,19 @@ jobs:
           echo "labels=$LABELS" >> $GITHUB_OUTPUT
 
   Ready:
-    if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready')
     runs-on: ubuntu-latest
     needs:
      - getlabels
 
     steps:
     - name: Ready
-      run: echo "Ready"
+      run:  |
+        until[ $ready ]
+        do
+          ready=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }})
+        done  
 
   Building:
-    if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Building')
     runs-on: ubuntu-latest
     needs:
      - getlabels
@@ -45,10 +47,13 @@ jobs:
 
     steps:
     - name: Building
-      run: echo "Building"
+      run: |
+        until[ $building ]
+        do
+          building=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }})
+        done  
 
   Passed:
-    if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed')
     runs-on: ubuntu-latest
     needs:
      - getlabels
@@ -56,4 +61,8 @@ jobs:
 
     steps:
     - name: Passed
-      run: echo "Passed"
+      run: |
+        until[ $passed ]
+        do
+          passed=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }})
+        done  
-- 
GitLab


From de4fd80d356447194bc6e9386bc8b77b21dc63db Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 19:15:24 +0000
Subject: [PATCH 06/54] fix until loop syntax and initialize flags

---
 .github/workflows/orion_status.yaml | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index b92c27031..cc745e4d1 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -34,7 +34,8 @@ jobs:
     steps:
     - name: Ready
       run:  |
-        until[ $ready ]
+        ready=False
+        until [ $ready ];
         do
           ready=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }})
         done  
@@ -48,7 +49,8 @@ jobs:
     steps:
     - name: Building
       run: |
-        until[ $building ]
+        building=False
+        until [ $building ];
         do
           building=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }})
         done  
@@ -62,7 +64,8 @@ jobs:
     steps:
     - name: Passed
       run: |
-        until[ $passed ]
+        passed=False
+        until [ $passed ];
         do
           passed=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }})
         done  
-- 
GitLab


From 3623231113fda22da14de68a0c30d6301619fb78 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 19:32:20 +0000
Subject: [PATCH 07/54] use flag variables directly in until conditions

---
 .github/workflows/orion_status.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index cc745e4d1..86750b1f0 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -35,7 +35,7 @@ jobs:
     - name: Ready
       run:  |
         ready=False
-        until [ $ready ];
+        until $ready
         do
           ready=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }})
         done  
@@ -50,7 +50,7 @@ jobs:
     - name: Building
       run: |
         building=False
-        until [ $building ];
+        until $building
         do
           building=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }})
         done  
@@ -65,7 +65,7 @@ jobs:
     - name: Passed
       run: |
         passed=False
-        until [ $passed ];
+        until $passed
         do
           passed=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }})
         done  
-- 
GitLab


From 218a084cfcef6032419d9df0bd40a1d6840009be Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 19:37:54 +0000
Subject: [PATCH 08/54] set until loop flags to lowercase false

---
 .github/workflows/orion_status.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 86750b1f0..7c29c23ed 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -34,7 +34,7 @@ jobs:
     steps:
     - name: Ready
       run:  |
-        ready=False
+        ready=false
         until $ready
         do
           ready=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }})
@@ -49,7 +49,7 @@ jobs:
     steps:
     - name: Building
       run: |
-        building=False
+        building=false
         until $building
         do
           building=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }})
@@ -64,7 +64,7 @@ jobs:
     steps:
     - name: Passed
       run: |
-        passed=False
+        passed=false
         until $passed
         do
           passed=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }})
-- 
GitLab


From 83d5c791058b0486c7d506f88d28fb2303ad113d Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 19:53:11 +0000
Subject: [PATCH 09/54] use contains() expressions directly in until conditions

---
 .github/workflows/orion_status.yaml | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 7c29c23ed..743d89b9f 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -34,10 +34,8 @@ jobs:
     steps:
     - name: Ready
       run:  |
-        ready=false
-        until $ready
+        until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }}
         do
-          ready=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }})
         done  
 
   Building:
@@ -49,10 +47,8 @@ jobs:
     steps:
     - name: Building
       run: |
-        building=false
-        until $building
+        until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }}
         do
-          building=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }})
         done  
 
   Passed:
@@ -64,8 +60,6 @@ jobs:
     steps:
     - name: Passed
       run: |
-        passed=false
-        until $passed
+        until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }}
         do
-          passed=$(${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }})
         done  
-- 
GitLab


From de55f1a651e70204e0164fc5ef00f9faf6aaa9de Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 20:01:32 +0000
Subject: [PATCH 10/54] add noop to empty until loop bodies

---
 .github/workflows/orion_status.yaml | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 743d89b9f..e5d62e19f 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -36,7 +36,8 @@ jobs:
       run:  |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }}
         do
-        done  
+         noop=""
+        done
 
   Building:
     runs-on: ubuntu-latest
@@ -49,7 +50,8 @@ jobs:
       run: |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }}
         do
-        done  
+         noop=""
+        done
 
   Passed:
     runs-on: ubuntu-latest
@@ -62,4 +64,5 @@ jobs:
       run: |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }}
         do
-        done  
+         noop=""
+        done
-- 
GitLab


From 5b3e6cc3a6b3b3b44a9e12032c866d6137749f27 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 21:06:49 +0000
Subject: [PATCH 11/54] remove steps: keys from jobs

---
 .github/workflows/orion_status.yaml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index e5d62e19f..eacfa374d 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -31,7 +31,6 @@ jobs:
     needs:
      - getlabels
 
-    steps:
     - name: Ready
       run:  |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }}
@@ -45,7 +44,6 @@ jobs:
      - getlabels
      - Ready
 
-    steps:
     - name: Building
       run: |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }}
@@ -59,7 +57,6 @@ jobs:
      - getlabels
      - Building
 
-    steps:
     - name: Passed
       run: |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }}
-- 
GitLab


From 022dd282373d24e702ef059948b65a1cd89144b8 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 21:07:44 +0000
Subject: [PATCH 12/54] restore steps: keys; jobs need steps

---
 .github/workflows/orion_status.yaml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index eacfa374d..cc483383d 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -31,6 +31,7 @@ jobs:
     needs:
      - getlabels
 
+    steps:
     - name: Ready
       run:  |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }}
@@ -44,6 +45,7 @@ jobs:
      - getlabels
      - Ready
 
+    steps:
     - name: Building
       run: |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }}
@@ -57,9 +59,10 @@ jobs:
      - getlabels
      - Building
 
+    steps:
     - name: Passed
       run: |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }}
         do
-         noop=""
+          noop=""
         done
-- 
GitLab


From c9c8ae25820be9f8ca8a29d37e360d7a0845005f Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Wed, 18 Oct 2023 21:09:01 +0000
Subject: [PATCH 13/54] replace noop with sleep 10m in until loops

---
 .github/workflows/orion_status.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index cc483383d..5c53186e5 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -36,7 +36,7 @@ jobs:
       run:  |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }}
         do
-         noop=""
+         sleep 10m
         done
 
   Building:
@@ -50,7 +50,7 @@ jobs:
       run: |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }}
         do
-         noop=""
+         sleep 10m
         done
 
   Passed:
@@ -64,5 +64,5 @@ jobs:
       run: |
         until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }}
         do
-          noop=""
+         sleep 10m
         done
-- 
GitLab


From 74a5b5a85b6fc3667c621cf9ff757f2f6dbca14e Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Thu, 19 Oct 2023 19:06:17 +0000
Subject: [PATCH 14/54] added reusable workflow for getting labels for each job

---
 .github/workflows/getlabels.yaml    | 57 +++++++++++++++++++++++++++++
 .github/workflows/orion_status.yaml | 17 ---------
 2 files changed, 57 insertions(+), 17 deletions(-)
 create mode 100644 .github/workflows/getlabels.yaml

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
new file mode 100644
index 000000000..dda30b218
--- /dev/null
+++ b/.github/workflows/getlabels.yaml
@@ -0,0 +1,57 @@
+on:
+  workflow_call:
+      inputs:
+         pr_number:
+            required: true
+            type: string
+         max_runtime:
+            required: false
+            type: string
+         stage: 
+            required: true
+            type: string
+
+      outputs:
+        state:
+            description: "The return state of Stage (true/false)"
+            value: ${{ jobs.getlabels.outputs.state }}
+
+      secrets:
+        token:
+            required: true      
+jobs:
+
+ getlabels:
+    runs-on: ubuntu-latest
+    outputs:
+      state: ${{ steps.id.outputs.state }}
+    steps:
+      - name: Get Label Steps
+        id: id
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          OWNER: ${{ github.repository_owner }}
+          REPO_NAME: ${{ github.event.repository.name }}
+          PULL_REQUEST_NUMBER: ${{ inputs.pr_number }}
+          STAGE: ${{ inputs.stage }}
+          MAX_TIME: ${{ inputs.max_runtime }}
+        run: |
+          DONE=false
+          count=0
+          until false
+          do
+             LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
+             LABELS=$(echo "$LABELS1" | tr '\n' ' ')
+             check_label="CI-Orion-${STAGE}"
+             if [[ "${LABELS}" == *"${check_label}"* ]]; then
+                DONE=true
+                break
+             fi   
+             sleep 10m
+             count=$((count+10))
+             if [[ ${count} -gt ${MAX_TIME} ]]; then
+                DONE=false
+                break
+             fi
+          done
+          echo "state=$DONE" >> $GITHUB_OUPUT   
diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 5c53186e5..880060950 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -9,23 +9,6 @@ on:
 
 jobs:
 
-  getlabels:
-    runs-on: ubuntu-latest
-    outputs:
-      labels: ${{ steps.id.outputs.labels }}
-    steps:
-      - name: Get Label Steps
-        id: id
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          OWNER: ${{ github.repository_owner }}
-          REPO_NAME: ${{ github.event.repository.name }}
-          PULL_REQUEST_NUMBER: ${{ inputs.pr_number }}
-        run: |
-          LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
-          LABELS=$(echo "$LABELS1" | tr '\n' ' ')
-          echo "labels=$LABELS" >> $GITHUB_OUTPUT
-
   Ready:
     runs-on: ubuntu-latest
     needs:
-- 
GitLab


From 9e306ef382b4844d78e28e5ded0d8212661482f8 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Thu, 19 Oct 2023 19:14:11 +0000
Subject: [PATCH 15/54] fixed uses path

---
 .github/workflows/orion_status.yaml | 78 +++++++++++++++++------------
 1 file changed, 47 insertions(+), 31 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 880060950..d56a05012 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -6,46 +6,62 @@ on:
       pr_number:
         description: PR number
         type: string
+      max_runtime:
+        description: Maximum timeout for the running experiment
+        type: string
 
 jobs:
 
+  get_Ready:
+    uses: ./.github/workflows/getlabels.yaml
+    with:
+      pr_number: inputs.pr_number
+      max_runtime: 20
+      stage: "Ready"
+    secrets: inherit
+
   Ready:
     runs-on: ubuntu-latest
-    needs:
-     - getlabels
-
+    needs: get_Ready
     steps:
-    - name: Ready
-      run:  |
-        until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Ready') }}
-        do
-         sleep 10m
-        done
+     - run:  |
+         if [[ ${{ needs.get_Ready.outputs.state }}  ]]; then
+           echo "Waiting for Ready State timmed out"
+           exit 1
+         fi  
+
+  get_Building:       
+    uses: ./.github/workflows/getlabels.yaml
+    with:
+      pr_number: inputs.pr_number
+      max_runtime: 40
+      stage: "Building"
+    secrets: inherit
 
   Building:
     runs-on: ubuntu-latest
-    needs:
-     - getlabels
-     - Ready
-
+    needs: get_Building
     steps:
-    - name: Building
-      run: |
-        until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Building') }}
-        do
-         sleep 10m
-        done
-
-  Passed:
-    runs-on: ubuntu-latest
-    needs:
-     - getlabels
-     - Building
+      - run: |
+         if [[ ${{ needs.get_Building.outputs.state }}  ]]; then
+           echo "Waiting for Ready State timmed out"
+           exit 1
+         fi
 
+  get_Running:
+    uses: ./.github/workflows/getlabels.yaml
+    with:
+      pr_number: inputs.pr_number
+      max_runtime: 60
+      stage: "Running"
+    secrets: inherit
+
+  Running:
+    runs-on: ubuntu-latest
+    needs: get_Running
     steps:
-    - name: Passed
-      run: |
-        until ${{ contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') }}
-        do
-         sleep 10m
-        done
+      - run: |
+         if [[ ${{ needs.get_Running.outputs.state }}  ]]; then
+           echo "Waiting for Ready State timmed out"
+           exit 1
+         fi  
\ No newline at end of file
-- 
GitLab


From 1430ab38082525044b85286b51d8584aedbf63b3 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Thu, 19 Oct 2023 19:39:17 +0000
Subject: [PATCH 16/54] added dependencies for main jobs and name in getlabels

---
 .github/workflows/getlabels.yaml    | 2 ++
 .github/workflows/orion_status.yaml | 4 ++--
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index dda30b218..ffc40cc92 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -1,3 +1,5 @@
+name: Get Labels
+
 on:
   workflow_call:
       inputs:
diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index d56a05012..b2a80a40b 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -40,7 +40,7 @@ jobs:
 
   Building:
     runs-on: ubuntu-latest
-    needs: get_Building
+    needs: [get_Building, Ready]
     steps:
       - run: |
          if [[ ${{ needs.get_Building.outputs.state }}  ]]; then
@@ -58,7 +58,7 @@ jobs:
 
   Running:
     runs-on: ubuntu-latest
-    needs: get_Running
+    needs: [get_Running, Building]
     steps:
       - run: |
          if [[ ${{ needs.get_Running.outputs.state }}  ]]; then
-- 
GitLab


From 56b07342064b4d4d3404180241002368858191f8 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Thu, 19 Oct 2023 20:06:10 +0000
Subject: [PATCH 17/54] change order in needs

---
 .github/workflows/orion_status.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index b2a80a40b..7cc7a1252 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -40,7 +40,7 @@ jobs:
 
   Building:
     runs-on: ubuntu-latest
-    needs: [get_Building, Ready]
+    needs: [Ready, get_Building]
     steps:
       - run: |
          if [[ ${{ needs.get_Building.outputs.state }}  ]]; then
@@ -58,7 +58,7 @@ jobs:
 
   Running:
     runs-on: ubuntu-latest
-    needs: [get_Running, Building]
+    needs: [Building, get_Running]
     steps:
       - run: |
          if [[ ${{ needs.get_Running.outputs.state }}  ]]; then
-- 
GitLab


From 1a5d0b51642eb70f71e693355e86212d3607b7b3 Mon Sep 17 00:00:00 2001
From: Walter Kolczynski - NOAA <Walter.Kolczynski@noaa.gov>
Date: Fri, 20 Oct 2023 15:54:33 +0000
Subject: [PATCH 18/54] Split clean-up into separate job (#1906)

Moves the clean-up that was previously done in the archive jobs into
its own separate job. The clean-up is also streamlined considerably
by using only `COM_TOP` instead of going through every template.
There is also additional streamlining/corrections in the function that
does the actual removing.

Some settings used by both jobs were elevated to `config.base`.
Others only needed for cleanup were moved to the new config for
that job.

Also corrects a small error encountered when attempting to rerun an
ensemble forecast.

Resolves #583
Resolves #1872
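
In essence, the per-template removal loops collapse into a single
find-based prune of COM_TOP driven by an exclude list. Below is a
minimal, self-contained sketch of that pattern; the prune_dir name, the
example path, and the use of a bash array terminated with -false
(instead of the string-built expression in exglobal_cleanup.sh) are
illustrative choices, not part of this change:

    #! /usr/bin/env bash
    # Sketch only: prune a COM directory, keeping files that match the
    # given exclude patterns, then drop directories left empty.
    prune_dir() {
        # Usage: prune_dir <directory> [exclude-pattern ...]
        local directory=$1; shift
        [[ -d ${directory} ]] || return 0
        local -a keep=()
        local pattern
        for pattern in "$@"; do
            keep+=( -name "${pattern}" -or )
        done
        keep+=( -false )   # terminates the -or chain; no patterns means keep nothing
        # Remove regular files and symlinks that do not match any exclude pattern
        find "${directory}" \( -type f -or -type l \) -not \( "${keep[@]}" \) -delete
        # Remove any directories left empty
        find "${directory}" -type d -empty -delete
    }

    # Hypothetical usage with the gdas/gfs exclude list from config.cleanup:
    prune_dir "/path/to/ROTDIR/gdas.20231018/00" "*prepbufr*" "*cnvstat*" "*atmanl.nc"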
---
 jobs/JGLOBAL_CLEANUP                       |  17 ++
 jobs/rocoto/arch.sh                        |   1 -
 jobs/rocoto/cleanup.sh                     |  19 +++
 parm/config/gfs/config.arch                |   9 -
 parm/config/gfs/config.base.emc.dyn        |   5 +-
 parm/config/gfs/config.cleanup             |  25 +++
 parm/config/gfs/config.resources           |   9 +-
 scripts/exgdas_enkf_earc.sh                | 168 ------------------
 scripts/exgdas_enkf_fcst.sh                |   2 +
 scripts/exglobal_archive.sh                | 189 ---------------------
 scripts/exglobal_cleanup.sh                | 106 ++++++++++++
 workflow/applications/gfs_cycled.py        |   6 +-
 workflow/applications/gfs_forecast_only.py |   4 +-
 workflow/rocoto/gfs_tasks.py               |  17 ++
 workflow/rocoto/tasks.py                   |   2 +-
 15 files changed, 204 insertions(+), 375 deletions(-)
 create mode 100755 jobs/JGLOBAL_CLEANUP
 create mode 100755 jobs/rocoto/cleanup.sh
 create mode 100644 parm/config/gfs/config.cleanup
 create mode 100755 scripts/exglobal_cleanup.sh

diff --git a/jobs/JGLOBAL_CLEANUP b/jobs/JGLOBAL_CLEANUP
new file mode 100755
index 000000000..ad938ccf6
--- /dev/null
+++ b/jobs/JGLOBAL_CLEANUP
@@ -0,0 +1,17 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "cleanup" -c "base cleanup"
+
+"${HOMEgfs}/scripts/exglobal_cleanup.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || (echo "${DATAROOT} does not exist. ABORT!"; exit 1)
+[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}"
+
+exit 0
+
diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh
index 2f62d8b35..d949b7d76 100755
--- a/jobs/rocoto/arch.sh
+++ b/jobs/rocoto/arch.sh
@@ -16,5 +16,4 @@ export jobid="${job}.$$"
 "${HOMEgfs}"/jobs/JGLOBAL_ARCHIVE
 status=$?
 
-
 exit "${status}"
diff --git a/jobs/rocoto/cleanup.sh b/jobs/rocoto/cleanup.sh
new file mode 100755
index 000000000..96303fde5
--- /dev/null
+++ b/jobs/rocoto/cleanup.sh
@@ -0,0 +1,19 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source FV3GFS workflow modules
+. "${HOMEgfs}"/ush/load_fv3gfs_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="cleanup"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}"/jobs/JGLOBAL_CLEANUP
+status=$?
+
+exit "${status}"
diff --git a/parm/config/gfs/config.arch b/parm/config/gfs/config.arch
index 31a3713fb..a23bcce6a 100644
--- a/parm/config/gfs/config.arch
+++ b/parm/config/gfs/config.arch
@@ -12,13 +12,4 @@ export ARCH_GAUSSIAN="YES"
 export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS}
 export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS}
 
-#--online archive of nemsio files for fit2obs verification
-export FITSARC="YES"
-export FHMAX_FITS=132
-[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
-
-#--starting and ending hours of previous cycles to be removed from rotating directory
-export RMOLDSTD=144
-export RMOLDEND=24
-
 echo "END: config.arch"
diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn
index 09d8897a3..b77787794 100644
--- a/parm/config/gfs/config.base.emc.dyn
+++ b/parm/config/gfs/config.base.emc.dyn
@@ -394,6 +394,9 @@ export ARCH_CYC=00           # Archive data at this cycle for warm_start capabil
 export ARCH_WARMICFREQ=4     # Archive frequency in days for warm_start capability
 export ARCH_FCSTICFREQ=1     # Archive frequency in days for gdas and gfs forecast-only capability
 
-export DELETE_COM_IN_ARCHIVE_JOB="YES"   # NO=retain ROTDIR.  YES default in arch.sh and earc.sh.
+#--online archive of nemsio files for fit2obs verification
+export FITSARC="YES"
+export FHMAX_FITS=132
+[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
 
 echo "END: config.base"
diff --git a/parm/config/gfs/config.cleanup b/parm/config/gfs/config.cleanup
new file mode 100644
index 000000000..1908c91bb
--- /dev/null
+++ b/parm/config/gfs/config.cleanup
@@ -0,0 +1,25 @@
+#! /usr/bin/env bash
+
+########## config.cleanup ##########
+echo "BEGIN: config.cleanup"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" cleanup
+
+export CLEANUP_COM="YES"   # NO=retain ROTDIR.  YES default in cleanup.sh
+
+#--starting and ending hours of previous cycles to be removed from rotating directory
+export RMOLDSTD=144
+export RMOLDEND=24
+
+# Specify the list of files to exclude from the first stage of cleanup
+# Because arrays cannot be exported, list is a single string of comma-
+# separated values. This string is split to form an array at runtime.
+case ${RUN} in
+	gdas | gfs)	exclude_string="*prepbufr*, *cnvstat*, *atmanl.nc" ;;
+	enkf*)      exclude_string="*f006.ens*" ;;
+	*)			exclude_string="" ;;
+esac
+export exclude_string
+
+echo "END: config.cleanup"
\ No newline at end of file
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index d6654b61e..6503ae552 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -14,7 +14,7 @@ if [[ $# -ne 1 ]]; then
     echo "atmensanlinit atmensanlrun atmensanlfinal"
     echo "landanl"
     echo "aeroanlinit aeroanlrun aeroanlfinal"
-    echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres"
+    echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch cleanup echgres"
     echo "eobs ediag eomg eupd ecen esfc efcs epos earc"
     echo "init_chem mom6ic ocnpost"
     echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
@@ -773,6 +773,13 @@ elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then
       eval "export memory_${step}=50GB"
     fi
 
+elif [[ ${step} == "cleanup" ]]; then
+    export wtime_cleanup="01:00:00"
+    export npe_cleanup=1
+    export npe_node_cleanup=1
+    export nth_cleanup=1
+    export memory_cleanup="4096M"
+
 elif [[ ${step} = "stage_ic" ]]; then
 
     export wtime_stage_ic="00:15:00"
diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh
index 1bb941f88..a1bcba4d7 100755
--- a/scripts/exgdas_enkf_earc.sh
+++ b/scripts/exgdas_enkf_earc.sh
@@ -133,172 +133,4 @@ if [ "${ENSGRP}" -eq 0 ]; then
         "gsistat.${RUN}.${PDY}${cyc}.ensmean"
 fi
 
-
-if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then
-    exit 0
-fi
-
-###############################################################
-# ENSGRP 0 also does clean-up
-###############################################################
-if [[ "${ENSGRP}" -eq 0 ]]; then
-    function remove_files() {
-        # TODO: move this to a new location
-        local directory=$1
-        shift
-        if [[ ! -d ${directory} ]]; then
-            echo "No directory ${directory} to remove files from, skiping"
-            return
-        fi
-        local exclude_list=""
-        if (($# > 0)); then
-            exclude_list=$*
-        fi
-        local file_list
-        declare -a file_list
-        # Suppress warnings about chained commands suppressing exit codes
-        # shellcheck disable=SC2312
-        readarray -t file_list < <(find -L "${directory}" -type f)
-        if (( ${#file_list[@]} == 0 )); then return; fi
-        for exclude in ${exclude_list}; do
-            echo "Excluding ${exclude}"
-            declare -a file_list_old=("${file_list[@]}")
-            # Suppress warnings about chained commands suppressing exit codes
-            # shellcheck disable=SC2312
-            readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}")
-            if (( ${#file_list[@]} == 0 )); then return; fi
-        done
-
-        for file in "${file_list[@]}"; do
-            rm -f "${file}"
-        done
-        # Remove directory if empty
-        rmdir "${directory}" || true
-    }
-
-    # Start start and end dates to remove
-    GDATEEND=$(${NDATE} -"${RMOLDEND_ENKF:-24}"  "${PDY}${cyc}")
-    GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}")
-
-    while [ "${GDATE}" -le "${GDATEEND}" ]; do
-
-        gPDY="${GDATE:0:8}"
-        gcyc="${GDATE:8:2}"
-
-        if [[ -d ${COM_TOP} ]]; then
-            rocotolog="${EXPDIR}/logs/${GDATE}.log"
-            if [[ -f "${rocotolog}" ]]; then
-                set +e
-                 # Suppress warnings about chained commands suppressing exit codes
-                # shellcheck disable=SC2312
-                testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success")
-                rc=$?
-                set_strict
-                if [ "${rc}" -eq 0 ]; then
-                    case ${CDUMP} in
-                        gdas)   nmem="${NMEM_ENS}";;
-                        gfs)    nmem="${NMEM_ENS_GFS}";;
-                        *)
-                            echo "FATAL ERROR: Unknown CDUMP ${CDUMP} during cleanup"
-                            exit 10
-                            ;;
-                    esac
-
-                    readarray memlist< <(seq --format="mem%03g" 1 "${nmem}")
-                    memlist+=("ensstat")
-
-                    for mem in "${memlist[@]}"; do
-                        # Atmos
-                        exclude_list="f006.ens"
-                        # Suppress warnings about chained commands suppressing exit codes
-                        # shellcheck disable=SC2312
-                        templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL')
-                        for template in ${templates}; do
-                            MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                            remove_files "${directory}" "${exclude_list[@]}"
-                        done
-
-                        # Wave
-                        exclude_list=""
-                        # Suppress warnings about chained commands suppressing exit codes
-                        # shellcheck disable=SC2312
-                        templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL')
-                        for template in ${templates}; do
-                            MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                            remove_files "${directory}" "${exclude_list[@]}"
-                        done
-
-                        # Ocean
-                        exclude_list=""
-                        # Suppress warnings about chained commands suppressing exit codes
-                        # shellcheck disable=SC2312
-                        templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL')
-                        for template in ${templates}; do
-                            MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                            remove_files "${directory}" "${exclude_list[@]}"
-                        done
-
-                        # Ice
-                        exclude_list=""
-                        # Suppress warnings about chained commands suppressing exit codes
-                        # shellcheck disable=SC2312
-                        templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL')
-                        for template in ${templates}; do
-                            MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                            remove_files "${directory}" "${exclude_list[@]}"
-                        done
-
-                        # Aerosols (GOCART)
-                        exclude_list=""
-                        # Suppress warnings about chained commands suppressing exit codes
-                        # shellcheck disable=SC2312
-                        templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL')
-                        for template in ${templates}; do
-                            MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                            remove_files "${directory}" "${exclude_list[@]}"
-                        done
-
-                        # Mediator
-                        exclude_list=""
-                        # Suppress warnings about chained commands suppressing exit codes
-                        # shellcheck disable=SC2312
-                        templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL')
-                        for template in ${templates}; do
-                            MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                            remove_files "${directory}" "${exclude_list[@]}"
-                        done
-                    done
-                fi
-            fi
-        fi
-
-        # Remove any empty directories
-        YMD=${gPDY} HH=${gcyc} generate_com target_dir:COM_TOP_TMPL
-        target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/"
-        if [[ -d ${target_dir} ]]; then
-            find "${target_dir}" -empty -type d -delete
-        fi
-
-        # Advance to next cycle
-        GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}")
-    done
-fi
-
-# Remove enkf*.$rPDY for the older of GDATE or RDATE
-GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}")
-fhmax=${FHMAX_GFS}
-RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}")
-if [ "${GDATE}" -lt "${RDATE}" ]; then
-    RDATE=${GDATE}
-fi
-rPDY=$(echo "${RDATE}" | cut -c1-8)
-clist="enkfgdas enkfgfs"
-for ctype in ${clist}; do
-    COMIN="${ROTDIR}/${ctype}.${rPDY}"
-    [[ -d ${COMIN} ]] && rm -rf "${COMIN}"
-done
-
-###############################################################
-
-
 exit 0
diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh
index 85344e4e3..7eb2a3a71 100755
--- a/scripts/exgdas_enkf_fcst.sh
+++ b/scripts/exgdas_enkf_fcst.sh
@@ -122,7 +122,9 @@ for imem in $(seq "${ENSBEG}" "${ENSEND}"); do
 
    skip_mem="NO"
    if [[ -f ${EFCSGRP}.fail ]]; then
+      set +e
       memstat=$(grep "MEMBER ${ENSMEM}" "${EFCSGRP}.fail" | grep -c "PASS")
+      set_strict
       [[ ${memstat} -eq 1 ]] && skip_mem="YES"
    fi
 
diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh
index 5fea07f4e..78a6d60b6 100755
--- a/scripts/exglobal_archive.sh
+++ b/scripts/exglobal_archive.sh
@@ -284,193 +284,4 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then
 fi  ##end of HPSS archive
 ###############################################################
 
-
-
-###############################################################
-# Clean up previous cycles; various depths
-# PRIOR CYCLE: Leave the prior cycle alone
-GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}")
-
-# PREVIOUS to the PRIOR CYCLE
-GDATE=$(${NDATE} -"${assim_freq}" "${GDATE}")
-gPDY="${GDATE:0:8}"
-gcyc="${GDATE:8:2}"
-
-# Remove the TMPDIR directory
-# TODO Only prepbufr is currently using this directory, and all jobs should be
-#   cleaning up after themselves anyway
-COMIN="${DATAROOT}/${GDATE}"
-[[ -d ${COMIN} ]] && rm -rf "${COMIN}"
-
-if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then
-    exit 0
-fi
-
-# Step back every assim_freq hours and remove old rotating directories
-# for successful cycles (defaults from 24h to 120h).
-# Retain files needed by Fit2Obs
-# TODO: This whole section needs to be revamped to remove marine component
-#  directories and not look at the rocoto log.
-GDATEEND=$(${NDATE} -"${RMOLDEND:-24}"  "${PDY}${cyc}")
-GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}")
-RTOFS_DATE=$(${NDATE} -48 "${PDY}${cyc}")
-function remove_files() {
-    # TODO: move this to a new location
-    local directory=$1
-    shift
-    if [[ ! -d ${directory} ]]; then
-        echo "No directory ${directory} to remove files from, skiping"
-        return
-    fi
-    local exclude_list=""
-    if (($# > 0)); then
-        exclude_list=$*
-    fi
-    local file_list
-    declare -a file_list
-    readarray -t file_list < <(find -L "${directory}" -type f)
-    if (( ${#file_list[@]} == 0 )); then return; fi
-    # echo "Number of files to remove before exclusions: ${#file_list[@]}"
-    for exclude in ${exclude_list}; do
-        echo "Excluding ${exclude}"
-        declare -a file_list_old=("${file_list[@]}")
-        readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}")
-        # echo "Number of files to remove after exclusion: ${#file_list[@]}"
-        if (( ${#file_list[@]} == 0 )); then return; fi
-    done
-    # echo "Number of files to remove after exclusions: ${#file_list[@]}"
-
-    for file in "${file_list[@]}"; do
-        rm -f "${file}"
-    done
-    # Remove directory if empty
-    rmdir "${directory}" || true
-}
-
-while [ "${GDATE}" -le "${GDATEEND}" ]; do
-    gPDY="${GDATE:0:8}"
-    gcyc="${GDATE:8:2}"
-    COMINrtofs="${ROTDIR}/rtofs.${gPDY}"
-    if [ -d "${COM_TOP}" ]; then
-        rocotolog="${EXPDIR}/logs/${GDATE}.log"
-        if [ -f "${rocotolog}" ]; then
-            set +e
-            testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success")
-            rc=$?
-            set_strict
-
-            if [ "${rc}" -eq 0 ]; then
-                # Obs
-                exclude_list="prepbufr"
-                templates="COM_OBS"
-                for template in ${templates}; do
-                    YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                    remove_files "${directory}" "${exclude_list[@]}"
-                done
-
-                # Atmos
-                exclude_list="cnvstat atmanl.nc"
-                templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL')
-                for template in ${templates}; do
-                    YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                    remove_files "${directory}" "${exclude_list[@]}"
-                done
-
-                # Wave
-                exclude_list=""
-                templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL')
-                for template in ${templates}; do
-                    YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                    remove_files "${directory}" "${exclude_list[@]}"
-                done
-
-                # Ocean
-                exclude_list=""
-                templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL')
-                for template in ${templates}; do
-                    YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                    remove_files "${directory}" "${exclude_list[@]}"
-                done
-
-                # Ice
-                exclude_list=""
-                templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL')
-                for template in ${templates}; do
-                    YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                    remove_files "${directory}" "${exclude_list[@]}"
-                done
-
-                # Aerosols (GOCART)
-                exclude_list=""
-                templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL')
-                for template in ${templates}; do
-                    YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                    remove_files "${directory}" "${exclude_list[@]}"
-                done
-
-                # Mediator
-                exclude_list=""
-                templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL')
-                for template in ${templates}; do
-                    YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}"
-                    remove_files "${directory}" "${exclude_list[@]}"
-                done
-
-                if [ -d "${COMINrtofs}" ] && [ "${GDATE}" -lt "${RTOFS_DATE}" ]; then rm -rf "${COMINrtofs}" ; fi
-            fi
-        fi
-    fi
-
-    # Remove mdl gfsmos directory
-    if [ "${RUN}" = "gfs" ]; then
-        COMIN="${ROTDIR}/gfsmos.${gPDY}"
-        if [ -d "${COMIN}" ] && [ "${GDATE}" -lt "${CDATE_MOS}" ]; then rm -rf "${COMIN}" ; fi
-    fi
-
-    # Remove any empty directories
-    target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/"
-    if [[ -d ${target_dir} ]]; then
-        find "${target_dir}" -empty -type d -delete
-    fi
-
-    GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}")
-done
-
-# Remove archived gaussian files used for Fit2Obs in $VFYARC that are
-# $FHMAX_FITS plus a delta before $CDATE.  Touch existing archived
-# gaussian files to prevent the files from being removed by automatic
-# scrubber present on some machines.
-
-if [ "${RUN}" = "gfs" ]; then
-    fhmax=$((FHMAX_FITS+36))
-    RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}")
-    rPDY=$(echo "${RDATE}" | cut -c1-8)
-    COMIN="${VFYARC}/${RUN}.${rPDY}"
-    [[ -d ${COMIN} ]] && rm -rf "${COMIN}"
-
-    TDATE=$(${NDATE} -"${FHMAX_FITS}" "${PDY}${cyc}")
-    while [ "${TDATE}" -lt "${PDY}${cyc}" ]; do
-        tPDY=$(echo "${TDATE}" | cut -c1-8)
-        tcyc=$(echo "${TDATE}" | cut -c9-10)
-        TDIR=${VFYARC}/${RUN}.${tPDY}/${tcyc}
-        [[ -d ${TDIR} ]] && touch "${TDIR}"/*
-        TDATE=$(${NDATE} +6 "${TDATE}")
-    done
-fi
-
-# Remove $RUN.$rPDY for the older of GDATE or RDATE
-GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}")
-fhmax=${FHMAX_GFS}
-RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}")
-if [ "${GDATE}" -lt "${RDATE}" ]; then
-    RDATE=${GDATE}
-fi
-rPDY=$(echo "${RDATE}" | cut -c1-8)
-COMIN="${ROTDIR}/${RUN}.${rPDY}"
-[[ -d ${COMIN} ]] && rm -rf "${COMIN}"
-
-
-###############################################################
-
-
 exit 0
diff --git a/scripts/exglobal_cleanup.sh b/scripts/exglobal_cleanup.sh
new file mode 100755
index 000000000..5d7c0a978
--- /dev/null
+++ b/scripts/exglobal_cleanup.sh
@@ -0,0 +1,106 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Clean up previous cycles; various depths
+# PRIOR CYCLE: Leave the prior cycle alone
+# shellcheck disable=SC2153
+GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${assim_freq} hours")
+# PREVIOUS to the PRIOR CYCLE
+GDATE=$(date --utc +%Y%m%d%H -d "${GDATE:0:8} ${GDATE:8:2} -${assim_freq} hours")
+
+# Remove the TMPDIR directory
+# TODO Only prepbufr is currently using this directory, and all jobs should be
+#   cleaning up after themselves anyway
+COMIN="${DATAROOT}/${GDATE}"
+[[ -d ${COMIN} ]] && rm -rf "${COMIN}"
+
+if [[ "${CLEANUP_COM:-YES}" == NO ]] ; then
+    exit 0
+fi
+
+# Step back every assim_freq hours and remove old rotating directories
+# for successful cycles (defaults from 24h to 120h).
+# Retain files needed by Fit2Obs
+last_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDEND:-24} hours" )
+first_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDSTD:-120} hours")
+last_rtofs=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDRTOFS:-48} hours")
+function remove_files() {
+    local directory=$1
+    shift
+    if [[ ! -d ${directory} ]]; then
+        echo "No directory ${directory} to remove files from, skiping"
+        return
+    fi
+    local find_exclude_string=""
+    for exclude in "$@"; do
+        find_exclude_string+="${find_exclude_string} -name ${exclude} -or"
+    done
+    # Chop off any trailing or
+    find_exclude_string="${find_exclude_string[*]/%-or}"
+    # Remove all regular files that do not match
+    # shellcheck disable=SC2086
+    find "${directory}" -type f -not \( ${find_exclude_string} \) -delete
+    # Remove all symlinks that do not match
+    # shellcheck disable=SC2086
+    find "${directory}" -type l -not \( ${find_exclude_string} \) -delete
+    # Remove any empty directories
+    find "${directory}" -type d -empty -delete
+}
+
+for (( current_date=first_date; current_date <= last_date; \
+  current_date=$(date --utc +%Y%m%d%H -d "${current_date:0:8} ${current_date:8:2} +${assim_freq} hours") )); do
+    current_PDY="${current_date:0:8}"
+    current_cyc="${current_date:8:2}"
+    rtofs_dir="${ROTDIR}/rtofs.${current_PDY}"
+    rocotolog="${EXPDIR}/logs/${current_date}.log"
+    if [[ -f "${rocotolog}" ]]; then
+        # TODO: This needs to be revamped to not look at the rocoto log.
+        # shellcheck disable=SC2312
+        if [[ $(tail -n 1 "${rocotolog}") =~ "This cycle is complete: Success" ]]; then
+            YMD="${current_PDY}" HH="${current_cyc}" generate_com COM_TOP
+            if [[ -d "${COM_TOP}" ]]; then
+                IFS=", " read -r -a exclude_list <<< "${exclude_string:-}"
+                remove_files "${COM_TOP}" "${exclude_list[@]:-}"
+            fi
+            if [[ -d "${rtofs_dir}" ]] && (( current_date < last_rtofs )); then rm -rf "${rtofs_dir}" ; fi
+        fi
+    fi
+
+    # Remove mdl gfsmos directory
+    if [[ "${RUN}" == "gfs" ]]; then
+        mos_dir="${ROTDIR}/gfsmos.${current_PDY}"
+        if [[ -d "${mos_dir}" ]] && (( current_date < CDATE_MOS )); then rm -rf "${mos_dir}" ; fi
+    fi
+done
+
+# Remove archived gaussian files used for Fit2Obs in $VFYARC that are
+# $FHMAX_FITS plus a delta before $CDATE. Touch existing archived
+# gaussian files to prevent the files from being removed by automatic
+# scrubber present on some machines.
+
+if [[ "${RUN}" == "gfs" ]]; then
+    fhmax=$((FHMAX_FITS + 36))
+    RDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${fhmax} hours")
+    verify_dir="${ROTDIR}/vrfyarch/${RUN}.${RDATE:0:8}"
+    [[ -d ${verify_dir} ]] && rm -rf "${verify_dir}"
+
+    touch_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${FHMAX_FITS} hours")
+    while (( touch_date < "${PDY}${cyc}" )); do
+        touch_PDY="${touch_date:0:8}"
+        touch_cyc="${touch_date:8:2}"
+        touch_dir="${ROTDIR}/vrfyarch/${RUN}.${touch_PDY}/${touch_cyc}"
+        [[ -d ${touch_dir} ]] && touch "${touch_dir}"/*
+        touch_date=$(date --utc +%Y%m%d%H -d "${touch_PDY} ${touch_cyc} +6 hours")
+    done
+fi
+
+# Remove $RUN.$rPDY for the older of GDATE or RDATE
+GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDSTD:-120} hours")
+RDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${FHMAX_GFS} hours")
+if (( GDATE < RDATE )); then
+    RDATE=${GDATE}
+fi
+deletion_target="${ROTDIR}/${RUN}.${RDATE:0:8}"
+if [[ -d ${deletion_target} ]]; then rm -rf "${deletion_target}"; fi
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 633e93bac..6eff929d5 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -47,7 +47,7 @@ class GFSCycledAppConfig(AppConfig):
         if self.do_ocean:
             configs += ['ocnpost']
 
-        configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'fit2obs', 'arch']
+        configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'fit2obs', 'arch', 'cleanup']
 
         if self.do_hybvar:
             if self.do_jediatmens:
@@ -106,7 +106,7 @@ class GFSCycledAppConfig(AppConfig):
         #    gdas_gfs_common_tasks_after_fcst += ['ocnpost']
         gdas_gfs_common_tasks_after_fcst += ['vrfy']
 
-        gdas_gfs_common_cleanup_tasks = ['arch']
+        gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup']
 
         if self.do_jediatmvar:
             gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal']
@@ -137,7 +137,7 @@ class GFSCycledAppConfig(AppConfig):
             else:
                 hybrid_tasks += ['eobs', 'eupd', 'echgres']
                 hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg']
-            hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc']
+            hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup']
 
         # Collect all "gdas" cycle tasks
         gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy()
diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py
index e6d1ab35a..73e17ee7a 100644
--- a/workflow/applications/gfs_forecast_only.py
+++ b/workflow/applications/gfs_forecast_only.py
@@ -15,7 +15,7 @@ class GFSForecastOnlyAppConfig(AppConfig):
         Returns the config_files that are involved in the forecast-only app
         """
 
-        configs = ['stage_ic', 'fcst', 'arch']
+        configs = ['stage_ic', 'fcst', 'arch', 'cleanup']
 
         if self.do_atm:
             configs += ['post', 'vrfy']
@@ -109,6 +109,6 @@ class GFSForecastOnlyAppConfig(AppConfig):
         if self.do_wafs:
             tasks += ['wafs', 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25', 'wafsblending', 'wafsblending0p25']
 
-        tasks += ['arch']  # arch **must** be the last task
+        tasks += ['arch', 'cleanup']  # arch and cleanup **must** be the last tasks
 
         return {f"{self._base['CDUMP']}": tasks}
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 33e2ec82f..e41e4ebcc 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -944,6 +944,23 @@ class GFSTasks(Tasks):
 
         return task
 
+    # Cleanup
+    def cleanup(self):
+        deps = []
+        if 'enkf' in self.cdump:
+            dep_dict = {'type': 'metatask', 'name': 'enkfgdaseamn'}
+            deps.append(rocoto.add_dependency(dep_dict))
+        else:
+            dep_dict = {'type': 'task', 'name': f'{self.cdump}arch'}
+            deps.append(rocoto.add_dependency(dep_dict))
+
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+        resources = self.get_resource('cleanup')
+        task = create_wf_task('cleanup', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies)
+
+        return task
+
     # Start of ensemble tasks
     def eobs(self):
         deps = []
diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index b9716c938..29ed57daf 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -11,7 +11,7 @@ __all__ = ['Tasks', 'create_wf_task']
 class Tasks:
     SERVICE_TASKS = ['arch', 'earc']
     VALID_TASKS = ['aerosol_init', 'stage_ic',
-                   'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch',
+                   'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', 'cleanup',
                    'prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal',
                    'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy',
                    'earc', 'ecen', 'echgres', 'ediag', 'efcs',
-- 
GitLab


From 08ce4f8d3ed8e07b4d488a80b5054c6206b04404 Mon Sep 17 00:00:00 2001
From: Walter Kolczynski - NOAA <Walter.Kolczynski@noaa.gov>
Date: Fri, 20 Oct 2023 16:14:53 +0000
Subject: [PATCH 19/54] Fix enkfgfs cleanup dependency (#1941)

When #1906 was merged, the dependency for enkf cycles was hard-coded
to use the enkfgdas archive instead of depending on the `RUN`.
---
 workflow/rocoto/gfs_tasks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index e41e4ebcc..56449cb9d 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -948,7 +948,7 @@ class GFSTasks(Tasks):
     def cleanup(self):
         deps = []
         if 'enkf' in self.cdump:
-            dep_dict = {'type': 'metatask', 'name': 'enkfgdaseamn'}
+            dep_dict = {'type': 'metatask', 'name': f'{self.cdump}eamn'}
             deps.append(rocoto.add_dependency(dep_dict))
         else:
             dep_dict = {'type': 'task', 'name': f'{self.cdump}arch'}
-- 
GitLab


From 7e14adb123904628109c64c7d288c9a49d8f15c2 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 01:27:34 +0000
Subject: [PATCH 20/54] took out poll

---
 .github/workflows/getlabels.yaml | 25 +++++++------------------
 1 file changed, 7 insertions(+), 18 deletions(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index ffc40cc92..c9dcbc615 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -38,22 +38,11 @@ jobs:
           STAGE: ${{ inputs.stage }}
           MAX_TIME: ${{ inputs.max_runtime }}
         run: |
-          DONE=false
-          count=0
-          until false
-          do
-             LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
-             LABELS=$(echo "$LABELS1" | tr '\n' ' ')
-             check_label="CI-Orion-${STAGE}"
-             if [[ "${LABELS}" == *"${check_label}"* ]]; then
-                DONE=true
-                break
-             fi   
-             sleep 10m
-             count=$((count+10))
-             if [[ ${count} -gt ${MAX_TIME} ]]; then
-                DONE=false
-                break
-             fi
-          done
+          LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
+          LABELS=$(echo "$LABELS1" | tr '\n' ' ')
+          check_label="CI-Orion-${STAGE}"
+          if [[ "${LABELS}" == *"${check_label}"* ]]; then
+              DONE=true
+              break
+          fi   
           echo "state=$DONE" >> $GITHUB_OUPUT   
-- 
GitLab


From 0615fad5eb03f5e743ddd8b7d8ab1154c6e5fe8c Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 01:31:03 +0000
Subject: [PATCH 21/54] took out secrets required line

---
 .github/workflows/getlabels.yaml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index c9dcbc615..156dcc332 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -18,9 +18,6 @@ on:
             description: "The return state of Stage (true/false)"
             value: ${{ jobs.getlabels.outputs.state }}
 
-      secrets:
-        token:
-            required: true      
 jobs:
 
  getlabels:
-- 
GitLab


From a9145c79014be6ac4674067de7aa9920d6c762c8 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 01:33:39 +0000
Subject: [PATCH 22/54] added debug echo of the gh api command

---
 .github/workflows/getlabels.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 156dcc332..3c5f44147 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -35,6 +35,7 @@ jobs:
           STAGE: ${{ inputs.stage }}
           MAX_TIME: ${{ inputs.max_runtime }}
         run: |
+          echo "gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name'"
           LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
           LABELS=$(echo "$LABELS1" | tr '\n' ' ')
           check_label="CI-Orion-${STAGE}"
-- 
GitLab


From 9b8d608932c135256ad1f32ffea10baffd82f180 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 01:37:40 +0000
Subject: [PATCH 23/54] added {{}} on inputs.pr_number

---
 .github/workflows/orion_status.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 7cc7a1252..3bcd3cb5c 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -15,7 +15,7 @@ jobs:
   get_Ready:
     uses: ./.github/workflows/getlabels.yaml
     with:
-      pr_number: inputs.pr_number
+      pr_number: ${{ inputs.pr_number }}
       max_runtime: 20
       stage: "Ready"
     secrets: inherit
-- 
GitLab


From ac1b51cc79271c5e6baa0bef4586e64efd03e4f9 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 01:42:21 +0000
Subject: [PATCH 24/54] fixed other pr_number assignments

---
 .github/workflows/getlabels.yaml    | 27 +++++++++++++++++++--------
 .github/workflows/orion_status.yaml |  4 ++--
 2 files changed, 21 insertions(+), 10 deletions(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 3c5f44147..3d6bfaffa 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -35,12 +35,23 @@ jobs:
           STAGE: ${{ inputs.stage }}
           MAX_TIME: ${{ inputs.max_runtime }}
         run: |
-          echo "gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name'"
-          LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
-          LABELS=$(echo "$LABELS1" | tr '\n' ' ')
-          check_label="CI-Orion-${STAGE}"
-          if [[ "${LABELS}" == *"${check_label}"* ]]; then
-              DONE=true
-              break
-          fi   
+          DONE=false
+          count=0
+          until false
+          do
+             echo "$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name'"
+             LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
+             LABELS=$(echo "$LABELS1" | tr '\n' ' ')
+             check_label="CI-Orion-${STAGE}"
+             if [[ "${LABELS}" == *"${check_label}"* ]]; then
+                DONE=true
+                break
+             fi   
+             sleep 10m
+             count=$((count+10))
+             if [[ ${count} -gt ${MAX_TIME} ]]; then
+                DONE=false
+                break
+             fi
+          done
           echo "state=$DONE" >> $GITHUB_OUPUT   
diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 3bcd3cb5c..e33141173 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -33,7 +33,7 @@ jobs:
   get_Building:       
     uses: ./.github/workflows/getlabels.yaml
     with:
-      pr_number: inputs.pr_number
+      pr_number: ${{ inputs.pr_number }}
       max_runtime: 40
       stage: "Building"
     secrets: inherit
@@ -51,7 +51,7 @@ jobs:
   get_Running:
     uses: ./.github/workflows/getlabels.yaml
     with:
-      pr_number: inputs.pr_number
+      pr_number: ${{ inputs.pr_number }}
       max_runtime: 60
       stage: "Running"
     secrets: inherit
-- 
GitLab


From 404a4543078bfd6cdbf7d90a3f0afb90d6196e6d Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 01:43:48 +0000
Subject: [PATCH 25/54] working on EOF error

---
 .github/workflows/getlabels.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 3d6bfaffa..993d5a715 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -54,4 +54,4 @@ jobs:
                 break
              fi
           done
-          echo "state=$DONE" >> $GITHUB_OUPUT   
+          echo "state=$DONE" >> $GITHUB_OUPUT
-- 
GitLab


From 381832a197fde0e4ad63a120c9017e8d45f164fa Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 01:45:43 +0000
Subject: [PATCH 26/54] working on EOF error II

---
 .github/workflows/getlabels.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 993d5a715..2372bbb9a 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -39,7 +39,7 @@ jobs:
           count=0
           until false
           do
-             echo "$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name'"
+             echo "gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name'"
              LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
              LABELS=$(echo "$LABELS1" | tr '\n' ' ')
              check_label="CI-Orion-${STAGE}"
-- 
GitLab


From 3f6bd46d73ec96621ee178abe40f5a97cd068f49 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 01:52:33 +0000
Subject: [PATCH 27/54] ambiguous redirect to GITHUB_OUTPUT

---
 .github/workflows/getlabels.yaml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 2372bbb9a..020e937e4 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -54,4 +54,5 @@ jobs:
                 break
              fi
           done
-          echo "state=$DONE" >> $GITHUB_OUPUT
+          echo "state=$DONE"
+          echo "state=$DONE" >> $env:GITHUB_OUPUT
-- 
GitLab


From ab5d110b795c758578ed9d4be63c915728c5ed77 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 02:13:02 +0000
Subject: [PATCH 28/54] trying to get needs output from get_Ready

---
 .github/workflows/getlabels.yaml    | 9 ++++-----
 .github/workflows/orion_status.yaml | 5 +++--
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 020e937e4..15fc204c2 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -2,6 +2,10 @@ name: Get Labels
 
 on:
   workflow_call:
+      outputs:
+        state:
+            description: "The return state of Stage (true/false)"
+            value: ${{ jobs.getlabels.outputs.state }}
       inputs:
          pr_number:
             required: true
@@ -13,11 +17,6 @@ on:
             required: true
             type: string
 
-      outputs:
-        state:
-            description: "The return state of Stage (true/false)"
-            value: ${{ jobs.getlabels.outputs.state }}
-
 jobs:
 
  getlabels:
diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index e33141173..b3c76a34d 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -30,8 +30,9 @@ jobs:
            exit 1
          fi  
 
-  get_Building:       
+  get_Building:    
     uses: ./.github/workflows/getlabels.yaml
+    needs: Ready
     with:
       pr_number: ${{ inputs.pr_number }}
       max_runtime: 40
@@ -40,7 +41,7 @@ jobs:
 
   Building:
     runs-on: ubuntu-latest
-    needs: [Ready, get_Building]
+    needs:  get_Building
     steps:
       - run: |
          if [[ ${{ needs.get_Building.outputs.state }}  ]]; then
-- 
GitLab


From 35c4877f2a953035a9fcd25f8f5328d1b3034db2 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 02:26:54 +0000
Subject: [PATCH 29/54] still not getting outputs

---
 .github/workflows/getlabels.yaml    | 2 +-
 .github/workflows/orion_status.yaml | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 15fc204c2..8789da3cd 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -54,4 +54,4 @@ jobs:
              fi
           done
           echo "state=$DONE"
-          echo "state=$DONE" >> $env:GITHUB_OUPUT
+          echo "state=$DONE" >> "${GITHUB_OUPUT}"
diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index b3c76a34d..d607f1725 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -51,6 +51,7 @@ jobs:
 
   get_Running:
     uses: ./.github/workflows/getlabels.yaml
+    needs: Building
     with:
       pr_number: ${{ inputs.pr_number }}
       max_runtime: 60
@@ -59,7 +60,7 @@ jobs:
 
   Running:
     runs-on: ubuntu-latest
-    needs: [Building, get_Running]
+    needs:  get_Running
     steps:
       - run: |
          if [[ ${{ needs.get_Running.outputs.state }}  ]]; then
-- 
GitLab


From 725e5258559a8b6a0f6d95ff592aa31315656170 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 02:34:03 +0000
Subject: [PATCH 30/54] quotes on GITHUB_OUTPUT

---
 .github/workflows/getlabels.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 8789da3cd..7f9e1c090 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -54,4 +54,4 @@ jobs:
              fi
           done
           echo "state=$DONE"
-          echo "state=$DONE" >> "${GITHUB_OUPUT}"
+          echo "state=$DONE" >> ${GITHUB_OUPUT}
-- 
GitLab


From 461297d10f29a920b5c5e4b9394d0de8a40f38e9 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 02:49:06 +0000
Subject: [PATCH 31/54] trying crazy Sys IO

---
 .github/workflows/getlabels.yaml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 7f9e1c090..c8193482e 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -54,4 +54,7 @@ jobs:
              fi
           done
           echo "state=$DONE"
-          echo "state=$DONE" >> ${GITHUB_OUPUT}
+          var file = Environment.GetEnvironmentVariable("GITHUB_OUTPUT");
+          var content = System.IO.File.ReadAllText(file);
+          System.IO.File.WriteAllText(file, content + "state=${DONE}" + "\n");
+          # echo "state=$DONE" >> ${GITHUB_OUPUT}
-- 
GitLab


From 9bdf6f51c443bfd3a2249c54e4d0599baed27728 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:07:41 +0000
Subject: [PATCH 32/54] adding yml file

---
 .github/workflows/getlabels.yml | 57 +++++++++++++++++++++++++++++++++
 1 file changed, 57 insertions(+)
 create mode 100644 .github/workflows/getlabels.yml

diff --git a/.github/workflows/getlabels.yml b/.github/workflows/getlabels.yml
new file mode 100644
index 000000000..e8ebc733f
--- /dev/null
+++ b/.github/workflows/getlabels.yml
@@ -0,0 +1,57 @@
+name: Get Labels
+
+on:
+  workflow_call:
+      outputs:
+        state:
+            description: "The return state of Stage (true/false)"
+            value: ${{ jobs.getlabels.outputs.state }}
+      inputs:
+         pr_number:
+            required: true
+            type: string
+         max_runtime:
+            required: false
+            type: string
+         stage: 
+            required: true
+            type: string
+
+jobs:
+
+ getlabels:
+    runs-on: ubuntu-latest
+    outputs:
+      state: ${{ steps.id.outputs.state }}
+    steps:
+      - name: Get Label Steps
+        id: id
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          OWNER: ${{ github.repository_owner }}
+          REPO_NAME: ${{ github.event.repository.name }}
+          PULL_REQUEST_NUMBER: ${{ inputs.pr_number }}
+          STAGE: ${{ inputs.stage }}
+          MAX_TIME: ${{ inputs.max_runtime }}
+        run: |
+          DONE=false
+          count=0
+          until false
+          do
+             echo "gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name'"
+             LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
+             LABELS=$(echo "$LABELS1" | tr '\n' ' ')
+             check_label="CI-Orion-${STAGE}"
+             if [[ "${LABELS}" == *"${check_label}"* ]]; then
+                DONE=true
+                break
+             fi   
+             sleep 10m
+             count=$((count+10))
+             if [[ ${count} -gt ${MAX_TIME} ]]; then
+                DONE=false
+                break
+             fi
+          done
+          echo "state=${DONE}"
+          echo "state=${DONE}" >> ${GITHUB_OUPUT}
-- 
GitLab


From 3e7a55982e36e55f30cf57deade49e036c9fae36 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:08:22 +0000
Subject: [PATCH 33/54] remove yml file

---
 .github/workflows/getlabels.yml | 57 ---------------------------------
 1 file changed, 57 deletions(-)
 delete mode 100644 .github/workflows/getlabels.yml

diff --git a/.github/workflows/getlabels.yml b/.github/workflows/getlabels.yml
deleted file mode 100644
index e8ebc733f..000000000
--- a/.github/workflows/getlabels.yml
+++ /dev/null
@@ -1,57 +0,0 @@
-name: Get Labels
-
-on:
-  workflow_call:
-      outputs:
-        state:
-            description: "The return state of Stage (true/false)"
-            value: ${{ jobs.getlabels.outputs.state }}
-      inputs:
-         pr_number:
-            required: true
-            type: string
-         max_runtime:
-            required: false
-            type: string
-         stage: 
-            required: true
-            type: string
-
-jobs:
-
- getlabels:
-    runs-on: ubuntu-latest
-    outputs:
-      state: ${{ steps.id.outputs.state }}
-    steps:
-      - name: Get Label Steps
-        id: id
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          OWNER: ${{ github.repository_owner }}
-          REPO_NAME: ${{ github.event.repository.name }}
-          PULL_REQUEST_NUMBER: ${{ inputs.pr_number }}
-          STAGE: ${{ inputs.stage }}
-          MAX_TIME: ${{ inputs.max_runtime }}
-        run: |
-          DONE=false
-          count=0
-          until false
-          do
-             echo "gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name'"
-             LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')"
-             LABELS=$(echo "$LABELS1" | tr '\n' ' ')
-             check_label="CI-Orion-${STAGE}"
-             if [[ "${LABELS}" == *"${check_label}"* ]]; then
-                DONE=true
-                break
-             fi   
-             sleep 10m
-             count=$((count+10))
-             if [[ ${count} -gt ${MAX_TIME} ]]; then
-                DONE=false
-                break
-             fi
-          done
-          echo "state=${DONE}"
-          echo "state=${DONE}" >> ${GITHUB_OUPUT}
-- 
GitLab


From b6566149215f90e4afb000d6390b60754185ed00 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:09:13 +0000
Subject: [PATCH 34/54] added {} on DONE

---
 .github/workflows/getlabels.yaml | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index c8193482e..e8ebc733f 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -53,8 +53,5 @@ jobs:
                 break
              fi
           done
-          echo "state=$DONE"
-          var file = Environment.GetEnvironmentVariable("GITHUB_OUTPUT");
-          var content = System.IO.File.ReadAllText(file);
-          System.IO.File.WriteAllText(file, content + "state=${DONE}" + "\n");
-          # echo "state=$DONE" >> ${GITHUB_OUPUT}
+          echo "state=${DONE}"
+          echo "state=${DONE}" >> ${GITHUB_OUPUT}
-- 
GitLab


From ed3429e3f3c1cf4a0258b0d7f14c256ef71e1c3c Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:16:05 +0000
Subject: [PATCH 35/54] adding env back to GITHUB_OUTPUT

---
 .github/workflows/getlabels.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index e8ebc733f..b861658a8 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -54,4 +54,4 @@ jobs:
              fi
           done
           echo "state=${DONE}"
-          echo "state=${DONE}" >> ${GITHUB_OUPUT}
+          echo "state=${DONE}" >> $env:GITHUB_OUTPUT
-- 
GitLab


From 00e92e286387b4bca03285420a93b47ec1844f31 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:24:48 +0000
Subject: [PATCH 36/54] added read/write permissions to reusable workflow

---
 .github/workflows/getlabels.yaml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index b861658a8..156ee222a 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -17,6 +17,10 @@ on:
             required: true
             type: string
 
+permissions:
+  contents: read
+  packages: write
+
 jobs:
 
  getlabels:
@@ -54,4 +58,4 @@ jobs:
              fi
           done
           echo "state=${DONE}"
-          echo "state=${DONE}" >> $env:GITHUB_OUTPUT
+          echo "state=${DONE}" >> $GITHUB_OUTPUT
-- 
GitLab


From 7cda4501f36017947fe8564011c9cde106e38ee0 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:34:14 +0000
Subject: [PATCH 37/54] fixed state-check logic in status workflow

---
 .github/workflows/orion_status.yaml | 22 ++++++++++++++--------
 1 file changed, 14 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index d607f1725..3be30a623 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -13,7 +13,7 @@ on:
 jobs:
 
   get_Ready:
-    uses: ./.github/workflows/getlabels.yaml
+    uses: ./.github/workflows/getlabels.yml
     with:
       pr_number: ${{ inputs.pr_number }}
       max_runtime: 20
@@ -25,9 +25,11 @@ jobs:
     needs: get_Ready
     steps:
      - run:  |
-         if [[ ${{ needs.get_Ready.outputs.state }}  ]]; then
-           echo "Waiting for Ready State timmed out"
+         if [[ "${{ needs.get_Ready.outputs.state }}" == "false"  ]]; then
+           echo "Ready Timmed out"
            exit 1
+         elif [[ "${{ needs.get_Ready.outputs.state }}" == "true"  ]]; then
+           echo "Ready Set"
          fi  
 
   get_Building:    
@@ -44,10 +46,12 @@ jobs:
     needs:  get_Building
     steps:
       - run: |
-         if [[ ${{ needs.get_Building.outputs.state }}  ]]; then
-           echo "Waiting for Ready State timmed out"
+         if [[ "${{ needs.get_Building.outputs.state }}" == "false"  ]]; then
+           echo "Building Timmed out"
            exit 1
-         fi
+         elif [[ "${{ needs.get_Building.outputs.state }}" == "true"  ]]; then
+           echo "Building Set"
+         fi  
 
   get_Running:
     uses: ./.github/workflows/getlabels.yaml
@@ -63,7 +67,9 @@ jobs:
     needs:  get_Running
     steps:
       - run: |
-         if [[ ${{ needs.get_Running.outputs.state }}  ]]; then
-           echo "Waiting for Ready State timmed out"
+         if [[ "${{ needs.get_Running.outputs.state }}" == "false"  ]]; then
+           echo "Running Timmed out"
            exit 1
+         elif [[ "${{ needs.get_Running.outputs.state }}" == "true"  ]]; then
+           echo "Running Set"
          fi  
\ No newline at end of file
-- 
GitLab


From 522e4e30d4f224e4cc81cff42ef29ac699b784cf Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:37:30 +0000
Subject: [PATCH 38/54] noop on getlabels

---
 .github/workflows/getlabels.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/getlabels.yaml b/.github/workflows/getlabels.yaml
index 156ee222a..cff128217 100644
--- a/.github/workflows/getlabels.yaml
+++ b/.github/workflows/getlabels.yaml
@@ -16,7 +16,6 @@ on:
          stage: 
             required: true
             type: string
-
 permissions:
   contents: read
   packages: write
-- 
GitLab


From 752210cfed4f4e0b50f7e5123c70b9b000745348 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:39:26 +0000
Subject: [PATCH 39/54] issue with orion status

---
 .github/workflows/orion_status.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index 3be30a623..a9ce1def1 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -25,10 +25,10 @@ jobs:
     needs: get_Ready
     steps:
      - run:  |
-         if [[ "${{ needs.get_Ready.outputs.state }}" == "false"  ]]; then
+         if [[ ${{ needs.get_Ready.outputs.state }} == "false"  ]]; then
            echo "Ready Timmed out"
            exit 1
-         elif [[ "${{ needs.get_Ready.outputs.state }}" == "true"  ]]; then
+         elif [[ ${{ needs.get_Ready.outputs.state }} == "true"  ]]; then
            echo "Ready Set"
          fi  
 
-- 
GitLab


From 56b6a5f2d2bf8c84b53739fe64afb443b6ae76b7 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
Date: Sat, 21 Oct 2023 03:42:10 +0000
Subject: [PATCH 40/54] misspelled yaml as yml

---
 .github/workflows/orion_status.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/orion_status.yaml b/.github/workflows/orion_status.yaml
index a9ce1def1..76d557127 100644
--- a/.github/workflows/orion_status.yaml
+++ b/.github/workflows/orion_status.yaml
@@ -13,7 +13,7 @@ on:
 jobs:
 
   get_Ready:
-    uses: ./.github/workflows/getlabels.yml
+    uses: ./.github/workflows/getlabels.yaml
     with:
       pr_number: ${{ inputs.pr_number }}
       max_runtime: 20
@@ -25,10 +25,10 @@ jobs:
     needs: get_Ready
     steps:
      - run:  |
-         if [[ ${{ needs.get_Ready.outputs.state }} == "false"  ]]; then
+         if [[ "${{ needs.get_Ready.outputs.state }}" == "false"  ]]; then
            echo "Ready Timmed out"
            exit 1
-         elif [[ ${{ needs.get_Ready.outputs.state }} == "true"  ]]; then
+         elif [[ "${{ needs.get_Ready.outputs.state }}" == "true"  ]]; then
            echo "Ready Set"
          fi  
 
-- 
GitLab


From 4b5cd0bc435fc158258ca38c3d5f44add6b60469 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan <aerorahul@users.noreply.github.com>
Date: Mon, 23 Oct 2023 12:04:26 -0400
Subject: [PATCH 41/54] Fix nth_eupd in gfs/config.resources. Remove sections
 of jobs not run as part of gefs from gefs/config.resources (#1952)

---
 parm/config/gefs/config.resources | 407 +-----------------------------
 parm/config/gfs/config.resources  |   4 +-
 2 files changed, 3 insertions(+), 408 deletions(-)

diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index 91699a988..33156a768 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -9,17 +9,12 @@ if [[ $# -ne 1 ]]; then
     echo "Must specify an input task argument to set resource variables!"
     echo "argument can be any one of the following:"
     echo "coupled_ic aerosol_init"
-    echo "atmanlinit atmanlrun atmanlfinal"
-    echo "atmensanlinit atmensanlrun atmensanlfinal"
-    echo "landanlprep landanlinit landanlrun landanlfinal"
-    echo "aeroanlinit aeroanlrun aeroanlfinal"
-    echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres"
-    echo "eobs ediag eomg eupd ecen esfc efcs epos earc"
+    echo "sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres"
+    echo "ecen esfc efcs epos earc"
     echo "init_chem mom6ic ocnpost"
     echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
     echo "wavegempak waveawipsbulls waveawipsgridded"
     echo "postsnd awips gempak"
-    echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy"
     exit 1
 
 fi
@@ -168,303 +163,6 @@ elif [[ ${step} = "waveawipsgridded" ]]; then
     export NTASKS=${npe_waveawipsgridded}
     export memory_waveawipsgridded_gfs="1GB"
 
-elif [[ "${step}" = "atmanlinit" ]]; then
-
-    export wtime_atmanlinit="00:10:00"
-    export npe_atmanlinit=1
-    export nth_atmanlinit=1
-    npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc)
-    export npe_node_atmanlinit
-    export memory_atmanlinit="3072M"
-
-elif [[ "${step}" = "atmanlrun" ]]; then
-
-    # make below case dependent later
-    export layout_x=1
-    export layout_y=1
-
-    export wtime_atmanlrun="00:30:00"
-    npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_atmanlrun
-    npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_atmanlrun_gfs
-    export nth_atmanlrun=1
-    export nth_atmanlrun_gfs=${nth_atmanlrun}
-    npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc)
-    export npe_node_atmanlrun
-    export is_exclusive=True
-
-elif [[ "${step}" = "atmanlfinal" ]]; then
-
-    export wtime_atmanlfinal="00:30:00"
-    export npe_atmanlfinal=${npe_node_max}
-    export nth_atmanlfinal=1
-    npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc)
-    export npe_node_atmanlfinal
-    export is_exclusive=True
-
-elif [[ "${step}" = "landanlprep" || "${step}" = "landanlinit" || "${step}" = "landanlrun"  || "${step}" = "landanlfinal" ]]; then
-   # below lines are for creating JEDI YAML
-   case ${CASE} in
-     C768)
-        layout_x=6
-        layout_y=6
-        ;;
-     C384)
-        layout_x=5
-        layout_y=5
-        ;;
-     C192 | C96 | C48)
-        layout_x=1
-        layout_y=1
-        ;;
-     *)
-        echo "FATAL ERROR: Resolution not supported for land analysis'"
-        exit 1
-   esac
-
-   export layout_x
-   export layout_y
-
-   if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then
-       declare -x "wtime_${step}"="00:10:00"
-       declare -x "npe_${step}"=1
-       declare -x "nth_${step}"=1
-       temp_stepname="nth_${step}"
-       declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)"
-       declare -x "memory_${step}"="3072M"
-   elif [[ "${step}" = "landanlrun" ]]; then
-       export wtime_landanlrun="00:30:00"
-       npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-       export npe_landanlrun
-       export nth_landanlrun=1
-       npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc)
-       export npe_node_landanlrun
-       export is_exclusive=True
-   elif [[ "${step}" = "landanlprep" ]]; then
-       export wtime_landanlprep="00:30:00"
-       npe_landanlprep=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-       export npe_landanlprep
-       export nth_landanlprep=1
-       npe_node_landanlprep=$(echo "${npe_node_max} / ${nth_landanlprep}" | bc)
-       export npe_node_landanlprep
-       export is_exclusive=True
-   fi
-
-elif [[ "${step}" = "aeroanlinit" ]]; then
-
-   # below lines are for creating JEDI YAML
-   case ${CASE} in
-     C768)
-        layout_x=6
-        layout_y=6
-        ;;
-      C384)
-        layout_x=5
-        layout_y=5
-        ;;
-     C192 | C96 | C48)
-        layout_x=8
-        layout_y=8
-        ;;
-      *)
-          echo "FATAL ERROR: Resolution not supported for aerosol analysis'"
-          exit 1
-    esac
-
-    export layout_x
-    export layout_y
-
-    export wtime_aeroanlinit="00:10:00"
-    export npe_aeroanlinit=1
-    export nth_aeroanlinit=1
-    npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc)
-    export npe_node_aeroanlinit
-    export memory_aeroanlinit="3072M"
-
-elif [[ "${step}" = "aeroanlrun" ]]; then
-
-   case ${CASE} in
-     C768)
-        layout_x=6
-        layout_y=6
-        ;;
-      C384)
-        layout_x=5
-        layout_y=5
-        ;;
-     C192 | C96 | C48)
-        layout_x=8
-        layout_y=8
-        ;;
-      *)
-          echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!"
-          exit 1
-    esac
-
-    export layout_x
-    export layout_y
-
-    export wtime_aeroanlrun="00:30:00"
-    npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_aeroanlrun
-    npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_aeroanlrun_gfs
-    export nth_aeroanlrun=1
-    export nth_aeroanlrun_gfs=1
-    npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc)
-    export npe_node_aeroanlrun
-    export is_exclusive=True
-
-elif [[ "${step}" = "aeroanlfinal" ]]; then
-
-    export wtime_aeroanlfinal="00:10:00"
-    export npe_aeroanlfinal=1
-    export nth_aeroanlfinal=1
-    npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc)
-    export npe_node_aeroanlfinal
-    export memory_aeroanlfinal="3072M"
-
-elif [[ "${step}" = "ocnanalprep" ]]; then
-
-    export wtime_ocnanalprep="00:10:00"
-    export npe_ocnanalprep=1
-    export nth_ocnanalprep=1
-    npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc)
-    export npe_node_ocnanalprep
-    export memory_ocnanalprep="24GB"
-
-elif [[ "${step}" = "ocnanalbmat" ]]; then
-   npes=16
-   case ${CASE} in
-      C384)
-        npes=480
-        ;;
-      C48)
-        npes=16
-        ;;
-      *)
-          echo "FATAL: Resolution not supported'"
-          exit 1
-    esac
-
-    export wtime_ocnanalbmat="00:30:00"
-    export npe_ocnanalbmat=${npes}
-    export nth_ocnanalbmat=1
-    export is_exclusive=True
-    npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc)
-    export npe_node_ocnanalbmat
-
-elif [[ "${step}" = "ocnanalrun" ]]; then
-   npes=16
-   case ${CASE} in
-      C384)
-        npes=480
-        ;;
-      C48)
-        npes=16
-        ;;
-      *)
-          echo "FATAL: Resolution not supported'"
-          exit 1
-    esac
-
-    export wtime_ocnanalrun="00:30:00"
-    export npe_ocnanalrun=${npes}
-    export nth_ocnanalrun=1
-    export is_exclusive=True
-    npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc)
-    export npe_node_ocnanalrun
-
-elif [[ "${step}" = "ocnanalchkpt" ]]; then
-
-   export wtime_ocnanalchkpt="00:10:00"
-   export npe_ocnanalchkpt=1
-   export nth_ocnanalchkpt=1
-   npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc)
-   export npe_node_ocnanalchkpt
-   case ${CASE} in
-      C384)
-        export memory_ocnanalchkpt="128GB"
-        ;;
-      C48)
-        export memory_ocnanalchkpt="32GB"
-        ;;
-      *)
-          echo "FATAL: Resolution not supported'"
-          exit 1
-    esac
-
-elif [[ "${step}" = "ocnanalpost" ]]; then
-
-    export wtime_ocnanalpost="00:30:00"
-    export npe_ocnanalpost=${npe_node_max}
-    export nth_ocnanalpost=1
-    npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc)
-    export npe_node_ocnanalpost
-
-elif [[ "${step}" = "ocnanalvrfy" ]]; then
-
-    export wtime_ocnanalvrfy="00:35:00"
-    export npe_ocnanalvrfy=1
-    export nth_ocnanalvrfy=1
-    npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc)
-    export npe_node_ocnanalvrfy
-    export memory_ocnanalvrfy="24GB"
-
-elif [[ ${step} = "anal" ]]; then
-
-    export wtime_anal="00:50:00"
-    export wtime_anal_gfs="00:40:00"
-    export npe_anal=780
-    export nth_anal=5
-    export npe_anal_gfs=825
-    export nth_anal_gfs=5
-    if [[ "${machine}" = "WCOSS2" ]]; then
-      export nth_anal=8
-      export nth_anal_gfs=8
-    fi
-    if [[ ${CASE} = "C384" ]]; then
-      export npe_anal=160
-      export npe_anal_gfs=160
-      export nth_anal=10
-      export nth_anal_gfs=10
-      if [[ ${machine} = "S4" ]]; then
-         #On the S4-s4 partition, this is accomplished by increasing the task
-         #count to a multiple of 32
-         if [[ ${PARTITION_BATCH} = "s4" ]]; then
-            export npe_anal=416
-            export npe_anal_gfs=416
-         fi
-         #S4 is small, so run this task with just 1 thread
-         export nth_anal=1
-         export nth_anal_gfs=1
-         export wtime_anal="02:00:00"
-      fi
-    fi
-    if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then
-      export npe_anal=84
-      export npe_anal_gfs=84
-      if [[ ${machine} = "S4" ]]; then
-         export nth_anal=4
-         export nth_anal_gfs=4
-         #Adjust job count for S4
-         if [[ ${PARTITION_BATCH} = "s4" ]]; then
-            export npe_anal=88
-            export npe_anal_gfs=88
-         elif [[ ${PARTITION_BATCH} = "ivy" ]]; then
-            export npe_anal=90
-            export npe_anal_gfs=90
-         fi
-      fi
-    fi
-    npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc)
-    export npe_node_anal
-    export nth_cycle=${nth_anal}
-    npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc)
-    export npe_node_cycle
-    export is_exclusive=True
-
 elif [[ ${step} = "analcalc" ]]; then
 
     export wtime_analcalc="00:10:00"
@@ -726,107 +424,6 @@ elif [[ ${step} = "coupled_ic" ]]; then
     export nth_coupled_ic=1
     export is_exclusive=True
 
-elif [[ "${step}" = "atmensanlinit" ]]; then
-
-    export wtime_atmensanlinit="00:10:00"
-    export npe_atmensanlinit=1
-    export nth_atmensanlinit=1
-    npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc)
-    export npe_node_atmensanlinit
-    export memory_atmensanlinit="3072M"
-
-elif [[ "${step}" = "atmensanlrun" ]]; then
-
-    # make below case dependent later
-    export layout_x=1
-    export layout_y=1
-
-    export wtime_atmensanlrun="00:30:00"
-    npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_atmensanlrun
-    npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_atmensanlrun_gfs
-    export nth_atmensanlrun=1
-    export nth_atmensanlrun_gfs=${nth_atmensanlrun}
-    npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc)
-    export npe_node_atmensanlrun
-    export is_exclusive=True
-
-elif [[ "${step}" = "atmensanlfinal" ]]; then
-
-    export wtime_atmensanlfinal="00:30:00"
-    export npe_atmensanlfinal=${npe_node_max}
-    export nth_atmensanlfinal=1
-    npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc)
-    export npe_node_atmensanlfinal
-    export is_exclusive=True
-
-elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then
-
-    export wtime_eobs="00:15:00"
-    export wtime_eomg="01:00:00"
-    if [[ ${CASE} = "C768" ]]; then
-      export npe_eobs=200
-    elif [[ ${CASE} = "C384" ]]; then
-      export npe_eobs=100
-    elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then
-      export npe_eobs=40
-    fi
-    export npe_eomg=${npe_eobs}
-    export nth_eobs=2
-    export nth_eomg=${nth_eobs}
-    npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc)
-    export npe_node_eobs
-    export npe_node_eomg=${npe_node_eobs}
-    export is_exclusive=True
-    #The number of tasks and cores used must be the same for eobs
-    #For S4, this is accomplished by running 10 tasks/node
-    if [[ ${machine} = "S4" ]]; then
-       export npe_node_eobs=10
-    fi
-
-elif [[ ${step} = "ediag" ]]; then
-
-    export wtime_ediag="00:15:00"
-    export npe_ediag=48
-    export nth_ediag=1
-    npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc)
-    export npe_node_ediag
-    export memory_ediag="30GB"
-
-elif [[ ${step} = "eupd" ]]; then
-
-    export wtime_eupd="00:30:00"
-    if [[ ${CASE} = "C768" ]]; then
-      export npe_eupd=480
-      export nth_eupd=6
-      if [[ "${machine}" = "WCOSS2" ]]; then
-        export npe_eupd=315
-        export nth_eupd=14
-      fi
-    elif [[ ${CASE} = "C384" ]]; then
-      export npe_eupd=270
-      export nth_eupd=8
-      if [[ "${machine}" = "WCOSS2" ]]; then
-        export npe_eupd=315
-        export nth_eupd=14
-      elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then
-        export nth_eupd=8
-      elif [[ ${machine} = "S4" ]]; then
-         export npe_eupd=160
-         export nth_eupd=2
-      fi
-    elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then
-      export npe_eupd=42
-      export nth_eupd=2
-      if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then
-        export nth_eupd=4
-      fi
-    fi
-    npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc)
-    export npe_node_eupd
-    export is_exclusive=True
-
 elif [[ ${step} = "ecen" ]]; then
 
     export wtime_ecen="00:10:00"
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index 6503ae552..9919b81b7 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -868,12 +868,10 @@ elif [[ ${step} = "eupd" ]]; then
       fi
     elif [[ ${CASE} = "C384" ]]; then
       export npe_eupd=270
-      export nth_eupd=2
+      export nth_eupd=8
       if [[ "${machine}" = "WCOSS2" ]]; then
         export npe_eupd=315
         export nth_eupd=14
-      elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then
-        export nth_eupd=8
       elif [[ ${machine} = "S4" ]]; then
          export npe_eupd=160
          export nth_eupd=2
-- 
GitLab


From 8940adddfe9d21189740e71487603fb2acee2336 Mon Sep 17 00:00:00 2001
From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
Date: Tue, 24 Oct 2023 11:02:29 -0400
Subject: [PATCH 42/54] Optimize the checkout script (#1956)

* Multithread the checkout script #1953
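
The change starts each component clone as a background shell job (one submodule thread
each) while the large ufs-weather-model checkout gets several, then reaps every job so a
failure in any clone is still counted. A minimal sketch of that pattern, assuming the
checkout helper defined in sorc/checkout.sh and showing only two of the real
repositories as examples:

  errs=0

  # Launch independent clones as background jobs, one submodule thread each.
  checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils"          "a283262" &
  checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "72a0471" &

  # Reap every background job and accumulate any non-zero exit statuses.
  for checkout_pid in $(jobs -p); do
    wait "${checkout_pid}" || errs=$((errs + $?))
  done

  if (( errs > 0 )); then
    echo "WARNING: one or more checkouts failed; check the logs before building"
  fi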
---
 sorc/checkout.sh | 33 +++++++++++++++++++++------------
 1 file changed, 21 insertions(+), 12 deletions(-)

diff --git a/sorc/checkout.sh b/sorc/checkout.sh
index 9c9addad1..3554cd99d 100755
--- a/sorc/checkout.sh
+++ b/sorc/checkout.sh
@@ -34,7 +34,7 @@ function checkout() {
   #   logdir [default: $(pwd)]: where you want logfiles written
   #   CLEAN [default: NO]:      whether to delete existing directories and create a fresh clone
   #
-  # Usage: checkout <dir> <remote> <version>
+  # Usage: checkout <dir> <remote> <version> <cpus> <reccursive>
   #
   #   Arguments
   #     dir:     Directory for the clone
@@ -48,7 +48,8 @@ function checkout() {
   dir="$1"
   remote="$2"
   version="$3"
-  recursive=${4:-"YES"}
+  cpus="${4:-1}"  # Default 1 thread
+  recursive=${5:-"YES"}
 
   name=$(echo "${dir}" | cut -d '.' -f 1)
   echo "Performing checkout of ${name}"
@@ -90,7 +91,7 @@ function checkout() {
   fi
   if [[ "${recursive}" == "YES" ]]; then
     echo "|-- Updating submodules (if any)"
-    git submodule update --init --recursive >> "${logfile}" 2>&1
+    git submodule update --init --recursive -j "${cpus}" >> "${logfile}" 2>&1
     status=$?
     if ((status > 0)); then
       echo "    WARNING: Error while updating submodules of ${name}"
@@ -149,25 +150,33 @@ source "${topdir}/../workflow/gw_setup.sh"
 
 # The checkout version should always be a speciifc commit (hash or tag), not a branch
 errs=0
-checkout "wxflow"          "https://github.com/NOAA-EMC/wxflow"                 "528f5ab"                    ; errs=$((errs + $?))
-checkout "gfs_utils.fd"    "https://github.com/NOAA-EMC/gfs-utils"              "a283262"                    ; errs=$((errs + $?))
-checkout "ufs_utils.fd"    "https://github.com/ufs-community/UFS_UTILS.git"     "72a0471"                    ; errs=$((errs + $?))
-checkout "ufs_model.fd"    "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-4d05445}" ; errs=$((errs + $?))
-checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git"   "c267780"                    ; errs=$((errs + $?))
+# Checkout UFS submodules in parallel
+checkout "ufs_model.fd"    "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-4d05445}" "8" ; errs=$((errs + $?))
+
+# Run all other checkouts simultaneously with just 1 core each to handle submodules.
+checkout "wxflow"          "https://github.com/NOAA-EMC/wxflow"                 "528f5ab" &
+checkout "gfs_utils.fd"    "https://github.com/NOAA-EMC/gfs-utils"              "a283262" &
+checkout "ufs_utils.fd"    "https://github.com/ufs-community/UFS_UTILS.git"     "72a0471" &
+checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git"   "c267780" &
 
 if [[ ${checkout_gsi} == "YES" ]]; then
-  checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "ca19008" "NO"; errs=$((errs + $?))
+  checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "ca19008" "1" "NO" &
 fi
 
 if [[ ${checkout_gdas} == "YES" ]]; then
-  checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "d347d22"; errs=$((errs + $?))
+  checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "d347d22" &
 fi
 
 if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then
-  checkout "gsi_utils.fd"    "https://github.com/NOAA-EMC/GSI-Utils.git"   "322cc7b"; errs=$((errs + $?))
-  checkout "gsi_monitor.fd"  "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3"; errs=$((errs + $?))
+  checkout "gsi_utils.fd"    "https://github.com/NOAA-EMC/GSI-Utils.git"   "322cc7b" &
+  checkout "gsi_monitor.fd"  "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3" &
 fi
 
+# Go through each PID and verify no errors were reported.
+for checkout_pid in $(jobs -p); do
+  wait "${checkout_pid}" || errs=$((errs + $?))
+done
+
 if (( errs > 0 )); then
   echo "WARNING: One or more errors encountered during checkout process, please check logs before building"
 fi
-- 
GitLab


From e2c624d8904cd988394c73d0edb22fa593229d3f Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com>
Date: Tue, 24 Oct 2023 13:32:52 -0400
Subject: [PATCH 43/54] Refactor UFSDA ATM var and ens layout (#1945)

---
 parm/config/gfs/config.atmanl      |  7 ++-----
 parm/config/gfs/config.atmensanl   |  7 ++-----
 parm/config/gfs/config.resources   | 13 +++++++++++++
 parm/config/gfs/yaml/defaults.yaml |  8 ++++++++
 4 files changed, 25 insertions(+), 10 deletions(-)

diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl
index 0d388f94b..abfbd8073 100644
--- a/parm/config/gfs/config.atmanl
+++ b/parm/config/gfs/config.atmanl
@@ -13,11 +13,8 @@ export STATICB_TYPE="gsibec"
 export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml
 export INTERP_METHOD='barycentric'
 
-export layout_x=1
-export layout_y=1
-
-export io_layout_x=1
-export io_layout_y=1
+export io_layout_x=@IO_LAYOUT_X@
+export io_layout_y=@IO_LAYOUT_Y@
 
 export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x
 
diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl
index 7a696fa73..58fd7b6e2 100644
--- a/parm/config/gfs/config.atmensanl
+++ b/parm/config/gfs/config.atmensanl
@@ -10,11 +10,8 @@ export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml
 export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml
 export INTERP_METHOD='barycentric'
 
-export layout_x=1
-export layout_y=1
-
-export io_layout_x=1
-export io_layout_y=1
+export io_layout_x=@IO_LAYOUT_X@
+export io_layout_y=@IO_LAYOUT_Y@
 
 export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x
 
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index 9919b81b7..fc6624df9 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -189,6 +189,15 @@ elif [[ ${step} = "waveawipsgridded" ]]; then
 
 elif [[ "${step}" = "atmanlinit" ]]; then
 
+    # make below case dependent later
+    export layout_x=1
+    export layout_y=1
+
+    layout_gsib_x=$(echo "${layout_x} * 3" | bc)
+    export layout_gsib_x
+    layout_gsib_y=$(echo "${layout_y} * 2" | bc)
+    export layout_gsib_y
+
     export wtime_atmanlinit="00:10:00"
     export npe_atmanlinit=1
     export nth_atmanlinit=1
@@ -790,6 +799,10 @@ elif [[ ${step} = "stage_ic" ]]; then
 
 elif [[ "${step}" = "atmensanlinit" ]]; then
 
+    # make below case dependent later
+    export layout_x=1
+    export layout_y=1
+
     export wtime_atmensanlinit="00:10:00"
     export npe_atmensanlinit=1
     export nth_atmensanlinit=1
diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml
index 8c2b4ff22..c0298edb1 100644
--- a/parm/config/gfs/yaml/defaults.yaml
+++ b/parm/config/gfs/yaml/defaults.yaml
@@ -6,6 +6,14 @@ base:
   DO_JEDILANDDA: "NO"
   DO_MERGENSST: "NO"
 
+atmanl:
+  IO_LAYOUT_X: 1
+  IO_LAYOUT_Y: 1
+
+atmensanl:
+  IO_LAYOUT_X: 1
+  IO_LAYOUT_Y: 1
+
 aeroanl:
   IO_LAYOUT_X: 1
   IO_LAYOUT_Y: 1
-- 
GitLab


From 1b00224e18842cd873eb1779be08f96687e49e1f Mon Sep 17 00:00:00 2001
From: Kate Friedman <kate.friedman@noaa.gov>
Date: Tue, 24 Oct 2023 14:48:24 -0400
Subject: [PATCH 44/54] Set SENDCOM=YES for tracker/genesis tasks (#1971)

Set SENDCOM to YES in config.vrfy to get outputs copied back to COM.

Will reevaluate the need for SENDCOM when moving the tracker/genesis jobs out of the vrfy job with issue #235 work.
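
For context, SENDCOM follows the usual NCO-style switch convention: a script writes its
output under its working directory and only copies it back to COM when SENDCOM=YES. A
hedged sketch of that guard (the file and directory names below are illustrative, not
taken from the tracker/genesis scripts):

  # Copy products back to COM only when SENDCOM is enabled (names are placeholders).
  if [[ "${SENDCOM:-NO}" == "YES" ]]; then
    cp "${DATA}/track.atcfunix" "${COM_ATMOS_TRACK}/track.atcfunix"
  fi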

Refs #1947
---
 parm/config/gfs/config.vrfy | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/parm/config/gfs/config.vrfy b/parm/config/gfs/config.vrfy
index 0f0ce4ff9..8754609c5 100644
--- a/parm/config/gfs/config.vrfy
+++ b/parm/config/gfs/config.vrfy
@@ -66,6 +66,8 @@ fi
 # Cyclone genesis and cyclone track verification
 #-------------------------------------------------
 
+export SENDCOM="YES" # Needed by tracker/genesis scripts still
+
 export HOMEens_tracker=$BASE_GIT/TC_tracker/${tracker_ver}
 
 if [[ "${VRFYTRAK}" = "YES" ]]; then
-- 
GitLab


From c58deae0cf078d1ee093529064d74f60482aa3f4 Mon Sep 17 00:00:00 2001
From: "Henry R. Winterbottom"
 <49202169+HenryWinterbottom-NOAA@users.noreply.github.com>
Date: Tue, 24 Oct 2023 16:19:04 -0600
Subject: [PATCH 45/54] Updates for NOAA CSP AWS global-workflow related file
 paths. (#1970)

Co-authored-by: henrywinterbottom-wxdev <henry.winterbottom.wxdev@gmail.com>
---
 docs/source/noaa_csp.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/source/noaa_csp.rst b/docs/source/noaa_csp.rst
index 3af895629..66317efe9 100644
--- a/docs/source/noaa_csp.rst
+++ b/docs/source/noaa_csp.rst
@@ -183,14 +183,14 @@ the global-workflow.
 
 The software stack supporting the ``develop`` branch of the
 global-workflow is provided for the user and is located beneath
-``/contrib/global-workflow/spack-stack``. The modules required for the
+``/contrib/emc_static/spack-stack``. The modules required for the
 global-workflow execution may be loaded as follows.
 
 .. code-block:: bash
 
    user@host:$ module unuse /opt/cray/craype/default/modulefiles
    user@host:$ module unuse /opt/cray/modulefiles
-   user@host:$ module use /contrib/global-workflow/spack-stack/miniconda/modulefiles/miniconda
+   user@host:$ module use /contrib/emc_static/spack-stack/miniconda/modulefiles/miniconda
    user@host:$ module load py39_4.12.0
    user@host:$ module load rocoto/1.3.3
 
-- 
GitLab


From 7cdfad4eaa7abe0769ff13396c54e6d93afebf8f Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA <terrence.mcguinness@cox.net>
Date: Tue, 24 Oct 2023 22:19:46 +0000
Subject: [PATCH 46/54] Build GDASapp for CI tests (#1964)

* added -u to global checkout so CI test builds include GDASApp tests

* Update check_ci.sh

needed more quotes

---------

Co-authored-by: TerrenceMcGuinness-NOAA <terry.mcguinness@noaa.gov>
---
 ci/scripts/check_ci.sh       | 4 ++--
 ci/scripts/clone-build_ci.sh | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh
index d5cf6a20b..097e20ced 100755
--- a/ci/scripts/check_ci.sh
+++ b/ci/scripts/check_ci.sh
@@ -89,9 +89,9 @@ for pr in ${pr_list}; do
     # Check to see if this PR that was opened by the weekly tests and if so close it if it passed on all platforms
     weekly_labels=$(${GH} pr view "${pr}" --repo "${REPO_URL}"  --json headRefName,labels,author --jq 'select(.author.login | contains("emcbot")) | select(.headRefName | contains("weekly_ci")) | .labels[].name ') || true
     if [[ -n "${weekly_labels}" ]]; then
-      num_platforms=$(find ../platforms -type f -name "config.*" | wc -l)
+      num_platforms=$(find "${ROOT_DIR}/ci/platforms" -type f -name "config.*" | wc -l)
       passed=0
-      for platforms in ../platforms/config.*; do
+      for platforms in "${ROOT_DIR}"/ci/platforms/config.*; do
         machine=$(basename "${platforms}" | cut -d. -f2)
         if [[ "${weekly_labels}" == *"CI-${machine^}-Passed"* ]]; then
           ((passed=passed+1))
diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh
index 796e4b701..03eff1315 100755
--- a/ci/scripts/clone-build_ci.sh
+++ b/ci/scripts/clone-build_ci.sh
@@ -79,7 +79,7 @@ echo "${commit}" > "../commit"
 cd sorc || exit 1
 set +e
 # TODO enable -u later when GDASApp tests are added
-./checkout.sh -c -g >> log.checkout 2>&1
+./checkout.sh -c -g -u >> log.checkout 2>&1
 checkout_status=$?
 if [[ ${checkout_status} != 0 ]]; then
   {
-- 
GitLab


From e817f5dd38c26a88f76d90eb71124f1acbfc5a8f Mon Sep 17 00:00:00 2001
From: Walter Kolczynski - NOAA <Walter.Kolczynski@noaa.gov>
Date: Wed, 25 Oct 2023 02:03:14 +0000
Subject: [PATCH 47/54] Fix path for marine products (#1966)

When PR #1823 was merged, the name of the marine product template
was not updated in ocnpost, causing the products to be placed in
the wrong location and missed by the archive job.

Resolves #1902
---
 jobs/rocoto/ocnpost.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh
index 0766ac3b3..5a2dc091c 100755
--- a/jobs/rocoto/ocnpost.sh
+++ b/jobs/rocoto/ocnpost.sh
@@ -29,7 +29,7 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_HISTORY COM_OCEAN_2D COM_OCEAN_3
   COM_OCEAN_XSECT COM_ICE_HISTORY
 
 for grid in "0p50" "0p25"; do
-  YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_TMPL"
+  YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL"
 done
 
 for outdir in COM_OCEAN_2D COM_OCEAN_3D COM_OCEAN_XSECT COM_OCEAN_GRIB_0p25 COM_OCEAN_GRIB_0p50; do
-- 
GitLab


From 0b5cf9b67a8146d9d4815ea6545f7f2524120d83 Mon Sep 17 00:00:00 2001
From: Kate Friedman <kate.friedman@noaa.gov>
Date: Wed, 25 Oct 2023 13:17:28 -0400
Subject: [PATCH 48/54] Update GFS version in index.rst to v16.3.10 (#1976)

Update the "State of operations" GFS version number to new v16.3.10 (Annual CO2 fix file update in operations).

Refs #1924
---
 docs/source/index.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index face361de..4c39e8dcb 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -10,7 +10,7 @@ Status
 ======
 
 * State of develop (HEAD) branch: GFSv17+ development
-* State of operations (dev/gfs.v16 branch): GFS v16.3.9 `tag: [gfs.v16.3.9] <https://github.com/NOAA-EMC/global-workflow/releases/tag/gfs.v16.3.9>`_
+* State of operations (dev/gfs.v16 branch): GFS v16.3.10 `tag: [gfs.v16.3.10] <https://github.com/NOAA-EMC/global-workflow/releases/tag/gfs.v16.3.10>`_
 
 =============
 Code managers
-- 
GitLab


From 8556541daa79b0180fde48a58a8dcfb2f8c56ea5 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan <aerorahul@users.noreply.github.com>
Date: Wed, 25 Oct 2023 16:15:28 -0400
Subject: [PATCH 49/54] Fix incorrect usage of CFP on WCOSS2 (#1977)

---
 ush/run_mpmd.sh | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/ush/run_mpmd.sh b/ush/run_mpmd.sh
index 352a41131..24cb3f265 100755
--- a/ush/run_mpmd.sh
+++ b/ush/run_mpmd.sh
@@ -4,6 +4,9 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 cmdfile=${1:?"run_mpmd requires an input file containing commands to execute in MPMD mode"}
 
+# Determine the number of MPMD processes from incoming ${cmdfile}
+nprocs=$(wc -l < "${cmdfile}")
+
 # Local MPMD file containing instructions to run in CFP
 mpmd_cmdfile="${DATA:-}/mpmd_cmdfile"
 if [[ -s "${mpmd_cmdfile}" ]]; then rm -f "${mpmd_cmdfile}"; fi
@@ -19,7 +22,6 @@ if [[ "${launcher:-}" =~ ^srun.* ]]; then  #  srun-based system e.g. Hera, Orion
     ((nm=nm+1))
   done < "${cmdfile}"
 
-  nprocs=$(wc -l < "${mpmd_cmdfile}")
   set +e
   # shellcheck disable=SC2086
   ${launcher:-} ${mpmd_opt:-} -n ${nprocs} "${mpmd_cmdfile}"
@@ -42,7 +44,8 @@ elif [[ "${launcher:-}" =~ ^mpiexec.* ]]; then  # mpiexec
   done < "${cmdfile}"
 
   chmod 755 "${mpmd_cmdfile}"
-  ${launcher:-} "${mpmd_cmdfile}"
+  # shellcheck disable=SC2086
+  ${launcher:-} -np ${nprocs} ${mpmd_opt:-} "${mpmd_cmdfile}"
   rc=$?
   if (( rc == 0 )); then
     out_files=$(find . -name 'mpmd.*.out')
-- 
GitLab


From c02e118c0ac5485109c9bd33472ff42db309cd9b Mon Sep 17 00:00:00 2001
From: Jessica Meixner <jessica.meixner@noaa.gov>
Date: Thu, 26 Oct 2023 15:05:05 -0400
Subject: [PATCH 50/54] Update ufs model to version from 10-12-23 (#1933)

This updates the ufs-weather-model to the commit hash from 10-12-23, following on from the HR2 tag.
The diffs can be seen here: https://github.com/ufs-community/ufs-weather-model/compare/GFSv17.HR2...68050e58589a82ab509aaefaafdc2a6b90f34e48

Resolves #1811
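
Both pins move together: the Externals.cfg tag and the default hash in sorc/checkout.sh
change from 4d05445 to 68050e5. A rough, hand-run equivalent of what the checkout helper
does for this component (the exact clone options and submodule handling here are
assumptions; checkout.sh also adds logging and parallel submodule jobs):

    git clone https://github.com/ufs-community/ufs-weather-model sorc/ufs_model.fd
    git -C sorc/ufs_model.fd checkout 68050e5
    git -C sorc/ufs_model.fd submodule update --init --recursive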
---
 Externals.cfg                                 |  2 +-
 env/WCOSS2.env                                |  1 +
 jobs/rocoto/efcs.sh                           | 20 +-----
 jobs/rocoto/fcst.sh                           | 63 ++--------------
 jobs/rocoto/waveinit.sh                       |  3 +-
 jobs/rocoto/wavepostbndpnt.sh                 |  3 +-
 jobs/rocoto/wavepostbndpntbll.sh              |  3 +-
 jobs/rocoto/wavepostpnt.sh                    |  3 +-
 jobs/rocoto/wavepostsbs.sh                    |  3 +-
 jobs/rocoto/waveprep.sh                       |  3 +-
 parm/config/gefs/config.base.emc.dyn          |  2 +-
 parm/config/gfs/config.resources              |  2 +-
 parm/ufs/chem/CAP.rc                          |  2 +
 parm/ufs/chem/GOCART2G_GridComp.rc            |  2 +
 parm/ufs/nems.configure.cpld.IN               | 11 ++-
 parm/ufs/nems.configure.cpld_aero.IN          | 11 ++-
 .../ufs/nems.configure.cpld_aero_outerwave.IN | 11 ++-
 parm/ufs/nems.configure.cpld_aero_wave.IN     | 12 +++-
 parm/ufs/nems.configure.cpld_outerwave.IN     | 11 ++-
 parm/ufs/nems.configure.cpld_wave.IN          | 11 ++-
 scripts/exglobal_forecast.sh                  |  1 +
 sorc/checkout.sh                              |  2 +-
 ush/forecast_postdet.sh                       | 23 +++++-
 ush/load_ufswm_modules.sh                     | 71 +++++++++++++++++++
 ush/nems_configure.sh                         |  2 +
 ush/parsing_namelists_FV3.sh                  | 12 ----
 26 files changed, 172 insertions(+), 118 deletions(-)
 create mode 100755 ush/load_ufswm_modules.sh

diff --git a/Externals.cfg b/Externals.cfg
index eaf397edb..1b30c321b 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -8,7 +8,7 @@ protocol = git
 required = True
 
 [UFS]
-tag = 4d05445
+tag = 68050e5
 local_path = sorc/ufs_model.fd
 repo_url = https://github.com/ufs-community/ufs-weather-model.git
 protocol = git
diff --git a/env/WCOSS2.env b/env/WCOSS2.env
index 22d65ba0e..ae5ceaa02 100755
--- a/env/WCOSS2.env
+++ b/env/WCOSS2.env
@@ -35,6 +35,7 @@ elif [[ "${step}" = "preplandobs" ]]; then
 
 elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll"  ]] || [[ "${step}" = "wavepostpnt" ]]; then
 
+    export USE_CFP="YES"
     if [[ "${step}" = "waveprep" ]] && [[ "${CDUMP}" = "gfs" ]]; then export NTASKS=${NTASKS_gfs} ; fi
     export wavempexec="${launcher} -np"
     export wave_mpmd=${mpmd_opt}
diff --git a/jobs/rocoto/efcs.sh b/jobs/rocoto/efcs.sh
index 46a25ac75..fa9dc0c26 100755
--- a/jobs/rocoto/efcs.sh
+++ b/jobs/rocoto/efcs.sh
@@ -5,23 +5,9 @@ source "${HOMEgfs}/ush/preamble.sh"
 ###############################################################
 # Source FV3GFS workflow modules
 #. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
-#status=$?
-#[[ ${status} -ne 0 ]] && exit ${status}
-
-# TODO: clean this up
-source "${HOMEgfs}/ush/detect_machine.sh"
-set +x
-source "${HOMEgfs}/ush/module-setup.sh"
-module use "${HOMEgfs}/sorc/ufs_model.fd/tests"
-module load modules.ufs_model.lua
-# Workflow needs utilities from prod_util (setPDY.sh, ndate, etc.)
-module load prod_util
-if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-  module load cray-pals
-fi
-module list
-unset MACHINE_ID
-set_trace
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit ${status}
 
 export job="efcs"
 export jobid="${job}.$$"
diff --git a/jobs/rocoto/fcst.sh b/jobs/rocoto/fcst.sh
index 9d59f70bd..a5be293f9 100755
--- a/jobs/rocoto/fcst.sh
+++ b/jobs/rocoto/fcst.sh
@@ -3,65 +3,11 @@
 source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
-# Source FV3GFS workflow modules
+# Source UFS Weather Model workflow modules
 #. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
-#status=$?
-#[[ ${status} -ne 0 ]] && exit ${status}
-
-# TODO: clean this up
-source "${HOMEgfs}/ush/detect_machine.sh"
-set +x
-source "${HOMEgfs}/ush/module-setup.sh"
-if [[ "${MACHINE_ID}" != "noaacloud" ]]; then
-  module use "${HOMEgfs}/sorc/ufs_model.fd/tests"
-  module load modules.ufs_model.lua
-  module load prod_util
-fi
-  
-if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-  module load cray-pals
-fi
-if [[ "${MACHINE_ID}" = "hera" ]]; then
-  module use "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/modulefiles/core"
-  module load "miniconda3/4.6.14"
-  module load "gfs_workflow/1.0.0"
-# TODO: orion and wcoss2 will be uncommented when they are ready.  This comment block will be removed in the next PR
-#elif [[ "${MACHINE_ID}" = "orion" ]]; then
-#  module use "/home/rmahajan/opt/global-workflow/modulefiles/core"
-#  module load "python/3.7.5"
-#  module load "gfs_workflow/1.0.0"
-#elif [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-#  module load "python/3.7.5"
-fi
-if [[ "${MACHINE_ID}" == "noaacloud" ]]; then
-   if [[ "${PW_CSP:-}" = "aws" ]]; then
-
-      # TODO: This can be cleaned-up; most of this is a hack for now.
-      module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core"
-      module load "stack-intel"
-      module load "stack-intel-oneapi-mpi"
-      module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/"
-      module load "py39_4.12.0"
-      module load "ufs-weather-model-env/1.0.0"
-      export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0"
-      # TODO: Are there plans for EPIC to maintain this package or should GW provide support?
-      export UTILROOT="/contrib/global-workflow/NCEPLIBS-prod_util" 
-      export PATH="${PATH}:/contrib/global-workflow/bin"
-      ndate_path="$(command -v ndate)"
-      export NDATE="${ndate_path}"
-   fi
-fi
-
-module list
-unset MACHINE_ID
-set_trace
-
-###############################################################
-# exglobal_forecast.py requires the following in PYTHONPATH
-# This will be moved to a module load when ready
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src:${HOMEgfs}/ush/python/pygfs"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit ${status}
 
 export job="fcst"
 export jobid="${job}.$$"
@@ -71,5 +17,4 @@ export jobid="${job}.$$"
 ${HOMEgfs}/jobs/JGLOBAL_FORECAST
 status=$?
 
-
 exit ${status}
diff --git a/jobs/rocoto/waveinit.sh b/jobs/rocoto/waveinit.sh
index d0c3f4992..b38367d09 100755
--- a/jobs/rocoto/waveinit.sh
+++ b/jobs/rocoto/waveinit.sh
@@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh"
 ###############################################################
 echo
 echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ==============="
-. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/jobs/rocoto/wavepostbndpnt.sh b/jobs/rocoto/wavepostbndpnt.sh
index 5d2649835..1a4f94015 100755
--- a/jobs/rocoto/wavepostbndpnt.sh
+++ b/jobs/rocoto/wavepostbndpnt.sh
@@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh"
 ###############################################################
 echo
 echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ==============="
-. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/jobs/rocoto/wavepostbndpntbll.sh b/jobs/rocoto/wavepostbndpntbll.sh
index ce4f9e6b2..2d128facb 100755
--- a/jobs/rocoto/wavepostbndpntbll.sh
+++ b/jobs/rocoto/wavepostbndpntbll.sh
@@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh"
 ###############################################################
 echo
 echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ==============="
-. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/jobs/rocoto/wavepostpnt.sh b/jobs/rocoto/wavepostpnt.sh
index 9efb755de..60017394f 100755
--- a/jobs/rocoto/wavepostpnt.sh
+++ b/jobs/rocoto/wavepostpnt.sh
@@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh"
 ###############################################################
 echo
 echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ==============="
-. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/jobs/rocoto/wavepostsbs.sh b/jobs/rocoto/wavepostsbs.sh
index e4bea0bc3..f4789210d 100755
--- a/jobs/rocoto/wavepostsbs.sh
+++ b/jobs/rocoto/wavepostsbs.sh
@@ -4,7 +4,8 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
 # Source FV3GFS workflow modules
-. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/jobs/rocoto/waveprep.sh b/jobs/rocoto/waveprep.sh
index 0cbafde87..fa934167b 100755
--- a/jobs/rocoto/waveprep.sh
+++ b/jobs/rocoto/waveprep.sh
@@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh"
 ###############################################################
 echo
 echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ==============="
-. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn
index e3e221c00..3d1742140 100644
--- a/parm/config/gefs/config.base.emc.dyn
+++ b/parm/config/gefs/config.base.emc.dyn
@@ -82,7 +82,7 @@ export VERBOSE="YES"
 export KEEPDATA="NO"
 export CHGRP_RSTPROD="@CHGRP_RSTPROD@"
 export CHGRP_CMD="@CHGRP_CMD@"
-export NCDUMP="${NETCDF}/bin/ncdump"
+export NCDUMP="${NETCDF:-}/bin/ncdump"
 export NCLEN="${HOMEgfs}/ush/getncdimlen"
 
 # Machine environment, jobs, and other utility scripts
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index fc6624df9..5b9fa1723 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -149,7 +149,7 @@ elif [[ ${step} = "wavepostbndpntbll" ]]; then
 
 elif [[ ${step} = "wavepostpnt" ]]; then
 
-    export wtime_wavepostpnt="01:30:00"
+    export wtime_wavepostpnt="04:00:00"
     export npe_wavepostpnt=200
     export nth_wavepostpnt=1
     npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc)
diff --git a/parm/ufs/chem/CAP.rc b/parm/ufs/chem/CAP.rc
index 64425b1bb..d40106ae8 100644
--- a/parm/ufs/chem/CAP.rc
+++ b/parm/ufs/chem/CAP.rc
@@ -8,6 +8,8 @@ REPORT_THROUGHPUT: .false.
 
 USE_SHMEM: 0
 
+GridType: Cubed-Sphere
+
 MAPL_ENABLE_TIMERS: NO
 MAPL_ENABLE_MEMUTILS: NO
 PRINTSPEC: 0  # (0: OFF, 1: IMPORT & EXPORT, 2: IMPORT, 3: EXPORT)
diff --git a/parm/ufs/chem/GOCART2G_GridComp.rc b/parm/ufs/chem/GOCART2G_GridComp.rc
index 5ea4fa45d..18954f8cd 100644
--- a/parm/ufs/chem/GOCART2G_GridComp.rc
+++ b/parm/ufs/chem/GOCART2G_GridComp.rc
@@ -39,3 +39,5 @@ PASSIVE_INSTANCES_NI:
 aerosol_monochromatic_optics_wavelength_in_nm_from_LUT: 470 550 670 870
 wavelengths_for_profile_aop_in_nm: 470 550 670 870               # must be included in LUT
 wavelengths_for_vertically_integrated_aop_in_nm: 470 550 670 870 # must be included in LUT
+
+use_threads: .TRUE.
diff --git a/parm/ufs/nems.configure.cpld.IN b/parm/ufs/nems.configure.cpld.IN
index 0f6e68cc4..2182a9638 100644
--- a/parm/ufs/nems.configure.cpld.IN
+++ b/parm/ufs/nems.configure.cpld.IN
@@ -73,6 +73,7 @@ runSeq::
      MED med_phases_post_atm
      ICE -> MED :remapMethod=redist
      MED med_phases_post_ice
+     MED med_phases_ocnalb_run
      MED med_phases_prep_ocn_accum
    @
    OCN -> MED :remapMethod=redist
@@ -89,12 +90,10 @@ MED_attributes::
       ATM_model = @[atm_model]
       ICE_model = @[ice_model]
       OCN_model = @[ocn_model]
-      history_n = 0
-      history_option = nhours
-      history_ymd = -999
       coupling_mode = @[CPLMODE]
       history_tile_atm = @[ATMTILESIZE]
       pio_rearranger = box
+      ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
       ScalarFieldCount = 2
@@ -114,4 +113,10 @@ ALLCOMP_attributes::
       stop_n = @[FHMAX]
       stop_option = nhours
       stop_ymd = -999
+      orb_eccen = 1.e36
+      orb_iyear = 2000
+      orb_iyear_align = 2000
+      orb_mode = fixed_year
+      orb_mvelp = 1.e36
+      orb_obliq = 1.e36
 ::
diff --git a/parm/ufs/nems.configure.cpld_aero.IN b/parm/ufs/nems.configure.cpld_aero.IN
index 9d4afff12..7b1331809 100644
--- a/parm/ufs/nems.configure.cpld_aero.IN
+++ b/parm/ufs/nems.configure.cpld_aero.IN
@@ -85,6 +85,7 @@ runSeq::
      MED med_phases_post_atm
      ICE -> MED :remapMethod=redist
      MED med_phases_post_ice
+     MED med_phases_ocnalb_run
      MED med_phases_prep_ocn_accum
    @
    OCN -> MED :remapMethod=redist
@@ -101,12 +102,10 @@ MED_attributes::
       ATM_model = @[atm_model]
       ICE_model = @[ice_model]
       OCN_model = @[ocn_model]
-      history_n = 0
-      history_option = nhours
-      history_ymd = -999
       coupling_mode = @[CPLMODE]
       history_tile_atm = @[ATMTILESIZE]
       pio_rearranger = box
+      ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
       ScalarFieldCount = 2
@@ -126,4 +125,10 @@ ALLCOMP_attributes::
       stop_n = @[FHMAX]
       stop_option = nhours
       stop_ymd = -999
+      orb_eccen = 1.e36
+      orb_iyear = 2000
+      orb_iyear_align = 2000
+      orb_mode = fixed_year
+      orb_mvelp = 1.e36
+      orb_obliq = 1.e36
 ::
diff --git a/parm/ufs/nems.configure.cpld_aero_outerwave.IN b/parm/ufs/nems.configure.cpld_aero_outerwave.IN
index 78a009b87..fbbf4441f 100644
--- a/parm/ufs/nems.configure.cpld_aero_outerwave.IN
+++ b/parm/ufs/nems.configure.cpld_aero_outerwave.IN
@@ -101,6 +101,7 @@ runSeq::
      MED med_phases_post_atm
      ICE -> MED :remapMethod=redist
      MED med_phases_post_ice
+     MED med_phases_ocnalb_run
      MED med_phases_prep_ocn_accum
      MED med_phases_prep_wav_accum
    @
@@ -121,12 +122,10 @@ MED_attributes::
       ICE_model = @[ice_model]
       OCN_model = @[ocn_model]
       WAV_model = @[wav_model]
-      history_n = 0
-      history_option = nhours
-      history_ymd = -999
       coupling_mode = @[CPLMODE]
       history_tile_atm = @[ATMTILESIZE]
       pio_rearranger = box
+      ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
       ScalarFieldCount = 2
@@ -146,4 +145,10 @@ ALLCOMP_attributes::
       stop_n = @[FHMAX]
       stop_option = nhours
       stop_ymd = -999
+      orb_eccen = 1.e36
+      orb_iyear = 2000
+      orb_iyear_align = 2000
+      orb_mode = fixed_year
+      orb_mvelp = 1.e36
+      orb_obliq = 1.e36
 ::
diff --git a/parm/ufs/nems.configure.cpld_aero_wave.IN b/parm/ufs/nems.configure.cpld_aero_wave.IN
index 6b886b062..2bb041698 100644
--- a/parm/ufs/nems.configure.cpld_aero_wave.IN
+++ b/parm/ufs/nems.configure.cpld_aero_wave.IN
@@ -104,6 +104,7 @@ runSeq::
      MED med_phases_post_ice
      WAV -> MED :remapMethod=redist
      MED med_phases_post_wav
+     MED med_phases_ocnalb_run
      MED med_phases_prep_ocn_accum
    @
    OCN -> MED :remapMethod=redist
@@ -121,11 +122,10 @@ MED_attributes::
       ICE_model = @[ice_model]
       OCN_model = @[ocn_model]
       WAV_model = @[wav_model]
-      history_n = 0
-      history_option = nhours
-      history_ymd = -999
       coupling_mode = @[CPLMODE]
       history_tile_atm = @[ATMTILESIZE]
+      pio_rearranger = box
+      ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
       ScalarFieldCount = 2
@@ -145,4 +145,10 @@ ALLCOMP_attributes::
       stop_n = @[FHMAX]
       stop_option = nhours
       stop_ymd = -999
+      orb_eccen = 1.e36
+      orb_iyear = 2000
+      orb_iyear_align = 2000
+      orb_mode = fixed_year
+      orb_mvelp = 1.e36
+      orb_obliq = 1.e36
 ::
diff --git a/parm/ufs/nems.configure.cpld_outerwave.IN b/parm/ufs/nems.configure.cpld_outerwave.IN
index 736e0cf3f..521e59941 100644
--- a/parm/ufs/nems.configure.cpld_outerwave.IN
+++ b/parm/ufs/nems.configure.cpld_outerwave.IN
@@ -89,6 +89,7 @@ runSeq::
      MED med_phases_post_atm
      ICE -> MED :remapMethod=redist
      MED med_phases_post_ice
+     MED med_phases_ocnalb_run
      MED med_phases_prep_ocn_accum
      MED med_phases_prep_wav_accum
    @
@@ -109,12 +110,10 @@ MED_attributes::
       ICE_model = @[ice_model]
       OCN_model = @[ocn_model]
       WAV_model = @[wav_model]
-      history_n = 0
-      history_option = nhours
-      history_ymd = -999
       coupling_mode = @[CPLMODE]
       history_tile_atm = @[ATMTILESIZE]
       pio_rearranger = box
+      ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
       ScalarFieldCount = 2
@@ -134,4 +133,10 @@ ALLCOMP_attributes::
       stop_n = @[FHMAX]
       stop_option = nhours
       stop_ymd = -999
+      orb_eccen = 1.e36
+      orb_iyear = 2000
+      orb_iyear_align = 2000
+      orb_mode = fixed_year
+      orb_mvelp = 1.e36
+      orb_obliq = 1.e36
 ::
diff --git a/parm/ufs/nems.configure.cpld_wave.IN b/parm/ufs/nems.configure.cpld_wave.IN
index 3a1c91890..f9f4bc99f 100644
--- a/parm/ufs/nems.configure.cpld_wave.IN
+++ b/parm/ufs/nems.configure.cpld_wave.IN
@@ -92,6 +92,7 @@ runSeq::
      MED med_phases_post_ice
      WAV -> MED :remapMethod=redist
      MED med_phases_post_wav
+     MED med_phases_ocnalb_run
      MED med_phases_prep_ocn_accum
    @
    OCN -> MED :remapMethod=redist
@@ -109,12 +110,10 @@ MED_attributes::
       ICE_model = @[ice_model]
       OCN_model = @[ocn_model]
       WAV_model = @[wav_model]
-      history_n = 0
-      history_option = nhours
-      history_ymd = -999
       coupling_mode = @[CPLMODE]
       history_tile_atm = @[ATMTILESIZE]
       pio_rearranger = box
+      ocean_albedo_limit = @[ocean_albedo_limit]
 ::
 ALLCOMP_attributes::
       ScalarFieldCount = 2
@@ -134,4 +133,10 @@ ALLCOMP_attributes::
       stop_n = @[FHMAX]
       stop_option = nhours
       stop_ymd = -999
+      orb_eccen = 1.e36
+      orb_iyear = 2000
+      orb_iyear_align = 2000
+      orb_mode = fixed_year
+      orb_mvelp = 1.e36
+      orb_obliq = 1.e36
 ::
diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh
index 076c635c1..86cea85de 100755
--- a/scripts/exglobal_forecast.sh
+++ b/scripts/exglobal_forecast.sh
@@ -164,6 +164,7 @@ FV3_out
 [[ ${cplflx} = .true. ]] && MOM6_out
 [[ ${cplwav} = .true. ]] && WW3_out
 [[ ${cplice} = .true. ]] && CICE_out
+[[ ${cplchm} = .true. ]] && GOCART_out
 [[ ${esmf_profile:-} = .true. ]] && CPL_out
 echo "MAIN: Output copied to COMROT"
 
diff --git a/sorc/checkout.sh b/sorc/checkout.sh
index 3554cd99d..1a3d2c9da 100755
--- a/sorc/checkout.sh
+++ b/sorc/checkout.sh
@@ -151,7 +151,7 @@ source "${topdir}/../workflow/gw_setup.sh"
 # The checkout version should always be a speciifc commit (hash or tag), not a branch
 errs=0
 # Checkout UFS submodules in parallel
-checkout "ufs_model.fd"    "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-4d05445}" "8" ; errs=$((errs + $?))
+checkout "ufs_model.fd"    "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-68050e5}" "8" ; errs=$((errs + $?))
 
 # Run all other checkouts simultaneously with just 1 core each to handle submodules.
 checkout "wxflow"          "https://github.com/NOAA-EMC/wxflow"                 "528f5ab" &
diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index 1336abe5b..e3166cd72 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -1038,7 +1038,26 @@ GOCART_postdet() {
       rm -f "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
     fi
 
-    ${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
-           "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
+    #To Do: Temporarily removing this as this will crash gocart, adding copy statement at the end 
+    #${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
+    #       "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
   done
 }
+
+GOCART_out() {
+  echo "SUB ${FUNCNAME[0]}: Copying output data for GOCART"
+
+  # Copy gocart.inst_aod after the forecast is run (and successful)
+  # TO DO: this should be linked but there were issues where gocart was crashing if it was linked
+  local fhr 
+  local vdate 
+  for fhr in ${FV3_OUTPUT_FH}; do
+    if (( fhr == 0 )); then continue; fi
+    vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
+    ${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
+      "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
+  done
+
+
+}
+
diff --git a/ush/load_ufswm_modules.sh b/ush/load_ufswm_modules.sh
new file mode 100755
index 000000000..2d6e6a02c
--- /dev/null
+++ b/ush/load_ufswm_modules.sh
@@ -0,0 +1,71 @@
+#! /usr/bin/env bash
+
+###############################################################
+if [[ "${DEBUG_WORKFLOW:-NO}" == "NO" ]]; then
+    echo "Loading modules quietly..."
+    set +x
+fi
+
+# Setup runtime environment by loading modules
+ulimit_s=$( ulimit -S -s )
+
+source "${HOMEgfs}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/module-setup.sh"
+if [[ "${MACHINE_ID}" != "noaacloud" ]]; then
+  module use "${HOMEgfs}/sorc/ufs_model.fd/tests"
+  module load modules.ufs_model.lua
+  if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
+    module load prod_util
+    module load cray-pals
+    module load cfp
+    module load libjpeg
+  else
+    module load prod-util
+    export UTILROOT=${prod_util_ROOT}
+  fi
+  module load wgrib2
+  export WGRIB2=wgrib2
+fi
+if [[ "${MACHINE_ID}" = "hera" ]]; then
+  module use "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/modulefiles/core"
+  module load "miniconda3/4.6.14"
+  module load "gfs_workflow/1.0.0"
+fi
+if [[ "${MACHINE_ID}" == "noaacloud" ]]; then
+   if [[ "${PW_CSP:-}" = "aws" ]]; then
+
+      # TODO: This can be cleaned-up; most of this is a hack for now.
+      module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core"
+      module load "stack-intel"
+      module load "stack-intel-oneapi-mpi"
+      module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/"
+      module load "py39_4.12.0"
+      module load "ufs-weather-model-env/1.0.0"
+      export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0"
+      # TODO: Are there plans for EPIC to maintain this package or should GW provide support?
+      export UTILROOT="/contrib/global-workflow/NCEPLIBS-prod_util"
+      export PATH="${PATH}:/contrib/global-workflow/bin"
+      ndate_path="$(command -v ndate)"
+      export NDATE="${ndate_path}"
+   fi
+fi
+
+module list
+unset MACHINE_ID
+
+###############################################################
+# exglobal_forecast.py requires the following in PYTHONPATH
+# This will be moved to a module load when ready
+wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src:${HOMEgfs}/ush/python/pygfs"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+export PYTHONPATH
+
+# Restore stack soft limit:
+ulimit -S -s "${ulimit_s}"
+unset ulimit_s
+
+# If this function exists in the environment, run it; else do not
+ftype=$(type -t set_trace)
+if [[ "${ftype}" == "function" ]]; then
+  set_trace
+fi
diff --git a/ush/nems_configure.sh b/ush/nems_configure.sh
index ecbc4acde..28c6713dd 100755
--- a/ush/nems_configure.sh
+++ b/ush/nems_configure.sh
@@ -56,7 +56,9 @@ if [[ "${cplflx}" = ".true." ]]; then
   local CPLMODE="${cplmode}"
   local coupling_interval_fast_sec="${CPL_FAST}"
   local RESTART_N="${restart_interval}"
+  local ocean_albedo_limit=0.06
   local ATMTILESIZE="${CASE:1}"
+  local ocean_albedo_limit=0.06
 fi
 
 if [[ "${cplice}" = ".true." ]]; then
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index 27cb96ab8..ec3af8341 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -74,18 +74,6 @@ cat > input.nml <<EOF
   ${diag_manager_nml:-}
 /
 
-&fms_io_nml
-  checksum_required = .false.
-  max_files_r = 100
-  max_files_w = 100
-  ${fms_io_nml:-}
-/
-
-&mpp_io_nml
-  shuffle=${shuffle:-1}
-  deflate_level=${deflate_level:-1}
-/
-
 &fms_nml
   clock_grain = 'ROUTINE'
   domains_stack_size = ${domains_stack_size:-3000000}
-- 
GitLab


From 962368884991670d3a1ee1c5a065d5bce4b7561a Mon Sep 17 00:00:00 2001
From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
Date: Thu, 26 Oct 2023 16:10:23 -0400
Subject: [PATCH 51/54] Test tarballs for rstprod before calling chgrp (#1967)

Certain tarballs may or may not contain `rstprod` data.  For instance, the first half-cycle
gdas and enkfgdas tarballs will not contain `rstprod`, while those from later cycles likely will.
Also, some systems do not have the `rstprod` group at all.  This change tests the contents of
each tarball before attempting to change its group to rstprod.

Resolves #1460
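
The detection itself is a small loop over the archive list: if any file in the list is owned
by the rstprod group, the resulting tarball is restricted; otherwise the chgrp/chmod step is
skipped. A condensed, runnable sketch of that check (the sample list here is a stand-in for
${ARCH_LIST}/${RUN}.txt):

    #!/usr/bin/env bash
    # Stand-in archive list; in the scripts this is ${ARCH_LIST}/${RUN}.txt (or ${targrp}.txt).
    arch_list=$(mktemp)
    printf '%s\n' /etc/passwd /etc/hosts > "${arch_list}"

    has_rstprod="NO"
    while IFS= read -r file; do
        if [[ -f ${file} ]]; then
            group=$(stat -c "%G" "${file}")      # owning group of the archived file
            if [[ "${group}" == "rstprod" ]]; then
                has_rstprod="YES"
                break
            fi
        fi
    done < "${arch_list}"

    echo "restrict tarball with chgrp rstprod / chmod 640: ${has_rstprod}"
    rm -f "${arch_list}"

If the chgrp or chmod then fails, the change deletes the tarball rather than leave restricted
data unprotected.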
---
 scripts/exgdas_enkf_earc.sh | 37 +++++++++++++++++++++++++++----
 scripts/exglobal_archive.sh | 44 ++++++++++++++++++++++++++++++++-----
 2 files changed, 71 insertions(+), 10 deletions(-)

diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh
index a1bcba4d7..f172a4ef4 100755
--- a/scripts/exgdas_enkf_earc.sh
+++ b/scripts/exgdas_enkf_earc.sh
@@ -112,12 +112,41 @@ if [ "${ENSGRP}" -eq 0 ]; then
         fi
 
         set +e
-        ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" $(cat "${ARCH_LIST}/${RUN}.txt")
+        # Check if the tarball will have rstprod in it
+        has_rstprod="NO"
+        while IFS= read -r file; do
+            if [[ -f ${file} ]]; then
+                group=$( stat -c "%G" "${file}" )
+                if [[ "${group}" == "rstprod" ]]; then
+                    has_rstprod="YES"
+                    break
+                fi
+            fi
+        done < "${ARCH_LIST}/${RUN}.txt"
+
+        # Create the tarball
+        tar_fl=${ATARDIR}/${PDY}${cyc}/${RUN}.tar
+        ${TARCMD} -P -cvf "${tar_fl}" $(cat "${ARCH_LIST}/${RUN}.txt")
         status=$?
-        ${HSICMD} chgrp rstprod "${ATARDIR}/${PDY}${cyc}/${RUN}.tar"
-        ${HSICMD} chmod 640 "${ATARDIR}/${PDY}${cyc}/${RUN}.tar"
+
+        # If rstprod was found, change the group of the tarball
+        if [[ "${has_rstprod}" == "YES" ]]; then
+            ${HSICMD} chgrp rstprod "${tar_fl}"
+            stat_chgrp=$?
+            ${HSICMD} chmod 640 "${tar_fl}"
+            stat_chgrp=$((stat_chgrp+$?))
+            if [[ "${stat_chgrp}" -gt 0 ]]; then
+                echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!"
+                echo "Attempting to delete ${tar_fl}"
+                ${HSICMD} rm "${tar_fl}"
+                echo "Please verify that ${tar_fl} was deleted!"
+                exit "${stat_chgrp}"
+            fi
+        fi
+
+        # For safety, test if the htar/tar command failed only after changing groups
         if (( status != 0 && ${PDY}${cyc} >= firstday )); then
-            echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}.tar failed"
+            echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed"
             exit "${status}"
         fi
         set_strict
diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh
index 78a6d60b6..54323a0dd 100755
--- a/scripts/exglobal_archive.sh
+++ b/scripts/exglobal_archive.sh
@@ -262,17 +262,49 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then
     shopt -s extglob
     for targrp in ${targrp_list}; do
         set +e
-        ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${ARCH_LIST}/${targrp}.txt")
-        status=$?
+
+        # Test whether gdas.tar or gdas_restarta.tar will have rstprod data
+        has_rstprod="NO"
         case ${targrp} in
             'gdas'|'gdas_restarta')
-                ${HSICMD} chgrp rstprod "${ATARDIR}/${CDATE}/${targrp}.tar"
-                ${HSICMD} chmod 640 "${ATARDIR}/${CDATE}/${targrp}.tar"
+                # Test for rstprod in each archived file
+                while IFS= read -r file; do
+                    if [[ -f ${file} ]]; then
+                        group=$( stat -c "%G" "${file}" )
+                        if [[ "${group}" == "rstprod" ]]; then
+                            has_rstprod="YES"
+                            break
+                        fi
+                    fi
+                done < "${ARCH_LIST}/${targrp}.txt"
+
                 ;;
             *) ;;
         esac
-        if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then
-            echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${targrp}.tar failed"
+
+        # Create the tarball
+        tar_fl="${ATARDIR}/${PDY}${cyc}/${targrp}.tar"
+        ${TARCMD} -P -cvf "${tar_fl}" $(cat "${ARCH_LIST}/${targrp}.txt")
+        status=$?
+
+        # Change group to rstprod if it was found even if htar/tar failed in case of partial creation
+        if [[ "${has_rstprod}" == "YES" ]]; then
+            ${HSICMD} chgrp rstprod "${tar_fl}"
+            stat_chgrp=$?
+            ${HSICMD} chmod 640 "${tar_fl}"
+            stat_chgrp=$((stat_chgrp+$?))
+            if [ "${stat_chgrp}" -gt 0 ]; then
+                echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!"
+                echo "Attempting to delete ${tar_fl}"
+                ${HSICMD} rm "${tar_fl}"
+                echo "Please verify that ${tar_fl} was deleted!"
+                exit "${stat_chgrp}"
+            fi
+        fi
+
+        # For safety, test if the htar/tar command failed after changing groups
+        if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then
+            echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed"
             exit "${status}"
         fi
         set_strict
-- 
GitLab


From 77c1ff2bfa26b341b01f41e50ee0ecb31cc4f661 Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com>
Date: Thu, 26 Oct 2023 23:44:27 -0400
Subject: [PATCH 52/54] Update GDASApp hash (#1975)

Update GDASApp hash to bring recent UFSDA development into g-w.

Resolves #1972
---
 Externals.cfg    | 2 +-
 sorc/checkout.sh | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Externals.cfg b/Externals.cfg
index 1b30c321b..abe5f30aa 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -57,7 +57,7 @@ protocol = git
 required = False
 
 [GDASApp]
-hash = d347d22
+hash = 7659c10
 local_path = sorc/gdas.cd
 repo_url = https://github.com/NOAA-EMC/GDASApp.git
 protocol = git
diff --git a/sorc/checkout.sh b/sorc/checkout.sh
index 1a3d2c9da..a756c8d04 100755
--- a/sorc/checkout.sh
+++ b/sorc/checkout.sh
@@ -164,7 +164,7 @@ if [[ ${checkout_gsi} == "YES" ]]; then
 fi
 
 if [[ ${checkout_gdas} == "YES" ]]; then
-  checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "d347d22" &
+  checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "7659c10" &
 fi
 
 if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then
-- 
GitLab


From aff6ca7ae559e424dae128d6cfd5da4a4e3049e7 Mon Sep 17 00:00:00 2001
From: Anil Kumar <108816337+AnilKumar-NOAA@users.noreply.github.com>
Date: Fri, 27 Oct 2023 14:11:55 -0400
Subject: [PATCH 53/54] GEFS Staging in exglobal_stage_ic  (#1892)

Adjusts source paths for forecast-only ICs in support of extending them to ensembles.
The new paths omit the resolution (which is captured by the IC id name) and allow for
a member level.

Directories with the reorganized ICs have been added to the three tier-1 machines
under glopara space.

Building on this, the PR also adds the capability to stage GEFS ICs.

Resolves #911
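
Concretely, for an ocean restart the staging source moves from a resolution subdirectory to a
member/component layout; GEFS loops over mem000..memNNN while GFS keeps an empty member
directory. Illustrative sketch (the values below are placeholders, not real ICs):

    #!/usr/bin/env bash
    # Placeholder values for illustration only.
    BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs"
    CPL_OCNIC="HR1_refactored" PDY="20231025" cyc="00" OCNRES="025"

    # Old layout: resolution subdirectory, no member level
    echo "${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res.nc"

    # New layout: member level plus spelled-out component directory
    for ((ii = 0; ii <= 2; ii++)); do
      MEMDIR="mem$(printf '%03d' "${ii}")"
      echo "${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res.nc"
    done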
---
 parm/config/gefs/config.base.emc.dyn |   1 +
 parm/config/gefs/config.coupled_ic   |  43 --------
 parm/config/gefs/config.resources    |  12 +--
 parm/config/gefs/config.stage_ic     |  23 +++++
 parm/config/gfs/config.stage_ic      |  24 ++---
 scripts/exglobal_stage_ic.sh         | 148 +++++++++++++--------------
 workflow/applications/gefs.py        |   4 +-
 workflow/hosts/hera.yaml             |   2 +-
 workflow/hosts/wcoss2.yaml           |   2 +-
 workflow/rocoto/gefs_tasks.py        |  58 ++++++++++-
 workflow/rocoto/gfs_tasks.py         |  19 ++--
 workflow/setup_expt.py               |  11 +-
 12 files changed, 181 insertions(+), 166 deletions(-)
 delete mode 100644 parm/config/gefs/config.coupled_ic
 create mode 100644 parm/config/gefs/config.stage_ic

diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn
index 3d1742140..b62c921ed 100644
--- a/parm/config/gefs/config.base.emc.dyn
+++ b/parm/config/gefs/config.base.emc.dyn
@@ -40,6 +40,7 @@ export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2
 export PACKAGEROOT="@PACKAGEROOT@"    # TODO: set via prod_envir in Ops
 export COMROOT="@COMROOT@"    # TODO: set via prod_envir in Ops
 export COMINsyn="@COMINsyn@"
+export BASE_CPLIC="@BASE_CPLIC@"
 
 # USER specific paths
 export HOMEDIR="@HOMEDIR@"
diff --git a/parm/config/gefs/config.coupled_ic b/parm/config/gefs/config.coupled_ic
deleted file mode 100644
index 50fab283b..000000000
--- a/parm/config/gefs/config.coupled_ic
+++ /dev/null
@@ -1,43 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.coupled_ic ##########
-
-echo "BEGIN: config.coupled_ic"
-
-# Get task specific resources
-source ${EXPDIR}/config.resources coupled_ic
-
-if [[ "${machine}" == "WCOSS2" ]]; then
-  export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC"
-elif [[ "${machine}" == "HERA" ]]; then
-  export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC"
-elif [[ "${machine}" == "ORION" ]]; then
-  export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs"
-elif [[ "${machine}" == "S4" ]]; then
-  export BASE_CPLIC="/data/prod/glopara/coupled_ICs"
-elif [[ "${machine}" == "JET" ]]; then
-  export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs"
-fi
-
-
-case "${CASE}" in
-  "C384")
-    #C384 and P8 ICs
-    export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c
-    export CPL_ICEIC=CPC
-    export CPL_OCNIC=CPC3Dvar
-    export CPL_WAVIC=GEFSwave20210528v2
-    ;;
-  "C768")
-    export CPL_ATMIC=HR1
-    export CPL_ICEIC=HR1
-    export CPL_OCNIC=HR1
-    export CPL_WAVIC=HR1
-    ;;
-  *)
-    echo "Unrecognized case: ${1}"
-    exit 1
-    ;;
-esac
-
-echo "END: config.coupled_ic"
diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index 33156a768..74e985408 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -8,7 +8,7 @@ if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input task argument to set resource variables!"
     echo "argument can be any one of the following:"
-    echo "coupled_ic aerosol_init"
+    echo "stage_ic aerosol_init"
     echo "sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres"
     echo "ecen esfc efcs epos earc"
     echo "init_chem mom6ic ocnpost"
@@ -416,12 +416,12 @@ elif [[ ${step} = "arch" || ${step} = "earc" ]]; then
       eval "export memory_${step}=50GB"
     fi
 
-elif [[ ${step} = "coupled_ic" ]]; then
+elif [[ ${step} = "stage_ic" ]]; then
 
-    export wtime_coupled_ic="00:15:00"
-    export npe_coupled_ic=1
-    export npe_node_coupled_ic=1
-    export nth_coupled_ic=1
+    export wtime_stage_ic="00:15:00"
+    export npe_stage_ic=1
+    export npe_node_stage_ic=1
+    export nth_stage_ic=1
     export is_exclusive=True
 
 elif [[ ${step} = "ecen" ]]; then
diff --git a/parm/config/gefs/config.stage_ic b/parm/config/gefs/config.stage_ic
new file mode 100644
index 000000000..e2bb0af2b
--- /dev/null
+++ b/parm/config/gefs/config.stage_ic
@@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+########## config.stage_ic ##########
+
+echo "BEGIN: config.stage_ic"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" stage_ic
+
+case "${CASE}" in
+  "C48")
+    export CPL_ATMIC="gefs_test"
+    export CPL_ICEIC="gefs_test"
+    export CPL_OCNIC="gefs_test"
+    export CPL_WAVIC="gefs_test"
+    ;;
+  *)
+    echo "FATAL ERROR Unrecognized resolution: ${CASE}"
+    exit 1
+    ;;
+esac
+
+echo "END: config.stage_ic"
diff --git a/parm/config/gfs/config.stage_ic b/parm/config/gfs/config.stage_ic
index 4e29c3306..6d081b3fe 100644
--- a/parm/config/gfs/config.stage_ic
+++ b/parm/config/gfs/config.stage_ic
@@ -9,22 +9,22 @@ source "${EXPDIR}/config.resources" stage_ic
 
 case "${CASE}" in
   "C48" | "C96")
-    export CPL_ATMIC=workflowtest
-    export CPL_ICEIC=workflowtest
-    export CPL_OCNIC=workflowtest
-    export CPL_WAVIC=workflowtest
+    export CPL_ATMIC="workflow_${CASE}_refactored"
+    export CPL_ICEIC="workflow_${CASE}_refactored"
+    export CPL_OCNIC="workflow_${CASE}_refactored"
+    export CPL_WAVIC="workflow_${CASE}_refactored"
     ;;
   "C384")
-    export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c
-    export CPL_ICEIC=CPC
-    export CPL_OCNIC=CPC3Dvar
-    export CPL_WAVIC=GEFSwave20210528v2
+    export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c_refactored
+    export CPL_ICEIC=CPC_refactored
+    export CPL_OCNIC=CPC3Dvar_refactored
+    export CPL_WAVIC=GEFSwave20210528v2_refactored
     ;;
   "C768")
-    export CPL_ATMIC=HR2
-    export CPL_ICEIC=HR1
-    export CPL_OCNIC=HR1
-    export CPL_WAVIC=HR1
+    export CPL_ATMIC=HR2_refactored
+    export CPL_ICEIC=HR1_refactored
+    export CPL_OCNIC=HR1_refactored
+    export CPL_WAVIC=HR1_refactored
     ;;
   *)
     echo "FATAL ERROR Unrecognized resolution: ${CASE}"
diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh
index e42a4943d..fc95fa665 100755
--- a/scripts/exglobal_stage_ic.sh
+++ b/scripts/exglobal_stage_ic.sh
@@ -1,4 +1,4 @@
-#! /usr/bin/env bash
+#!/usr/bin/env bash
 
 source "${HOMEgfs}/ush/preamble.sh"
 
@@ -8,99 +8,89 @@ GDATE=$(date --utc -d "${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H)
 gPDY="${GDATE:0:8}"
 gcyc="${GDATE:8:2}"
 
+MEMDIR_ARRAY=()
+if [[ "${RUN}" == "gefs" ]]; then
+  # Populate the member_dirs array based on the value of NMEM_ENS
+  for ((ii = 0; ii <= "${NMEM_ENS}"; ii++)); do
+    MEMDIR_ARRAY+=("mem$(printf "%03d" "${ii}")")
+  done
+else
+  MEMDIR_ARRAY+=("")
+fi
+
 # Initialize return code
 err=0
 
-error_message(){
-    echo "FATAL ERROR: Unable to copy ${1} to ${2} (Error code ${3})"
+error_message() {
+  echo "FATAL ERROR: Unable to copy ${1} to ${2} (Error code ${3})"
 }
 
 ###############################################################
-# Start staging
-
-# Stage the FV3 initial conditions to ROTDIR (cold start)
-YMD=${PDY} HH=${cyc} generate_com -r COM_ATMOS_INPUT
-[[ ! -d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}"
-source="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${CDUMP}/${CASE}/INPUT/gfs_ctrl.nc"
-target="${COM_ATMOS_INPUT}/gfs_ctrl.nc"
-${NCP} "${source}" "${target}"
-rc=$?
-(( rc != 0 )) && error_message "${source}" "${target}" "${rc}"
-err=$((err + rc))
-for ftype in gfs_data sfc_data; do
-  for tt in $(seq 1 6); do
-    source="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${CDUMP}/${CASE}/INPUT/${ftype}.tile${tt}.nc"
-    target="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc"
-    ${NCP} "${source}" "${target}"
-    rc=$?
-    (( rc != 0 )) && error_message "${source}" "${target}" "${rc}"
-    err=$((err + rc))
-  done
-done
-
-# Stage ocean initial conditions to ROTDIR (warm start)
-if [[ "${DO_OCN:-}" = "YES" ]]; then
-  YMD=${gPDY} HH=${gcyc} generate_com -r COM_OCEAN_RESTART
-  [[ ! -d "${COM_OCEAN_RESTART}" ]] && mkdir -p "${COM_OCEAN_RESTART}"
-  source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res.nc"
-  target="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res.nc"
-  ${NCP} "${source}" "${target}"
+for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
+  # Stage the FV3 initial conditions to ROTDIR (cold start)
+  YMD=${PDY} HH=${cyc} generate_com COM_ATMOS_INPUT
+  [[ ! -d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}"
+  src="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${MEMDIR}/atmos/gfs_ctrl.nc"
+  tgt="${COM_ATMOS_INPUT}/gfs_ctrl.nc"
+  ${NCP} "${src}" "${tgt}"
   rc=$?
-  (( rc != 0 )) && error_message "${source}" "${target}" "${rc}"
+  ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
   err=$((err + rc))
-  case "${OCNRES}" in
-    "500" | "100")  # Only 5 degree or 1 degree ocean does not have MOM.res_[1-4].nc files
-    ;;
-    "025")  # Only 1/4 degree ocean has MOM.res_[1-4].nc files
-      for nn in $(seq 1 4); do
-        source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res_${nn}.nc"
-        if [[ -f "${source}" ]]; then
-          target="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res_${nn}.nc"
-          ${NCP} "${source}" "${target}"
-          rc=$?
-          (( rc != 0 )) && error_message "${source}" "${target}" "${rc}"
-          err=$((err + rc))
-        fi
-      done
-    ;;
-    *)
-      echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}"
-      rc=1
+  for ftype in gfs_data sfc_data; do
+    for ((tt = 1; tt <= 6; tt++)); do
+      src="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${MEMDIR}/atmos/${ftype}.tile${tt}.nc"
+      tgt="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc"
+      ${NCP} "${src}" "${tgt}"
+      rc=$?
+      tgt="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc"
+      ${NCP} "${src}" "${tgt}"
+      rc=$?
+      ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
       err=$((err + rc))
-    ;;
-  esac
-fi
-
-# Stage ice initial conditions to ROTDIR (warm start)
-if [[ "${DO_ICE:-}" = "YES" ]]; then
-  YMD=${gPDY} HH=${gcyc} generate_com -r COM_ICE_RESTART
-  [[ ! -d "${COM_ICE_RESTART}" ]] && mkdir -p "${COM_ICE_RESTART}"
-  ICERESdec=$(echo "${ICERES}" | awk '{printf "%0.2f", $1/100}')
-  source="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/ice/${ICERES}/cice5_model_${ICERESdec}.res_${PDY}${cyc}.nc"
-  target="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc"
-  ${NCP} "${source}" "${target}"
-  rc=$?
-  (( rc != 0 )) && error_message "${source}" "${target}" "${rc}"
-  err=$((err + rc))
-fi
+    done
+  done
 
-# Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc)
-if [[ "${DO_WAVE:-}" = "YES" ]]; then
-  YMD=${PDY} HH=${cyc} generate_com -r COM_WAVE_RESTART
-  [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}"
-  for grdID in ${waveGRD}; do  # TODO: check if this is a bash array; if so adjust
-    source="${BASE_CPLIC}/${CPL_WAVIC}/${PDY}${cyc}/wav/${grdID}/${PDY}.${cyc}0000.restart.${grdID}"
-    target="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}"
-    ${NCP} "${source}" "${target}"
+  # Stage ocean initial conditions to ROTDIR (warm start)
+  if [[ "${DO_OCN:-}" = "YES" ]]; then
+    YMD=${gPDY} HH=${gcyc} generate_com COM_OCEAN_RESTART
+    [[ ! -d "${COM_OCEAN_RESTART}" ]] && mkdir -p "${COM_OCEAN_RESTART}"
+    src="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res.nc"
+    tgt="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res.nc"
+    ${NCP} "${src}" "${tgt}"
     rc=$?
-    (( rc != 0 )) && error_message "${source}" "${target}" "${rc}"
+    ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
     err=$((err + rc))
-  done
-fi
+  fi
+  # Stage ice initial conditions to ROTDIR (warm start)
+  if [[ "${DO_ICE:-}" = "YES" ]]; then
+    YMD=${gPDY} HH=${gcyc} generate_com COM_ICE_RESTART
+    [[ ! -d "${COM_ICE_RESTART}" ]] && mkdir -p "${COM_ICE_RESTART}"
+    src="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/${MEMDIR}/ice/${PDY}.${cyc}0000.cice_model.res.nc"
+    tgt="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc"
+    ${NCP} "${src}" "${tgt}"
+    rc=$?
+    ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+    err=$((err + rc))
+  fi
+
+  # Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc)
+  if [[ "${DO_WAVE:-}" = "YES" ]]; then
+    YMD=${PDY} HH=${cyc} generate_com COM_WAVE_RESTART
+    [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}"
+    for grdID in ${waveGRD}; do # TODO: check if this is a bash array; if so adjust
+      src="${BASE_CPLIC}/${CPL_WAVIC}/${PDY}${cyc}/${MEMDIR}/wave/${PDY}.${cyc}0000.restart.${grdID}"
+      tgt="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}"
+      ${NCP} "${src}" "${tgt}"
+      rc=$?
+      ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+      err=$((err + rc))
+    done
+  fi
 
+done # for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
 ###############################################################
 # Check for errors and exit if any of the above failed
-if  [[ "${err}" -ne 0 ]] ; then
+if [[ "${err}" -ne 0 ]]; then
   echo "FATAL ERROR: Unable to copy ICs from ${BASE_CPLIC} to ${ROTDIR}; ABORT!"
   exit "${err}"
 fi
diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py
index a46451bd3..8ac4cdc18 100644
--- a/workflow/applications/gefs.py
+++ b/workflow/applications/gefs.py
@@ -14,7 +14,7 @@ class GEFSAppConfig(AppConfig):
         """
         Returns the config_files that are involved in gefs
         """
-        configs = ['fcst']
+        configs = ['stage_ic', 'fcst']
 
         if self.nens > 0:
             configs += ['efcs']
@@ -32,7 +32,7 @@ class GEFSAppConfig(AppConfig):
 
     def get_task_names(self):
 
-        tasks = ['fcst']
+        tasks = ['stage_ic', 'fcst']
 
         if self.nens > 0:
             tasks += ['efcs']
diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml
index 61270b7b2..31911f2d2 100644
--- a/workflow/hosts/hera.yaml
+++ b/workflow/hosts/hera.yaml
@@ -1,6 +1,6 @@
 BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git'
 DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump'
-BASE_CPLIC: '/scratch1/NCEPDEV/climate/role.ufscpara/IC'
+BASE_CPLIC: '/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs'
 PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara'
 COMROOT: '/scratch1/NCEPDEV/global/glopara/com'
 COMINsyn: '${COMROOT}/gfs/prod/syndat'
diff --git a/workflow/hosts/wcoss2.yaml b/workflow/hosts/wcoss2.yaml
index 2a301064d..41e1044ef 100644
--- a/workflow/hosts/wcoss2.yaml
+++ b/workflow/hosts/wcoss2.yaml
@@ -1,6 +1,6 @@
 BASE_GIT: '/lfs/h2/emc/global/save/emc.global/git'
 DMPDIR: '/lfs/h2/emc/dump/noscrub/dump'
-BASE_CPLIC: '/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC'
+BASE_CPLIC: '/lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/prototype_ICs'
 PACKAGEROOT: '${PACKAGEROOT:-"/lfs/h1/ops/prod/packages"}'
 COMROOT: '${COMROOT:-"/lfs/h1/ops/prod/com"}'
 COMINsyn: '${COMROOT}/gfs/v16.3/syndat'
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index b0c56bdb6..c5dae3a13 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -8,9 +8,64 @@ class GEFSTasks(Tasks):
     def __init__(self, app_config: AppConfig, cdump: str) -> None:
         super().__init__(app_config, cdump)
 
+    def stage_ic(self):
+
+        cpl_ic = self._configs['stage_ic']
+
+        deps = []
+
+        # Atm ICs
+        if self.app_config.do_atm:
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/atmos"
+            for file in ['gfs_ctrl.nc'] + \
+                        [f'{datatype}_data.tile{tile}.nc'
+                         for datatype in ['gfs', 'sfc']
+                         for tile in range(1, self.n_tiles + 1)]:
+                data = f"{prefix}/{file}"
+                dep_dict = {'type': 'data', 'data': data}
+                deps.append(rocoto.add_dependency(dep_dict))
+
+        # Ocean ICs
+        if self.app_config.do_ocean:
+            ocn_res = f"{self._base.get('OCNRES', '025'):03d}"
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/mem000/ocean"
+            data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc"
+            dep_dict = {'type': 'data', 'data': data}
+            deps.append(rocoto.add_dependency(dep_dict))
+            if ocn_res in ['025']:
+                # 0.25 degree ocean model also has these additional restarts
+                for res in [f'res_{res_index}' for res_index in range(1, 4)]:
+                    data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc"
+                    dep_dict = {'type': 'data', 'data': data}
+                    deps.append(rocoto.add_dependency(dep_dict))
+
+        # Ice ICs
+        if self.app_config.do_ice:
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/mem000/ice"
+            data = f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc"
+            dep_dict = {'type': 'data', 'data': data}
+            deps.append(rocoto.add_dependency(dep_dict))
+
+        # Wave ICs
+        if self.app_config.do_wave:
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave"
+            for wave_grid in self._configs['waveinit']['waveGRD'].split():
+                data = f"{prefix}/{wave_grid}/@Y@m@d.@H0000.restart.{wave_grid}"
+                dep_dict = {'type': 'data', 'data': data}
+                deps.append(rocoto.add_dependency(dep_dict))
+
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+        resources = self.get_resource('stage_ic')
+        task = create_wf_task('stage_ic', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies)
+
+        return task
+
     def fcst(self):
         # TODO: Add real dependencies
         dependencies = []
+        dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'}
+        dependencies.append(rocoto.add_dependency(dep_dict))
 
         resources = self.get_resource('fcst')
         task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies)
@@ -18,8 +73,9 @@ class GEFSTasks(Tasks):
         return task
 
     def efcs(self):
-        # TODO: Add real dependencies
         dependencies = []
+        dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'}
+        dependencies.append(rocoto.add_dependency(dep_dict))
 
         efcsenvars = self.envars.copy()
         efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#'))
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 56449cb9d..d5c5ba2ed 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -23,13 +23,12 @@ class GFSTasks(Tasks):
 
         # Atm ICs
         if self.app_config.do_atm:
-            atm_res = self._base.get('CASE', 'C384')
-            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/{self.cdump}"
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/atmos"
             for file in ['gfs_ctrl.nc'] + \
                         [f'{datatype}_data.tile{tile}.nc'
                          for datatype in ['gfs', 'sfc']
                          for tile in range(1, self.n_tiles + 1)]:
-                data = f"{prefix}/{atm_res}/INPUT/{file}"
+                data = f"{prefix}/{file}"
                 dep_dict = {'type': 'data', 'data': data}
                 deps.append(rocoto.add_dependency(dep_dict))
         else:  # data-atmosphere
@@ -42,31 +41,29 @@ class GFSTasks(Tasks):
         # Ocean ICs
         if self.app_config.do_ocean:
             ocn_res = f"{self._base.get('OCNRES', '025'):03d}"
-            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/ocn"
-            data = f"{prefix}/{ocn_res}/MOM.res.nc"
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/ocean"
+            data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc"
             dep_dict = {'type': 'data', 'data': data}
             deps.append(rocoto.add_dependency(dep_dict))
             if ocn_res in ['025']:
                 # 0.25 degree ocean model also has these additional restarts
                 for res in [f'res_{res_index}' for res_index in range(1, 4)]:
-                    data = f"{prefix}/{ocn_res}/MOM.{res}.nc"
+                    data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc"
                     dep_dict = {'type': 'data', 'data': data}
                     deps.append(rocoto.add_dependency(dep_dict))
 
         # Ice ICs
         if self.app_config.do_ice:
-            ice_res = f"{self._base.get('ICERES', '025'):03d}"
-            ice_res_dec = f'{float(ice_res) / 100:.2f}'
             prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/ice"
-            data = f"{prefix}/{ice_res}/cice5_model_{ice_res_dec}.res_@Y@m@d@H.nc"
+            data = f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc"
             dep_dict = {'type': 'data', 'data': data}
             deps.append(rocoto.add_dependency(dep_dict))
 
         # Wave ICs
         if self.app_config.do_wave:
-            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/wav"
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/wave"
             for wave_grid in self._configs['waveinit']['waveGRD'].split():
-                data = f"{prefix}/{wave_grid}/@Y@m@d.@H0000.restart.{wave_grid}"
+                data = f"{prefix}/@Y@m@d.@H0000.restart.{wave_grid}"
                 dep_dict = {'type': 'data', 'data': data}
                 deps.append(rocoto.add_dependency(dep_dict))
 
diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py
index a9810ec3f..a808cafd9 100755
--- a/workflow/setup_expt.py
+++ b/workflow/setup_expt.py
@@ -232,16 +232,7 @@ def fill_COMROT_forecasts(host, inputs):
     """
     Implementation of 'fill_COMROT' for forecast-only mode
     """
-    if inputs.system in ['gfs']:
-        print('forecast-only mode treats ICs differently and cannot be staged here')
-    elif inputs.system in ['gefs']:  # Temporarily copy ICs from icsdir into COM for      testing
-        print('temporary hack to stage gefs ICs for testing')
-        comrot = os.path.join(inputs.comrot, inputs.pslot)
-        idatestr = datetime_to_YMDH(inputs.idate)
-        current_cycle_dir = f"gefs.{idatestr[:8]}"
-        cmd = f"cp -as {inputs.icsdir}/{current_cycle_dir} {comrot}/{current_cycle_dir}"
-        os.system(cmd)
-    return
+    print('forecast-only mode treats ICs differently and cannot be staged here')
 
 
 def fill_EXPDIR(inputs):
-- 
GitLab


From eabc82ad30c118645c0e6e216162d823882a8139 Mon Sep 17 00:00:00 2001
From: Guillaume Vernieres <guillaume.vernieres@noaa.gov>
Date: Mon, 30 Oct 2023 10:31:27 -0400
Subject: [PATCH 54/54] Make the early cycle work with the coupled UFS
 configured as S2S  (#1954)

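The key piece is a RUN override in JGLOBAL_STAGE_IC and JGLOBAL_FORECAST: restart conditions
for the gfs cycle come from gdas, so warm-start ocean and ice restarts are generated and
looked up under the gdas RUN. A minimal sketch of the mapping (the CDUMP value and echo are
illustrative):

    #!/usr/bin/env bash
    CDUMP="gfs"                     # example value; set by the job environment in practice
    rCDUMP=${CDUMP}
    [[ ${CDUMP} = "gfs" ]] && rCDUMP="gdas"
    # COM_OCEAN_RESTART / COM_ICE_RESTART for the previous cycle are then generated with RUN=${rCDUMP}
    echo "staging warm-start restarts from RUN=${rCDUMP} for CDUMP=${CDUMP}"
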
---
 jobs/JGLOBAL_FORECAST        | 4 ++--
 jobs/JGLOBAL_STAGE_IC        | 4 ++++
 scripts/exglobal_stage_ic.sh | 4 ++--
 workflow/rocoto/gfs_tasks.py | 2 +-
 4 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST
index 7fb0fbe4f..e42c81eaa 100755
--- a/jobs/JGLOBAL_FORECAST
+++ b/jobs/JGLOBAL_FORECAST
@@ -61,13 +61,13 @@ fi
 if [[ ${DO_OCN} == "YES" ]]; then
   YMD=${PDY} HH=${cyc} generate_com -rx COM_MED_RESTART COM_OCEAN_RESTART COM_OCEAN_INPUT \
     COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS
-  RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \
+  RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \
     COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL
 fi
 
 if [[ ${DO_ICE} == "YES" ]]; then
   YMD=${PDY} HH=${cyc} generate_com -rx COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART
-  RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \
+  RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \
     COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL
 fi
 
diff --git a/jobs/JGLOBAL_STAGE_IC b/jobs/JGLOBAL_STAGE_IC
index 437c8f40a..317231871 100755
--- a/jobs/JGLOBAL_STAGE_IC
+++ b/jobs/JGLOBAL_STAGE_IC
@@ -3,6 +3,10 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic"
 
+# Restart conditions for GFS cycle come from GDAS
+# shellcheck disable=SC2153
+rCDUMP=${CDUMP}
+[[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas"
 
 # Execute the Script
 "${HOMEgfs}/scripts/exglobal_stage_ic.sh"
diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh
index fc95fa665..43812adc8 100755
--- a/scripts/exglobal_stage_ic.sh
+++ b/scripts/exglobal_stage_ic.sh
@@ -52,7 +52,7 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
 
   # Stage ocean initial conditions to ROTDIR (warm start)
   if [[ "${DO_OCN:-}" = "YES" ]]; then
-    YMD=${gPDY} HH=${gcyc} generate_com COM_OCEAN_RESTART
+    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_OCEAN_RESTART
     [[ ! -d "${COM_OCEAN_RESTART}" ]] && mkdir -p "${COM_OCEAN_RESTART}"
     src="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res.nc"
     tgt="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res.nc"
@@ -63,7 +63,7 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
   fi
   # Stage ice initial conditions to ROTDIR (warm start)
   if [[ "${DO_ICE:-}" = "YES" ]]; then
-    YMD=${gPDY} HH=${gcyc} generate_com COM_ICE_RESTART
+    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_ICE_RESTART
     [[ ! -d "${COM_ICE_RESTART}" ]] && mkdir -p "${COM_ICE_RESTART}"
     src="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/${MEMDIR}/ice/${PDY}.${cyc}0000.cice_model.res.nc"
     tgt="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc"
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index d5c5ba2ed..55055a46e 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -363,7 +363,7 @@ class GFSTasks(Tasks):
 
     def ocnanalprep(self):
 
-        ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"])
+        ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'})
 
         deps = []
         data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc'
-- 
GitLab