diff --git a/Dockerfile b/Dockerfile
index 1d5a59f..c492cf6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,5 @@
-FROM centos:centos7
+FROM rockylinux:8.8
+#FROM --platform=$TARGETPLATFORM rockylinux:8.8
 
 # Install needed utils
 RUN rm -fr /var/cache/yum/* && yum clean all && yum -y install --setopt=tsflags=nodocs epel-release && \
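
The commented FROM variant is the hook for multi-arch builds: when the Dockerfile references $TARGETPLATFORM, buildx supplies it automatically for each platform in the build. A minimal sketch of exercising it (the image name is illustrative):

    docker buildx build --platform linux/amd64,linux/arm64 \
        -t example/configbuilder:multiarch .
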
diff --git a/Dockerfile.template b/Dockerfile.template
index 57096d5..e628e2f 100644
--- a/Dockerfile.template
+++ b/Dockerfile.template
@@ -1,4 +1,4 @@
-FROM tier/shib-idp:latest
+FROM i2incommon/shib-idp:latest5
 
 # The build args below can be used at build-time to tell the build process where to find your config files.  This is for a completely burned-in config.
 ARG TOMCFG=config/tomcat
@@ -22,7 +22,7 @@ ADD ${SHBVIEWS} /opt/shibboleth-idp/views
 #ADD ${SHBMSGS} /opt/shibboleth-idp/messages
 ADD ${SHBMD} /opt/shibboleth-idp/metadata
 
-# new for 4.1.0: install the Duo OIDC integration
+# new for 4.1.0+: install the Duo OIDC integration
 #      https://wiki.shibboleth.net/confluence/display/IDPPLUGINS/DuoOIDCAuthnConfiguration
 # For unattended install of plugins, trust must be manually bootstrapped.  You should never automate the retrieval of this file (like this) in production.
 #ADD https://github.internet2.edu/raw/docker/ShibbIdP_ConfigBuilder_Container/master/oidc-common-truststore.asc /opt/shibboleth-idp/credentials/net.shibboleth.idp.plugin.authn.duo.nimbus/truststore.asc
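
Since these are ordinary build args, a burned-in config can point the build at alternate directories; a sketch, with the paths purely illustrative:

    docker build \
        --build-arg TOMCFG=my-site/config/tomcat \
        --build-arg SHBMD=my-site/config/shib-idp/metadata \
        -t my-site/shib-idp:burned-in .
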
diff --git a/Jenkinsfile b/Jenkinsfile
index 64d4325..2b1e16e 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,76 +1,160 @@
-node {
 
-  stage 'Checkout'
+pipeline {
+    agent { node { label 'docker-multi-arch' } }
+    environment { 
+        // placeholder values; the real ones are set in the 'Setting build context' stage
+        maintainer = "t"
+        imagename = 's'
+        tag = 'l'
+        DOCKERHUBPW=credentials('tieradmin-dockerhub-pw')
 
-    checkout scm
-
-  stage 'Acquire util'
-    
-    sh 'mkdir -p tmp && mkdir -p bin'
-    dir('tmp'){
-      git([ url: "https://github.internet2.edu/docker/util.git",
-          credentialsId: "jenkins-github-access-token" ])
-      sh 'rm -rf ./bin/windows/'
-      sh 'mv -f ./bin/* ../bin/.'
     }
-    sh 'rm -rf tmp'
+    stages {
+        stage('Setting build context') {
+            steps {
+                script {
+                    maintainer = maintain()
+                    imagename = imagename()
+                    if(env.BRANCH_NAME == "master") {
+                       tag = "latest"
+                    } else {
+                       tag = env.BRANCH_NAME
+                    }
+                    if(!imagename){
+                        echo "You must define an imagename in common.bash"
+                        currentBuild.result = 'FAILURE'
+                     }
+                    sh 'mkdir -p tmp && mkdir -p bin'
+                    dir('tmp'){
+                      git([ url: "https://github.internet2.edu/docker/util.git", credentialsId: "jenkins-github-access-token" ])
+                      sh 'rm -rf ../bin/*'
+                      sh 'mv ./bin/* ../bin/.'
+                    }
+                    // Build and test scripts expect that 'tag' is present in common.bash. This is necessary for both Jenkins and standalone testing.
+                    // Multiple 'tag' assignments there are fine; the last one sourced wins.
+                    sh "echo >> common.bash ; echo \"tag=\\\"${tag}\\\"\" >> common.bash ; echo common.bash ; cat common.bash"
+                }  
+             }
+        }    
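+        // At this point common.bash ends with a tag assignment that downstream scripts can source,
+        // e.g. on master (repo values from common.bash):
+        //     source common.bash
+        //     echo "${maintainer}/${imagename}:${tag}"  # -> i2incommon/shibbidp_configbuilder_container:latest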
+        stage('Clean') {
+            steps {
+                script {
+                   try{
+                     sh 'bin/destroy.sh > debug 2>&1'
+                   } catch(error) {
+                     def error_details = readFile('./debug');
+                     def message = "BUILD ERROR: There was a problem cleaning the build environment. \n\n ${error_details}"
+                     sh "rm -f ./debug"
+                     handleError(message)
+                   }
+                }
+            }
+        } 
+        stage('Build') {
+            steps {
+                script {
+                  try{
+                        sh 'docker login -u tieradmin -p $DOCKERHUBPW'
+                        // creating the builder fails if it already exists, so it stays commented out
+                        // sh 'docker buildx create --use --name multiarch --append'
+                        sh 'docker buildx inspect --bootstrap'
+                        sh 'docker buildx ls'
+                        sh "docker buildx build --platform linux/amd64 -t ${imagename}_${tag} --load ."
+                        sh "docker buildx build --platform linux/arm64 -t ${imagename}_${tag}:arm64 --load ."
+                  } catch(error) {
+                     // nothing in this stage writes ./debug, so fall back to the exception text
+                     def error_details = fileExists('./debug') ? readFile('./debug') : "${error}"
+                     def message = "BUILD ERROR: There was a problem building ${maintainer}/${imagename}:${tag}. \n\n ${error_details}"
+                     sh "rm -f ./debug"
+                     handleError(message)
+                  }
+                }
+            }
+        }
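+        // Note: '--load' imports the image into the local Docker store and supports only one
+        // platform per invocation, hence the separate amd64 and arm64 builds above. The builder
+        // itself is one-time setup on the agent, mirroring the commented 'buildx create' line:
+        //     docker buildx create --name multiarch --use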
+        stage('Scan') {
+            steps {
+                script {
+                   try {
+                         echo "Starting security scan..."
+                         // Install trivy and HTML template
+                         sh 'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v0.31.1'
+                         sh 'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/html.tpl > html.tpl'
 
-  stage 'Setting build context'
-  
-    def maintainer = maintainer()
-    def imagename = imagename()
-    def tag
-    
-    // Tag images created on master branch with 'latest'
-    if(env.BRANCH_NAME == "master"){
-      tag = "latest"
-    }else{
-      tag = env.BRANCH_NAME
-    }
-        
-    if(!imagename){
-      echo "You must define an imagename in common.bash"
-      currentBuild.result = 'FAILURE'
-     }
-     if(maintainer){
-      echo "Building ${imagename}:${tag} for ${maintainer}"
-     }
-     
-  stage 'Build'
-    try{
-      sh 'bin/build.sh &> debug'
-    } catch(error) {
-      def error_details = readFile('./debug');
-      def message = "BUILD ERROR: There was a problem building ${imagename}:${tag}. \n\n ${error_details}"
-      sh "rm -f ./debug"
-      handleError(message)
+                         // Scan container for all vulnerability levels
+                         echo "Scanning for all vulnerabilities..."
+                         sh 'mkdir -p reports'
+                         // The 2 commented scans below are OS-only alternatives, for use if timeout issues occur
+                         sh "trivy image --timeout 10m --ignore-unfixed --vuln-type os,library --severity CRITICAL,HIGH --no-progress --security-checks vuln --format template --template '@html.tpl' -o reports/container-scan.html ${imagename}_${tag}"
+                         // sh "trivy image --ignore-unfixed --vuln-type os --severity CRITICAL,HIGH --no-progress --security-checks vuln --format template --template '@html.tpl' -o reports/container-scan.html ${imagename}_${tag}"
+                         sh "trivy image --timeout 10m --ignore-unfixed --vuln-type os,library --severity CRITICAL,HIGH --no-progress --security-checks vuln --format template --template '@html.tpl' -o reports/container-scan-arm.html ${imagename}_${tag}:arm64"
+                         // sh "trivy image --ignore-unfixed --vuln-type os --severity CRITICAL,HIGH --no-progress --security-checks vuln --format template --template '@html.tpl' -o reports/container-scan-arm.html ${imagename}_${tag}:arm64"
+                         publishHTML target : [
+                             allowMissing: true,
+                             alwaysLinkToLastBuild: true,
+                             keepAll: true,
+                             reportDir: 'reports',
+                             reportFiles: 'container-scan.html',
+                             reportName: 'Security Scan',
+                             reportTitles: 'Security Scan'
+                          ]
+                         publishHTML target : [
+                             allowMissing: true,
+                             alwaysLinkToLastBuild: true,
+                             keepAll: true,
+                             reportDir: 'reports',
+                             reportFiles: 'container-scan-arm.html',
+                             reportName: 'Security Scan (ARM)',
+                             reportTitles: 'Security Scan (ARM)'
+                          ]
+                         // Scan again and fail the build on CRITICAL vulns
+                         // The scans below can be temporarily commented out to keep the build from failing
+                         echo "Scanning for CRITICAL vulnerabilities only (fatal)..."
+                         // The 2 active scans below are temporarily OS-only (no library scan) while timeout issues are worked out
+                         // sh "trivy image --ignore-unfixed --vuln-type os,library --exit-code 1 --severity CRITICAL ${imagename}_${tag}"
+                         // sh "trivy image --ignore-unfixed --vuln-type os,library --exit-code 1 --severity CRITICAL ${imagename}_${tag}:arm64"
+                         sh "trivy image --ignore-unfixed --vuln-type os --exit-code 1 --severity CRITICAL ${imagename}_${tag}"
+                         sh "trivy image --ignore-unfixed --vuln-type os --exit-code 1 --severity CRITICAL ${imagename}_${tag}:arm64"
+                         //echo "Skipping scan for CRITICAL vulnerabilities (temporary)..."
+                   } catch(error) {
+                           // the scan commands don't write ./debug, so fall back to the exception text
+                           def error_details = fileExists('./debug') ? readFile('./debug') : "${error}"
+                           def message = "BUILD ERROR: There was a problem scanning ${imagename}:${tag}. \n\n ${error_details}"
+                           sh "rm -f ./debug"
+                           handleError(message)
+                   }
+                }
+            }
+        }
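+        // The fatal pass works because trivy's '--exit-code 1' makes the scan exit non-zero when
+        // vulnerabilities at the requested severity are found, failing the sh step and landing in
+        // the catch block. A standalone equivalent (image name illustrative):
+        //     trivy image --ignore-unfixed --vuln-type os --exit-code 1 --severity CRITICAL example_latest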
+        stage('Push') {
+            steps {
+                script {
+                        sh 'docker login -u tieradmin -p $DOCKERHUBPW'
+                        // creating the builder fails if it already exists, so it stays commented out
+                        // sh 'docker buildx create --use --name multiarch --append'
+                        sh 'docker buildx inspect --bootstrap'
+                        sh 'docker buildx ls'
+                        echo "Pushing image to dockerhub..."
+                        sh "docker buildx build --push --platform linux/arm64,linux/amd64 -t ${maintainer}/${imagename}:${tag} ."
+                 }
+            }
+        }
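+        // The push rebuilds both platforms (largely from buildx cache) and publishes them under a
+        // single multi-arch manifest list. To confirm both architectures landed (repo name from common.bash):
+        //     docker buildx imagetools inspect i2incommon/shibbidp_configbuilder_container:latest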
+        stage('Notify') {
+            steps{
+                echo "$maintainer"
+                slackSend color: 'good', message: "$maintainer/$imagename:$tag pushed to DockerHub"
+            }
+        }
     }
-    
-/*  stage 'Tests'
-  
-    try{
-      sh 'bin/test.sh &> debug'
-    } catch(error) {
-      def error_details = readFile('./debug');
-      def message = "BUILD ERROR: There was a problem building ${imagename}:${tag}. \n\n ${error_details}"
-      sh "rm -f ./debug"
-      handleError(message)
-    }*/
-    
-  stage 'Push'
-
-    docker.withRegistry('https://registry.hub.docker.com/',   "dockerhub-$maintainer") {
-          def baseImg = docker.build("$maintainer/$imagename")
-          baseImg.push("$tag")
+    post { 
+        always { 
+            echo 'Done Building.'
+        }
+        failure {
+            // slackSend color: 'good', message: "Build failed"
+            handleError("BUILD ERROR: There was a problem building ${maintainer}/${imagename}:${tag}.")
+        }
     }
-    
-  stage 'Notify'
-  
-    slackSend color: 'good', message: "$maintainer/$imagename:$tag pushed to DockerHub"
-
 }
 
-def maintainer() {
+
+def maintain() {
   def matcher = readFile('common.bash') =~ 'maintainer="(.+)"'
   matcher ? matcher[0][1] : 'tier'
 }
@@ -84,6 +168,7 @@ def handleError(String message){
   echo "${message}"
   currentBuild.setResult("FAILED")
   slackSend color: 'danger', message: "${message}"
-  //step([$class: 'Mailer', notifyEveryUnstableBuild: true, recipients: 'chubing@internet2.edu', sendToIndividuals: true])
+  //step([$class: 'Mailer', notifyEveryUnstableBuild: true, recipients: 'pcaskey@internet2.edu', sendToIndividuals: true])
   sh 'exit 1'
 }
+
diff --git a/common.bash b/common.bash
index e630b3b..6412aed 100644
--- a/common.bash
+++ b/common.bash
@@ -1,5 +1,6 @@
 registry="docker.io"
-maintainer="tier"
+maintainer="i2incommon"
+previous_maintainer="tier"
 basename="shibbidp_configbuilder_container"
 imagename="shibbidp_configbuilder_container"
-version="0.7"
+version="0.8"
diff --git a/configBuilder.sh b/configBuilder.sh
index 45969e8..505cdf3 100755
--- a/configBuilder.sh
+++ b/configBuilder.sh
@@ -7,9 +7,11 @@ cd /scriptrun
 
 
 # script config items
-SHB_CFG_URL=https://github.internet2.edu/docker/shib-idp-conftree/archive/4.1-InCommon.zip
-TOM_CFG_URL=https://github.internet2.edu/docker/shib-idp-tomcat-config/archive/master.zip
-TMP_DIR_S=/tmp/4.1-InCommon
+#SHB_CFG_URL=https://github.internet2.edu/docker/shib-idp-conftree/archive/4.1-InCommon.zip
+SHB_CFG_URL=https://github.internet2.edu/docker/shib-idp-conftree/archive/refs/heads/5.0-InCommon.zip
+#TOM_CFG_URL=https://github.internet2.edu/docker/shib-idp-tomcat-config/archive/master.zip
+TOM_CFG_URL=https://github.internet2.edu/docker/shib-idp-tomcat-config/archive/refs/heads/tomcat10-1.zip
+TMP_DIR_S=/tmp/5.0-InCommon
 TMP_DIR_T=/tmp/tomcfg
 TMP_DIR_D=/tmp/buildfiles
 
@@ -78,17 +80,17 @@ unzip -o -d ${TMP_DIR_T} ${TMP_DIR_T}.zip > /dev/null 2>&1
 ### cp relevant folders from expanded zip to appropriate locations at $PWD/* ###
 ################################################################################
 #
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/conf/idp.properties ${SHBCFG}
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/conf/ldap.properties ${SHBCFG}
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/conf/metadata-providers.xml ${SHBCFG}
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/conf/attribute-resolver.xml ${SHBCFG}
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/conf/attribute-filter.xml ${SHBCFG}
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/conf/logback.xml ${SHBCFG}
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/views/* ${SHBVIEWS}
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/credentials/inc-md-cert-mdq.pem ${SHBCREDS}
-cp -rf ${TMP_DIR_S}/shib-idp-conftree-4.1-InCommon/credentials/sealer.kver ${SHBCREDS}
-cp -rf ${TMP_DIR_T}/shib-idp-tomcat-config-master/conf/* ${TOMCFG}
-cp -rf ${TMP_DIR_T}/shib-idp-tomcat-config-master/wwwroot/* ${TOMWWWROOT}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/conf/idp.properties ${SHBCFG}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/conf/ldap.properties ${SHBCFG}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/conf/metadata-providers.xml ${SHBCFG}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/conf/attribute-resolver.xml ${SHBCFG}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/conf/attribute-filter.xml ${SHBCFG}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/conf/logback.xml ${SHBCFG}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/views/* ${SHBVIEWS}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/credentials/inc-md-cert-mdq.pem ${SHBCREDS}
+cp -rf ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/credentials/sealer.kver ${SHBCREDS}
+cp -rf ${TMP_DIR_T}/shib-idp-tomcat-config-tomcat10-1/conf/* ${TOMCFG}
+cp -rf ${TMP_DIR_T}/shib-idp-tomcat-config-tomcat10-1/wwwroot/* ${TOMWWWROOT}
 
 
 #####################################################
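
The extracted directory names above follow GitHub's archive convention: archive/refs/heads/<branch>.zip unpacks to <repo>-<branch>/. A sketch of the fetch-and-extract pattern, assuming curl for the download step:

    curl -sL -o ${TMP_DIR_S}.zip ${SHB_CFG_URL}
    unzip -o -d ${TMP_DIR_S} ${TMP_DIR_S}.zip
    # -> ${TMP_DIR_S}/shib-idp-conftree-5.0-InCommon/
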
@@ -287,30 +289,47 @@ openssl x509 -req -days 1825 -in idp-encryption.csr -signkey idp-encryption.key
 #
 cp *.key *.crt ../${SHBCREDS}
 
+
 # build self-signed cert for Tomcat to use with https
 #
 # ensure keytool
-command -v keytool >/dev/null 2>&1 || { echo >&2 "ERROR: keytool is required, but doesn't appear to be installed.  Aborting..."; exit 1; }
+#command -v keytool >/dev/null 2>&1 || { echo >&2 "ERROR: keytool is required, but doesn't appear to be installed.  Aborting..."; exit 1; }
+#
+#if test -f ssl_keystore.jks; then
+#    mv ssl_keystore.jks ssl_keystore.jks.old
+#fi
+#
+#cat > data.conf << EOF
+#${FQDN}
+#SUBJ_OU
+#SUBJ_O
+#SUBJ_CITY
+#SUBJ_STATE
+#SUBJ_COUNTRY
+#yes
+#
+#
+#EOF
+#
+#STOREPWD=$(uuidgen)
+#keytool -genkey -keyalg RSA -alias selfsigned -keystore ssl_keystore.jks -storepass $STOREPWD -validity 360 -keysize 2048 < data.conf >> ${LOGFILE} 2>&1
+#cp ssl_keystore.jks ../${TOMCERT}/keystore.jks
 
-if test -f ssl_keystore.jks; then
-    mv ssl_keystore.jks ssl_keystore.jks.old
-fi
+# new https cert/key (PEM)
+openssl req -new -nodes -newkey rsa:2048 -subj "/commonName=${FQDN}" -batch -keyout idp-https.key -out idp-https.csr >> ${LOGFILE} 2>&1
+openssl x509 -req -days 365 -in idp-https.csr -signkey idp-https.key -out idp-https.crt >> ${LOGFILE} 2>&1
+#
+rm -f idp-https.csr
+cp idp-https.* ../${TOMCERT}
 
-cat > data.conf << EOF
-${FQDN}
-SUBJ_OU
-SUBJ_O
-SUBJ_CITY
-SUBJ_STATE
-SUBJ_COUNTRY
-yes
 
 
-EOF
 
-STOREPWD=$(uuidgen)
-keytool -genkey -keyalg RSA -alias selfsigned -keystore ssl_keystore.jks -storepass $STOREPWD -validity 360 -keysize 2048 < data.conf >> ${LOGFILE} 2>&1
-cp ssl_keystore.jks ../${TOMCERT}/keystore.jks
 
 #
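
With the keytool/JKS path retired, Tomcat now gets plain PEM material (idp-https.crt/.key), which Tomcat 8.5 and later can consume directly. A quick sanity check on the generated pair, using the filenames from the script:

    openssl x509 -in idp-https.crt -noout -subject -dates
    openssl rsa -in idp-https.key -check -noout
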
 # OK, next build the shibboleth sealer java keystore
@@ -429,12 +448,12 @@ EOF
 # configure SSL keystore password in tomcat's config file: 
 #    conf/tomcat/server.xml replace: keystorePass="password"
 #
-echo "Updating Tomcat's server.xml with the generated password"
-
-if test \! -f ${TOMCFG}/server.xml.dist; then
-    cp ${TOMCFG}/server.xml ${TOMCFG}/server.xml.dist
-fi
-sed "s#keystorePass=\"password\"#keystorePass=\"${STOREPWD}\"#" ${TOMCFG}/server.xml.dist > ${TOMCFG}/server.xml
+#echo "Updating Tomcat's server.xml with the generated password"
+#
+#if test \! -f ${TOMCFG}/server.xml.dist; then
+#    cp ${TOMCFG}/server.xml ${TOMCFG}/server.xml.dist
+#fi
+#sed "s#keystorePass=\"password\"#keystorePass=\"${STOREPWD}\"#" ${TOMCFG}/server.xml.dist > ${TOMCFG}/server.xml
 
 
 #