diff --git a/gcp-deployment/gcp-cleanup-guide.md b/gcp-deployment/gcp-cleanup-guide.md
index 11750e8..fe1f6cd 100644
--- a/gcp-deployment/gcp-cleanup-guide.md
+++ b/gcp-deployment/gcp-cleanup-guide.md
@@ -37,14 +37,6 @@ gcloud container images list-tags gcr.io/YOUR_PROJECT_ID/IMAGE_NAME --format='ge
 # Delete disks
 gcloud compute disks delete DISK_NAME --zone=ZONE

-# Delete firewall rules
-gcloud compute firewall-rules delete RULE_NAME
-
-# Delete load balancers
-gcloud compute forwarding-rules delete RULE_NAME --global
-
-# Delete static IPs
-gcloud compute addresses delete ADDRESS_NAME --region=REGION
 ```


diff --git a/gcp-deployment/k8s-artifacts/data-pipeline-job.yaml b/gcp-deployment/k8s-artifacts/data-pipeline-job.yaml
index eefcdae..33fd7b9 100644
--- a/gcp-deployment/k8s-artifacts/data-pipeline-job.yaml
+++ b/gcp-deployment/k8s-artifacts/data-pipeline-job.yaml
@@ -14,7 +14,7 @@ spec:
         emptyDir: {}
       initContainers:
       - name: extract
-        image: gcr.io/${PROJECT_ID}/data-pipeline-extract:latest
+        image: us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-extract:latest
         envFrom:
         - secretRef:
             name: db-credentials
@@ -22,7 +22,7 @@ spec:
         - name: shared-data
           mountPath: /data
       - name: load
-        image: gcr.io/${PROJECT_ID}/data-pipeline-load:latest
+        image: us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-load:latest
         envFrom:
         - secretRef:
             name: db-credentials
@@ -31,7 +31,7 @@ spec:
           mountPath: /data
       containers:
       - name: transform
-        image: gcr.io/${PROJECT_ID}/data-pipeline-transform:latest
+        image: us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-transform:latest
         envFrom:
         - secretRef:
             name: db-credentials
diff --git a/gcp-deployment/k8s-artifacts/flask-app-deployment.yaml b/gcp-deployment/k8s-artifacts/flask-app-deployment.yaml
index 70a1bb0..4db8a6b 100644
--- a/gcp-deployment/k8s-artifacts/flask-app-deployment.yaml
+++ b/gcp-deployment/k8s-artifacts/flask-app-deployment.yaml
@@ -14,7 +14,7 @@ spec:
     spec:
       containers:
       - name: flask-app
-        image: gcr.io/${PROJECT_ID}/flask-app:latest
+        image: us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/flask-app:latest
         envFrom:
         - secretRef:
             name: db-credentials
diff --git a/gcp-deployment/weather-data-pipeline-deployment-guide.md b/gcp-deployment/weather-data-pipeline-deployment-guide.md
index 3021d8f..17e379c 100644
--- a/gcp-deployment/weather-data-pipeline-deployment-guide.md
+++ b/gcp-deployment/weather-data-pipeline-deployment-guide.md
@@ -18,9 +18,15 @@ export PROJECT_ID=$(gcloud config get-value project)
 ```

+   Make sure that the `$PROJECT_ID` variable is set by running the command below:
+
+   ```bash
+   echo $PROJECT_ID
+   ```
+
 2. Create a GKE cluster:

    ```bash
-   gcloud container clusters create weather-cluster-2 --num-nodes=2 --zone=us-central1-a --quiet > /dev/null 2>&1 &
+   gcloud container clusters create weather-cluster --num-nodes=2 --zone=us-central1-a --quiet > /dev/null 2>&1 &
    ```

    The above should take about 5-8 minutes. We will run other processes that can be done in parallel and check the status of this from time to time.
@@ -30,8 +36,12 @@

 ```bash
 gcloud container clusters describe weather-cluster --zone=us-central1-a
 ```
+## Create a Container Repository in Artifact Registry

-
+Create a Docker-format repository in Artifact Registry to host the container images:
+```bash
+gcloud artifacts repositories create my-docker-repo --project=${PROJECT_ID} --location=us --repository-format=docker
+```

 ## Build and Push Docker Images
@@ -42,20 +52,20 @@ Build and push data pipeline images:
 cd data-pipeline

 # Build and push data pipeline images
-docker build --target extract -t gcr.io/${PROJECT_ID}/data-pipeline-extract:latest .
-docker build --target load -t gcr.io/${PROJECT_ID}/data-pipeline-load:latest .
-docker build --target transform -t gcr.io/${PROJECT_ID}/data-pipeline-transform:latest .
+docker build --target extract -t us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-extract:latest .
+docker build --target load -t us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-load:latest .
+docker build --target transform -t us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-transform:latest .

-docker push gcr.io/${PROJECT_ID}/data-pipeline-extract:latest
-docker push gcr.io/${PROJECT_ID}/data-pipeline-load:latest
-docker push gcr.io/${PROJECT_ID}/data-pipeline-transform:latest
+docker push us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-extract:latest
+docker push us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-load:latest
+docker push us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/data-pipeline-transform:latest

 # Navigate to the flask-app directory
 cd ../flask-app

 # Build and push Flask app image
-docker build -t gcr.io/${PROJECT_ID}/flask-app:latest .
-docker push gcr.io/${PROJECT_ID}/flask-app:latest
+docker build -t us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/flask-app:latest .
+docker push us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo/flask-app:latest

 ```

@@ -127,10 +137,10 @@ Deploy data pipeline job:
 ```bash
 envsubst < data-pipeline-job.yaml | kubectl apply -f -
-kubectl create job --from=cronjob/data-pipeline-sequence data-pipeline-manual-trigger-new
+kubectl create job --from=cronjob/data-pipeline-sequence data-pipeline-sequence

 # Wait for data pipeline job to complete
-kubectl wait --for=condition=complete job/data-pipeline-manual-trigger-new --timeout=600s
+kubectl wait --for=condition=complete job/data-pipeline-sequence --timeout=600s
 ```

 Once completed, proceed to the next step.

@@ -177,7 +187,7 @@ View logs for the flask-app pod.
 3. Delete a specific job:

    ```bash
-   kubectl delete job data-pipeline-manual-trigger
+   kubectl delete job data-pipeline-sequence
    ```

 4. Delete a cronjob:
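
The diff above retags the images from Container Registry (`gcr.io`) to Artifact Registry (`us-docker.pkg.dev`), but it does not show how the local Docker client authenticates to the new registry domain. The sketch below is a suggested companion step, not part of the change itself; it assumes the `my-docker-repo` repository name and `us` location used in the diff, and that the account running the commands is allowed to push to that repository.

```bash
# One-time setup: register gcloud as a Docker credential helper for the
# us-docker.pkg.dev domain so the `docker push` commands in the guide can
# authenticate to Artifact Registry.
gcloud auth configure-docker us-docker.pkg.dev

# Optional sanity checks: confirm the repository exists and, after pushing,
# list the images it contains. Names match those used in the guide.
gcloud artifacts repositories describe my-docker-repo --location=us
gcloud artifacts docker images list us-docker.pkg.dev/${PROJECT_ID}/my-docker-repo
```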