
Commit 1646ad3
Changes for Fall 2023. Upgraded Ansible and other components.

satkinson committed Dec 12, 2023
1 parent 60173de commit 1646ad3
Showing 10 changed files with 113 additions and 92 deletions.
8 changes: 4 additions & 4 deletions README.md
@@ -67,7 +67,7 @@ cd comanage-registry-training-ansible
python3 -m venv .
source bin/activate
pip install --upgrade pip
- pip install ansible==2.10.7
+ pip install ansible==5.10.0
pip install boto
pip install boto3
ansible-galaxy collection install amazon.aws
@@ -94,7 +94,7 @@ Do this each time to run ansible commands or playbooks
to set up the environment:

```
- cd comanage-registry-training-deployment
+ cd comanage-registry-training-ansible
source bin/activate
export ANSIBLE_CONFIG=`pwd`/ansible.cfg
@@ -272,7 +272,7 @@ trainee is expected to create a few secrets.
Once successfully deployed, COmanage Registry is available at the URL

```
- https://node11.comanage.incommon.training
+ https://node1.comanage.incommon.training
```

for node 1, and
@@ -306,7 +306,7 @@ used by ansible, it might help to start with a fresh agent when you begin your
work for the day:

```
- cd comanage-registry-training-deployment
+ cd comanage-registry-training-ansible
rm ./ssh_mux_*
kill $SSH_AGENT_PID
unset SSH_AUTH_SOCK
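```

After clearing the stale multiplexing sockets and killing the old agent, start a fresh agent and re-add the training key. A minimal sketch, assuming the key lives at `~/.ssh/comanage-training` (hypothetical path):

```
eval `ssh-agent`
ssh-add ~/.ssh/comanage-training
```
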
25 changes: 13 additions & 12 deletions group_vars/all.yml
@@ -5,11 +5,12 @@
# ansible-vault encrypt_string 'THE_PASSWORD' --name 'comanage_training_password'
comanage_training_password: !vault |
$ANSIBLE_VAULT;1.1;AES256
- 39336132313462653438613837623964363334316532646639303938353736393365626532363634
- 6664376163323637616463383437356538356438303534340a353630616538643730333330363638
- 30663032323634613537663334613638343031333436333030666131393037316232356539383133
- 3734383064343431340a333431303465346132323332386131613031366537393830633830663137
- 3333
+ 37623864383639323431323832323334643964356137333336343434333438396665393563303130
+ 3438333666393535633338343261663766373366333431330a656361353362646563383830366564
+ 31346161393165363861343735653832383934623962336434656164323239616363393532343366
+ 3234366534623766330a313232393062313034646566383230653661653737363161393131623766
+ 6365
+ # It should not be necessary to change the password salt.
comanage_training_password_salt: !vault |
$ANSIBLE_VAULT;1.1;AES256
@@ -49,34 +50,34 @@ vpc_availability_zone:

ssh_bastion_instance_type: t2.nano
# Most current Debian AMD x86_64, see https://wiki.debian.org/Cloud/AmazonEC2Image/
- ssh_bastion_ami_id: ami-0c1b4dff690b5d229
+ ssh_bastion_ami_id: ami-0aa49e41c7088702f
ssh_bastion_user: admin
ssh_bastion_device_name: /dev/xvda
ssh_bastion_volume_type: gp2
ssh_bastion_volume_size: 10

idp_node_instance_type: t2.small
# Most current Debian AMD x86_64, see https://wiki.debian.org/Cloud/AmazonEC2Image/
- idp_node_ami_id: ami-0c1b4dff690b5d229
+ idp_node_ami_id: ami-0aa49e41c7088702f
idp_node_user: admin
idp_node_device_name: /dev/xvda
idp_node_volume_type: gp2
idp_node_volume_size: 20

- training_node_count: 2
+ training_node_count: 30

training_node_instance_type: t2.small
# Most current Debian AMD x86_64, see https://wiki.debian.org/Cloud/AmazonEC2Image/
- training_node_ami_id: ami-0c1b4dff690b5d229
+ training_node_ami_id: ami-0aa49e41c7088702f
training_node_user: admin
training_node_device_name: /dev/xvda
training_node_volume_type: gp2
training_node_volume_size: 20

# Docker version
docker_ce_package_version: "5:23.0.4-1~debian.11~bullseye"
docker_ce_cli_package_version: "5:23.0.4-1~debian.11~bullseye"
containerd_io_package_version: "1.6.20-1"
docker_ce_package_version: "5:24.0.6-1~debian.12~bookworm"
docker_ce_cli_package_version: "5:24.0.6-1~debian.12~bookworm"
containerd_io_package_version: "1.6.24-1"
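
The Debian AMI IDs above are pinned by hand each training cycle. They could instead be resolved at provision time; a sketch using `amazon.aws.ec2_ami_info`, assuming Debian's official AWS owner account ID `136693071363` (an assumption — verify against wiki.debian.org before relying on it):

```
- name: Find the most recent Debian 12 amd64 AMI
  amazon.aws.ec2_ami_info:
    region: "{{ comanage_training_region }}"
    owners: 136693071363  # Debian's AWS account (assumption; confirm on the Debian wiki)
    filters:
      name: "debian-12-amd64-*"
      architecture: x86_64
  register: debian_amis

- name: Use the newest matching image for training nodes
  set_fact:
    training_node_ami_id: "{{ (debian_amis.images | sort(attribute='creation_date') | last).image_id }}"
```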



30 changes: 15 additions & 15 deletions idp_node.yml
@@ -26,53 +26,53 @@
register: idp_node_sg

- name: Provision COmanage IdP node
- amazon.aws.ec2:
+ amazon.aws.ec2_instance:
key_name: "{{ training_node_ssh_key_name }}"
- group_id: "{{ idp_node_sg.group_id }}"
+ security_group: "{{ idp_node_sg.group_id }}"
instance_type: "{{ idp_node_instance_type }}"
image: "{{ idp_node_ami_id }}"
image_id: "{{ idp_node_ami_id }}"
region: "{{ comanage_training_region }}"
- assign_public_ip: no
+ network:
+   assign_public_ip: false
instance_initiated_shutdown_behavior: stop
- monitoring: no
+ detailed_monitoring: false
# We only provision into one subnet since we do not need high
# availability for training.
vpc_subnet_id: "{{ private_subnet_id_by_az | dictsort | first | last }}"
volumes:
- device_name: "{{ idp_node_device_name }}"
volume_type: "{{ idp_node_volume_type }}"
volume_size: "{{ idp_node_volume_size }}"
delete_on_termination: yes
instance_tags:
ebs:
volume_type: "{{ idp_node_volume_type }}"
volume_size: "{{ idp_node_volume_size }}"
delete_on_termination: yes
tags:
Name: "comanage-idp-node"
private_fqdn: "login-private.{{ r53_dns_domain }}"
public_fqdn: "login.{{ r53_dns_domain }}"
comanage_training: True
role: idp
- count_tag:
-   Name: "comanage-idp-node"
exact_count: 1
wait: true
register: idp_node

- name: Build Ansible inventory host group of IdP node
add_host:
name: "{{ idp_node.tagged_instances[0].private_ip }}"
name: "{{ idp_node.instances[0].private_ip_address }}"
groups: ssh_idp_node_host

- name: Create A record entry for IdP node private interface
community.aws.route53:
state: present
zone: "{{ r53_hosted_zone }}"
record: "{{ idp_node.tagged_instances[0].tags.private_fqdn }}"
value: "{{ idp_node.tagged_instances[0].private_ip }}"
record: "{{ idp_node.instances[0].tags.private_fqdn }}"
value: "{{ idp_node.instances[0].private_ip_address }}"
type: A
ttl: 30
overwrite: yes
wait: no

- name: Wait for SSH to come up on IdP node
delegate_to: "{{ idp_node.tagged_instances[0].private_ip }}"
delegate_to: "{{ idp_node.instances[0].private_ip_address }}"
wait_for_connection:
timeout: 300
register: idp_node_ssh_connection
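
The retired `amazon.aws.ec2` module returned provisioned hosts under `tagged_instances`, with `private_ip` and `id`; `amazon.aws.ec2_instance` returns them under `instances`, with `private_ip_address` and `instance_id`. A quick debug task to confirm the fields this playbook now reads (illustrative sketch):

```
- name: Inspect the ec2_instance return values used above
  debug:
    msg:
      - "{{ idp_node.instances[0].instance_id }}"
      - "{{ idp_node.instances[0].private_ip_address }}"
      - "{{ idp_node.instances[0].tags.private_fqdn }}"
```
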
2 changes: 1 addition & 1 deletion roles/common/tasks/users.yml
@@ -85,7 +85,7 @@
comment: COmanage Training User
uid: 2000
home: /home/training
password: "{{ comanage_training_password | string | password_hash('sha512', comanage_training_password_salt) }}"
password: "{{ comanage_training_password | string | password_hash('sha512') }}"
shell: /bin/bash
group: training
append: yes
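
Without a salt argument, `password_hash` generates a fresh salt on every evaluation, so the resulting hash (and therefore this user task) changes on each run; passing a fixed salt keeps the hash, and the play, idempotent. An illustrative comparison with hypothetical values:

```
- name: Compare salted and unsalted password hashing (illustrative)
  debug:
    msg:
      - "{{ 'secret' | password_hash('sha512', 'fixedsalt') }}"  # identical on every run
      - "{{ 'secret' | password_hash('sha512') }}"               # new salt, new hash, each run
```
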
2 changes: 1 addition & 1 deletion roles/idp/files/shibboleth-idp-stack.yml
@@ -36,7 +36,7 @@ services:
tag: "shibboleth-idp_{{.Name}}"

ldap:
- image: sphericalcowgroup/comanage-registry-slapd:8
+ image: sphericalcowgroup/comanage-registry-slapd:20231011
command: ["slapd", "-d", "256", "-h", "ldapi:/// ldap:///", "-u", "openldap", "-g", "openldap"]
volumes:
- /srv/docker/var/lib/ldap:/var/lib/ldap
11 changes: 5 additions & 6 deletions roles/swarm/tasks/main.yml
@@ -22,7 +22,7 @@

- name: Add Docker CE repository
apt_repository:
- repo: deb [arch=amd64] https://download.docker.com/linux/debian bullseye stable
+ repo: deb [arch=amd64] https://download.docker.com/linux/debian bookworm stable

- name: Install Docker
apt:
@@ -70,11 +70,10 @@
name: python3-pip
update_cache: no

- - name: Install Python3 docker module
-   pip:
-     executable: /usr/bin/pip3
-     name: docker
-     state: present
+ - name: Install python3 docker module
+   apt:
+     name: python3-docker
+     update_cache: no

- name: Initialize single node swarm
community.docker.docker_swarm:
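
Debian 12 marks the system Python as an externally managed environment (PEP 668), so a `pip install docker` against `/usr/bin/pip3` now fails; installing the distribution's `python3-docker` package via apt avoids that. A post-install sanity check might look like this (illustrative sketch):

```
- name: Verify the Docker SDK for Python is importable
  command: python3 -c "import docker; print(docker.__version__)"
  changed_when: false
```
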
9 changes: 5 additions & 4 deletions roles/training/files/comanage-match-stack.yml
@@ -21,7 +21,7 @@ services:
tag: "postgresql-{{.Name}}"

match:
- image: i2incommon/comanage-match:1.1.0-internet2-tap-1
+ image: i2incommon/comanage-match:1.1.1-20230726
sysctls:
- net.ipv4.tcp_keepalive_time=60
- net.ipv4.tcp_keepalive_intvl=60
@@ -42,10 +42,11 @@ services:
- COMANAGE_MATCH_DATABASE_USER=match_user
- COMANAGE_MATCH_DATABASE_USER_PASSWORD_FILE=/run/secrets/comanage_match_database_user_password
- COMANAGE_MATCH_EMAIL_TRANSPORT=Smtp
- - COMANAGE_MATCH_EMAIL_HOST=tls://smtp.gmail.com
- - COMANAGE_MATCH_EMAIL_PORT=465
+ - COMANAGE_MATCH_EMAIL_CLASS_NAME=Smtp
+ - COMANAGE_MATCH_EMAIL_HOST=smtp.gmail.com
+ - COMANAGE_MATCH_EMAIL_PORT=587
- COMANAGE_MATCH_EMAIL_ACCOUNT=comanagetraining@gmail.com
- - COMANAGE_MATCH_EMAIL_ACCOUNT_PASSWORD_FILE=/run/secrets/comanage_registry_email_account_password
+ - COMANAGE_MATCH_EMAIL_ACCOUNT_PASSWORD=uzpvcdtniaeqdnso
- COMANAGE_MATCH_EMAIL_FROM_EMAIL=comanagetraining@gmail.com
- COMANAGE_MATCH_EMAIL_FROM_NAME=Match
- SHIBBOLETH_SP_ENCRYPT_CERT=/run/secrets/shibboleth_sp_encrypt_cert
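
Port 465 carries implicit TLS (hence the old `tls://` scheme), while port 587 uses STARTTLS. Note also that the Gmail app password moves from a Docker secret into a plain environment variable, readable by anyone who can view the stack file or inspect the service; a sketch of keeping it in a secret instead, with a hypothetical secret name (a matching top-level `secrets:` definition would also be needed):

```
  match:
    environment:
      - COMANAGE_MATCH_EMAIL_ACCOUNT_PASSWORD_FILE=/run/secrets/comanage_match_email_password
    secrets:
      - comanage_match_email_password
```
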
10 changes: 5 additions & 5 deletions roles/training/files/comanage-registry-stack.yml
@@ -2,7 +2,7 @@ version: '3.8'

services:
database:
- image: mariadb:10.4.28
+ image: mariadb:10.5
volumes:
- /srv/docker/var/lib/mysql:/var/lib/mysql
environment:
@@ -21,7 +21,7 @@ services:
tag: "mariadb-{{.Name}}"

campusdatabase:
- image: mariadb:10.4.28
+ image: mariadb:10.5
volumes:
- /srv/docker/var/lib/campussql:/var/lib/mysql
environment:
@@ -40,7 +40,7 @@ services:
tag: "mariadb-{{.Name}}"

registry:
- image: i2incommon/comanage-registry:4.1.1-20230202
+ image: i2incommon/comanage-registry:4.2.1-20230725
volumes:
- /srv/docker/srv/comanage-registry/local:/srv/comanage-registry/local
- /srv/docker/etc/shibboleth/shibboleth2.xml:/etc/shibboleth/shibboleth2.xml
@@ -85,7 +85,7 @@ services:
tag: "registry_{{.Name}}"

cron:
- image: i2incommon/comanage-registry-cron:4.1.1-20230202
+ image: i2incommon/comanage-registry-cron:4.2.1-20230725
volumes:
- /srv/docker/srv/comanage-registry/local:/srv/comanage-registry/local
environment:
@@ -99,7 +99,7 @@ services:
tag: "cron_{{.Name}}"

ldap:
- image: sphericalcowgroup/comanage-registry-slapd:8
+ image: sphericalcowgroup/comanage-registry-slapd:20231011
command: ["slapd", "-d", "256", "-h", "ldapi:/// ldap:///", "-u", "openldap", "-g", "openldap"]
volumes:
- /srv/docker/var/lib/ldap:/var/lib/ldap
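
`mariadb:10.5` is a floating minor tag: it tracks 10.5.x patch releases automatically, unlike the fully pinned `10.4.28` it replaces, so two deployments may pull different images. If reproducibility matters for a training run, the tag can be pinned by digest; a sketch with a placeholder digest:

```
  database:
    # placeholder digest; resolve the real one with `docker pull mariadb:10.5`
    # followed by `docker image inspect mariadb:10.5`
    image: mariadb:10.5@sha256:<digest>
```
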
56 changes: 38 additions & 18 deletions ssh_bastion.yml
@@ -23,46 +23,46 @@

# For each public subnet, build a bastion host
- name: Provision SSH bastion hosts
- amazon.aws.ec2:
+ amazon.aws.ec2_instance:
key_name: "{{ training_node_ssh_key_name }}"
group_id: "{{ bastion_ssh_security_group.group_id }}"
security_group: "{{ bastion_ssh_security_group.group_id }}"
instance_type: "{{ ssh_bastion_instance_type }}"
image: "{{ ssh_bastion_ami_id }}"
image_id: "{{ ssh_bastion_ami_id }}"
wait: true
region: "{{ comanage_training_region }}"
- assign_public_ip: yes
+ network:
+   assign_public_ip: true
+   private_ip: "{{ item.item.value.bastion_ip }}"
instance_initiated_shutdown_behavior: stop
- monitoring: no
+ detailed_monitoring: false
vpc_subnet_id: "{{ item.subnet.id }}"
private_ip: "{{ item.item.value.bastion_ip }}"
volumes:
- device_name: "{{ ssh_bastion_device_name }}"
volume_type: "{{ ssh_bastion_volume_type }}"
volume_size: "{{ ssh_bastion_volume_size }}"
delete_on_termination: yes
instance_tags:
ebs:
volume_type: "{{ ssh_bastion_volume_type }}"
volume_size: "{{ ssh_bastion_volume_size }}"
delete_on_termination: yes
tags:
Name: "comanage_training_{{ item.item.value.bastion_hostname }}"
public_fqdn: "{{ item.item.value.bastion_hostname }}.{{ r53_dns_domain }}"
private_fqdn: "{{ item.item.value.bastion_hostname }}.{{ r53_dns_domain }}"
comanage_training: True
role : bastion
- count_tag:
-   Name: "comanage_training_{{ item.item.value.bastion_hostname }}"
exact_count: 1
register: bastion
loop: "{{ subnet_public.results }}"

- name: List EC2 instance ID information
debug:
msg: "{{ item.tagged_instances[0].id }}"
msg: "{{ item.instances[0].instance_id }}"
loop: "{{ bastion.results }}"

- name: Create CNAME entries for bastion hosts
community.aws.route53:
state: present
zone: "{{ r53_hosted_zone }}"
record: "{{ item.tagged_instances[0].tags.public_fqdn }}"
value: "{{ item.tagged_instances[0].public_dns_name }}"
record: "{{ item.instances[0].tags.public_fqdn }}"
value: "{{ item.instances[0].public_dns_name }}"
type: CNAME
ttl: 30
overwrite: yes
@@ -71,7 +71,7 @@

- name: Build Ansible inventory host group of bastions
add_host:
name: "{{ item.tagged_instances[0].public_dns_name }}"
name: "{{ item.instances[0].public_dns_name }}"
groups: ssh_bastion_hosts
loop: "{{ bastion.results }}"

Expand All @@ -83,11 +83,11 @@

- name: Build bastion_internal_ip from bastion host list
set_fact:
- bastion_internal_ip: "{{ bastion_internal_ip | default([]) + [item.tagged_instances[0].private_ip + '/32']}}"
+ bastion_internal_ip: "{{ bastion_internal_ip | default([]) + [item.instances[0].private_ip_address + '/32']}}"
loop: "{{ bastion.results }}"

- name: Wait for SSH to come up on SSH bastion hosts
delegate_to: "{{ item.tagged_instances[0].public_dns_name }}"
delegate_to: "{{ item.instances[0].public_dns_name }}"
wait_for_connection:
timeout: 300
register: bastion_ssh_connections
@@ -116,6 +116,26 @@
line: "prepend domain-search \"{{ r53_dns_domain }}\";"
register: bastion_domain_config

+ - name: Configure systemd-resolved service
+   blockinfile:
+     path: /etc/systemd/resolved.conf
+     block: |
+       Domains={{ r53_dns_domain }}
+       LLMNR=no
+     backup: yes
+
+ - name: Configure Name Service Switch
+   lineinfile:
+     path: /etc/nsswitch.conf
+     regexp: "^hosts:"
+     line: "hosts: files resolve dns"
+
+ - name: Restart systemd-resolved service
+   systemd:
+     state: restarted
+     name: systemd-resolved
+
+
- name: Reboot bastion host
reboot:
when: bastion_domain_config.changed
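
With `Domains={{ r53_dns_domain }}` pushed into systemd-resolved and the `resolve` NSS module enabled, short hostnames on the bastion should resolve through the Route 53 zone. A quick verification task after the reboot (illustrative, assuming `node1` exists in the zone):

```
- name: Verify search-domain resolution on the bastion
  command: getent hosts node1
  changed_when: false
```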
