aboutsummaryrefslogtreecommitdiffstats
path: root/fdio.infra.ansible
diff options
context:
space:
mode:
Diffstat (limited to 'fdio.infra.ansible')
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml3
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.20.yaml67
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml51
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml48
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml29
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml29
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml29
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml29
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml (renamed from fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.17.yaml)42
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml (renamed from fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.18.yaml)42
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml (renamed from fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.19.yaml)55
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml (renamed from fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.16.yaml)56
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.32.yaml38
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.33.yaml38
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.34.yaml38
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.35.yaml38
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml4
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml4
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml3
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.44.yaml32
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.45.yaml32
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.46.yaml32
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.47.yaml32
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.48.yaml32
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml2
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml53
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml53
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml60
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml55
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml53
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml53
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.56.yaml36
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.57.yaml36
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml22
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml27
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.60.yaml32
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml8
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml71
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml71
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml3
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml3
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml4
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml4
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.90.yaml38
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml35
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml35
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml4
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml4
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.10.yaml33
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.11.yaml30
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.12.yaml30
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.13.yaml30
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml6
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml6
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml6
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml6
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml6
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml6
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml5
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/hosts53
-rw-r--r--fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml3
-rw-r--r--fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml3
-rw-r--r--fdio.infra.ansible/roles/ab/defaults/main.yaml2
-rw-r--r--fdio.infra.ansible/roles/ab/tasks/main.yaml4
-rw-r--r--fdio.infra.ansible/roles/aws/defaults/main.yaml26
-rw-r--r--fdio.infra.ansible/roles/aws/files/get-vfio-with-wc.sh203
-rw-r--r--fdio.infra.ansible/roles/aws/handlers/main.yaml19
-rw-r--r--fdio.infra.ansible/roles/aws/tasks/main.yaml106
-rw-r--r--fdio.infra.ansible/roles/aws/tasks/ubuntu_focal.yaml10
-rw-r--r--fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml31
-rw-r--r--fdio.infra.ansible/roles/calibration/defaults/main.yaml16
-rw-r--r--fdio.infra.ansible/roles/calibration/tasks/main.yaml18
-rw-r--r--fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml10
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml6
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml20
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml9
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/main.yaml19
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml12
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml10
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/sut.yaml17
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/tg.yaml2
-rw-r--r--fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml4
-rw-r--r--fdio.infra.ansible/roles/common/defaults/main.yaml25
-rw-r--r--fdio.infra.ansible/roles/common/handlers/main.yaml2
-rw-r--r--fdio.infra.ansible/roles/common/tasks/main.yaml17
-rw-r--r--fdio.infra.ansible/roles/consul/defaults/main.yaml37
-rw-r--r--fdio.infra.ansible/roles/consul/handlers/main.yaml2
-rw-r--r--fdio.infra.ansible/roles/consul/meta/main.yaml23
-rw-r--r--fdio.infra.ansible/roles/consul/tasks/main.yaml131
-rw-r--r--fdio.infra.ansible/roles/consul/templates/ports.hcl.j22
-rw-r--r--fdio.infra.ansible/roles/consul/templates/services.json.j213
-rw-r--r--fdio.infra.ansible/roles/consul/vars/main.yaml2
-rw-r--r--fdio.infra.ansible/roles/csit_sut_image/files/Dockerfile166
-rw-r--r--fdio.infra.ansible/roles/csit_sut_image/tasks/main.yaml30
-rw-r--r--fdio.infra.ansible/roles/docker/defaults/main.yaml2
-rw-r--r--fdio.infra.ansible/roles/docker/meta/main.yaml1
-rw-r--r--fdio.infra.ansible/roles/docker/tasks/focal.yaml30
-rw-r--r--fdio.infra.ansible/roles/docker_images/files/base/Dockerfile140
-rw-r--r--fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-sut.service12
-rw-r--r--fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-tg.service12
-rw-r--r--fdio.infra.ansible/roles/docker_images/files/csit-sut/Dockerfile7
-rw-r--r--fdio.infra.ansible/roles/docker_images/files/csit-sut/supervisord.conf (renamed from fdio.infra.ansible/roles/csit_sut_image/files/supervisord.conf)0
-rw-r--r--fdio.infra.ansible/roles/docker_images/handlers/main.yaml18
-rw-r--r--fdio.infra.ansible/roles/docker_images/tasks/base.yaml63
-rw-r--r--fdio.infra.ansible/roles/docker_images/tasks/main.yaml21
-rw-r--r--fdio.infra.ansible/roles/docker_images/tasks/sut.yaml28
-rw-r--r--fdio.infra.ansible/roles/docker_images/tasks/tg.yaml28
-rw-r--r--fdio.infra.ansible/roles/docker_images/templates/docker-compose-sut.yaml.j242
-rw-r--r--fdio.infra.ansible/roles/docker_images/templates/docker-compose-tg.yaml.j238
-rw-r--r--fdio.infra.ansible/roles/dpdk/defaults/main.yaml7
-rw-r--r--fdio.infra.ansible/roles/dpdk/meta/main.yaml22
-rw-r--r--fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml4
-rw-r--r--fdio.infra.ansible/roles/dpdk/tasks/deploy_block.yaml33
-rw-r--r--fdio.infra.ansible/roles/dpdk/tasks/main.yaml52
-rw-r--r--fdio.infra.ansible/roles/intel/defaults/main.yaml108
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/dsa.yaml39
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/i40e.yaml16
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/iavf.yaml14
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/ice.yaml54
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/main.yaml99
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/qat1.yaml54
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/qat2.yaml57
-rw-r--r--fdio.infra.ansible/roles/iperf/defaults/main.yaml5
-rw-r--r--fdio.infra.ansible/roles/iperf/tasks/main.yaml14
-rw-r--r--fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml25
-rw-r--r--fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml6
-rw-r--r--fdio.infra.ansible/roles/kernel/defaults/main.yaml14
-rw-r--r--fdio.infra.ansible/roles/kernel/handlers/main.yaml2
-rw-r--r--fdio.infra.ansible/roles/kernel/tasks/ubuntu_focal.yaml62
-rw-r--r--fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml18
-rw-r--r--fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml22
-rw-r--r--fdio.infra.ansible/roles/kubernetes/defaults/main.yaml15
-rw-r--r--fdio.infra.ansible/roles/kubernetes/tasks/main.yaml14
-rw-r--r--fdio.infra.ansible/roles/kubernetes/tasks/ubuntu_jammy.yaml37
-rw-r--r--fdio.infra.ansible/roles/mellanox/defaults/main.yaml16
-rw-r--r--fdio.infra.ansible/roles/mellanox/tasks/main.yaml16
-rw-r--r--fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml21
-rw-r--r--fdio.infra.ansible/roles/nomad/defaults/main.yaml90
-rw-r--r--fdio.infra.ansible/roles/nomad/handlers/main.yaml4
-rw-r--r--fdio.infra.ansible/roles/nomad/meta/main.yaml26
-rw-r--r--fdio.infra.ansible/roles/nomad/tasks/main.yaml132
-rw-r--r--fdio.infra.ansible/roles/nomad/templates/cfssl.json8
-rw-r--r--fdio.infra.ansible/roles/nomad/templates/consul.hcl.j263
-rw-r--r--fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j25
-rw-r--r--fdio.infra.ansible/roles/nomad/templates/server.hcl.j22
-rw-r--r--fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j230
-rw-r--r--fdio.infra.ansible/roles/nomad/templates/tls.hcl.j242
-rw-r--r--fdio.infra.ansible/roles/nomad/templates/vault.hcl.j269
-rw-r--r--fdio.infra.ansible/roles/nomad/vars/main.yaml2
-rw-r--r--fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml2
-rw-r--r--fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml8
-rw-r--r--fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml42
-rw-r--r--fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml1
-rw-r--r--fdio.infra.ansible/roles/python_env/defaults/main.yaml11
-rw-r--r--fdio.infra.ansible/roles/python_env/tasks/main.yaml99
-rw-r--r--fdio.infra.ansible/roles/tg/files/csit-initialize-docker-tg.service12
-rwxr-xr-xfdio.infra.ansible/roles/tg/files/csit-initialize-docker-tg.sh58
-rw-r--r--fdio.infra.ansible/roles/tg/handlers/main.yaml10
-rw-r--r--fdio.infra.ansible/roles/tg/tasks/main.yaml30
-rw-r--r--fdio.infra.ansible/roles/topology/tasks/main.yaml8
-rw-r--r--fdio.infra.ansible/roles/topology/templates/topology-1n-c6gn.j230
-rw-r--r--fdio.infra.ansible/roles/topology/templates/topology-1n-c6in.j230
-rw-r--r--fdio.infra.ansible/roles/topology/templates/topology-2n-c6gn.j2 (renamed from fdio.infra.ansible/roles/topology/templates/topology-2n-aws-c6gn.j2)16
-rw-r--r--fdio.infra.ansible/roles/topology/templates/topology-2n-c6in.j251
-rw-r--r--fdio.infra.ansible/roles/topology/templates/topology-3n-c6gn.j273
-rw-r--r--fdio.infra.ansible/roles/topology/templates/topology-3n-c6in.j273
-rw-r--r--fdio.infra.ansible/roles/trex/defaults/main.yaml42
-rw-r--r--fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml40
-rw-r--r--fdio.infra.ansible/roles/trex/tasks/main.yaml12
-rw-r--r--fdio.infra.ansible/roles/vagrant/tasks/main.yml15
-rw-r--r--fdio.infra.ansible/roles/vault/meta/main.yaml1
-rw-r--r--fdio.infra.ansible/roles/vpp/defaults/main.yaml14
-rw-r--r--fdio.infra.ansible/roles/vpp/tasks/main.yaml6
-rw-r--r--fdio.infra.ansible/roles/vpp_device/defaults/main.yaml6
-rw-r--r--fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-alt.sh39
-rw-r--r--fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh28
-rw-r--r--fdio.infra.ansible/roles/vpp_device/handlers/main.yaml18
-rw-r--r--fdio.infra.ansible/roles/vpp_device/tasks/main.yaml74
-rw-r--r--fdio.infra.ansible/site.yaml2
-rw-r--r--fdio.infra.ansible/sut.yaml37
-rw-r--r--fdio.infra.ansible/tg.yaml47
-rw-r--r--fdio.infra.ansible/vpp_device.yaml8
197 files changed, 3332 insertions, 2229 deletions
diff --git a/fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml b/fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml
index 0756621eef..719ef32625 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml
@@ -1,5 +1,2 @@
---
# file: lf_inventory/group_vars/all.yaml
-
-# Ansible interpreter (for PIP)
-ansible_python_interpreter: "/usr/bin/python3"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.20.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.20.yaml
deleted file mode 100644
index 8055eec3e9..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.20.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
----
-# file: host_vars/10.30.51.20.yaml
-
-hostname: "s20-nomad"
-inventory_cimc_hostname: "10.30.50.20"
-
-# User management.
-users:
- - username: localadmin
- groups: [adm, sudo]
- password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
- ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDQ5KJyLPM5yJAVLwhEwiTEZD5LkY7FEPOoyJplVMQu5/oIp+KtFr4/RrFEpJwlzuE3um+Hn9+4KZJiQvVJBEp/ZYeGMXJDw0oHlubtI/0AEdolM5TvYNzCASHulRfg2JdCGAeCG5W7vkPQmJjwtQFNw3ISGKKHgJBGipqUjDyuZPi24RI0YBfqtr/GDA0e9dZwkqI+F5yxVi9vXG68bpMkxpeC4Zlxe2DmXAEp787LYSlF+HoELPW3AAWnuQ3j1R2tImM/S9jlxfb9Uy7KlX9epkGV2/caS2EiLszT7604LrSZy4soko1CgtX1LJ3Qobz9OppkJQ+tiBU8C0oz8Z5CXnQ/GYmFt9SBDHneB1ZOtIcoV8HTQVLKCse/VXogQogdG8xOis6F+R3OAJQUF7w3ujIBCXv8ghVcynvzaGB1kxZtwceEqtIXSFdve6T2onyM0722aQ93MiXPv+IA5qpkpSN3JS0KFGxKalTdFz9+lx/agV4JVOrVxEASfok3hbNzEm/4DwUyvgf2vbHlfYQdjHg2mBmYnN3iY/ccU906Tt/TlIp25+FIIbTiLn/nt3NLLPPEZJbJDkEJ5tgJ7XLfJ9ByCfZoOIMdSPF/swDXafTcA5mJ4AKFS0HJf9wBvbgkmD2OttOAvL8k2mqsb7MiMurbXm7Ko/D+nEX+oxkKZQ== peter.mikus@protonmail.ch"
- - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
-sshd_disable_password_login: true
-
-# Nomad settings.
-nomad_certificates:
- - src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
- - src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
- - src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
-nomad_datacenter: "yul1"
-nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
-nomad_node_role: "client"
-nomad_node_class: "builder"
-nomad_options:
- driver.raw_exec.enable: 1
- docker.cleanup.image: false
- docker.privileged.enabled: true
- docker.volumes.enabled: true
- driver.whitelist: "docker,raw_exec,exec"
-nomad_service_mgr: "systemd"
-nomad_retry_servers:
- - "10.30.51.23"
- - "10.30.51.24"
- - "10.30.51.25"
-nomad_servers:
- - "10.30.51.23:4647"
- - "10.30.51.24:4647"
- - "10.30.51.25:4647"
-
-# Consul settigs.
-consul_nomad_integration: true
-consul_certificates:
- - src: "{{ file_consul_ca_pem }}"
- dest: "{{ consul_ca_file }}"
-consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
-consul_node_role: "client"
-consul_retry_servers:
- - "10.30.51.23"
- - "10.30.51.24"
- - "10.30.51.25"
-consul_service_mgr: "systemd"
-
-# Docker daemon settings.
-docker_daemon:
- dns: ["172.17.0.1"]
- dns-opts: []
- dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
index 0bcbaed0ec..b28cae11ff 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
@@ -2,7 +2,7 @@
# file: host_vars/10.30.51.21.yaml
hostname: "s21-nomad"
-inventory_cimc_hostname: "10.30.50.21"
+inventory_ipmi_hostname: "10.30.50.21"
# User management.
users:
@@ -10,56 +10,77 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDQ5KJyLPM5yJAVLwhEwiTEZD5LkY7FEPOoyJplVMQu5/oIp+KtFr4/RrFEpJwlzuE3um+Hn9+4KZJiQvVJBEp/ZYeGMXJDw0oHlubtI/0AEdolM5TvYNzCASHulRfg2JdCGAeCG5W7vkPQmJjwtQFNw3ISGKKHgJBGipqUjDyuZPi24RI0YBfqtr/GDA0e9dZwkqI+F5yxVi9vXG68bpMkxpeC4Zlxe2DmXAEp787LYSlF+HoELPW3AAWnuQ3j1R2tImM/S9jlxfb9Uy7KlX9epkGV2/caS2EiLszT7604LrSZy4soko1CgtX1LJ3Qobz9OppkJQ+tiBU8C0oz8Z5CXnQ/GYmFt9SBDHneB1ZOtIcoV8HTQVLKCse/VXogQogdG8xOis6F+R3OAJQUF7w3ujIBCXv8ghVcynvzaGB1kxZtwceEqtIXSFdve6T2onyM0722aQ93MiXPv+IA5qpkpSN3JS0KFGxKalTdFz9+lx/agV4JVOrVxEASfok3hbNzEm/4DwUyvgf2vbHlfYQdjHg2mBmYnN3iY/ccU906Tt/TlIp25+FIIbTiLn/nt3NLLPPEZJbJDkEJ5tgJ7XLfJ9ByCfZoOIMdSPF/swDXafTcA5mJ4AKFS0HJf9wBvbgkmD2OttOAvL8k2mqsb7MiMurbXm7Ko/D+nEX+oxkKZQ== peter.mikus@protonmail.ch"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
- - src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
- - src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
nomad_node_class: "builder"
nomad_options:
driver.raw_exec.enable: 1
- docker.cleanup.image: false
+ docker.cleanup.image: true
docker.privileged.enabled: true
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- - "10.30.51.23"
+ - "10.30.51.26"
- "10.30.51.24"
- "10.30.51.25"
nomad_servers:
- - "10.30.51.23:4647"
+ - "10.30.51.26:4647"
- "10.30.51.24:4647"
- "10.30.51.25:4647"
-# Consul settigs.
-consul_nomad_integration: true
+# Consul settings.
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- - "10.30.51.23"
+ - "10.30.51.26"
- "10.30.51.24"
- "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker daemon settings.
docker_daemon:
dns: ["172.17.0.1"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
index 9364464ac2..8d7223495b 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
@@ -2,7 +2,7 @@
# file: host_vars/10.30.51.22.yaml
hostname: "s22-nomad"
-inventory_cimc_hostname: "10.30.50.22"
+inventory_ipmi_hostname: "10.30.50.22"
# User management.
users:
@@ -10,56 +10,76 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
- - src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
- - src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
nomad_node_class: "builder"
nomad_options:
driver.raw_exec.enable: 1
- docker.cleanup.image: false
+ docker.cleanup.image: true
docker.privileged.enabled: true
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- - "10.30.51.24"
+ - "10.30.51.26"
- "10.30.51.25"
nomad_servers:
- "10.30.51.23:4647"
- - "10.30.51.24:4647"
+ - "10.30.51.26:4647"
- "10.30.51.25:4647"
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_1_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_1_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- - "10.30.51.24"
+ - "10.30.51.26"
- "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
# Docker daemon settings.
docker_daemon:
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml
index b0a950988d..8c3afaf74c 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml
@@ -10,21 +10,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_server_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_server_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
@@ -38,6 +40,11 @@ nomad_options:
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.26"
- "10.30.51.24"
@@ -52,7 +59,7 @@ nomad_volumes:
read_only: false
# Consul settings.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
@@ -60,16 +67,24 @@ consul_certificates:
dest: "{{ consul_cert_file }}"
- src: "{{ file_consul_server_0_key_pem }}"
dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: true
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "both"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.26"
- "10.30.51.24"
- "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker daemon settings.
docker_daemon:
dns: ["172.17.0.1"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml
index b7981718e5..cb65e81c26 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml
@@ -10,21 +10,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_server_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_server_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
@@ -38,6 +40,11 @@ nomad_options:
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.26"
@@ -52,7 +59,7 @@ nomad_volumes:
read_only: false
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
@@ -60,16 +67,24 @@ consul_certificates:
dest: "{{ consul_cert_file }}"
- src: "{{ file_consul_server_1_key_pem }}"
dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: true
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "both"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.26"
- "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker daemon settings.
docker_daemon:
dns: ["172.17.0.1"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml
index ba84722fc6..20e45a90ea 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml
@@ -10,21 +10,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_server_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_server_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
@@ -38,6 +40,11 @@ nomad_options:
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -52,7 +59,7 @@ nomad_volumes:
read_only: false
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
@@ -60,16 +67,24 @@ consul_certificates:
dest: "{{ consul_cert_file }}"
- src: "{{ file_consul_server_2_key_pem }}"
dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: true
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "both"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
- "10.30.51.26"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker daemon settings.
docker_daemon:
dns: ["172.17.0.1"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml
index 1ad441bfe9..0d71009ede 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml
@@ -10,21 +10,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_server_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_server_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
@@ -38,6 +40,11 @@ nomad_options:
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -52,7 +59,7 @@ nomad_volumes:
read_only: false
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
@@ -60,16 +67,24 @@ consul_certificates:
dest: "{{ consul_cert_file }}"
- src: "{{ file_consul_server_3_key_pem }}"
dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: true
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "both"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
- "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker daemon settings.
docker_daemon:
dns: ["172.17.0.1"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.17.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml
index bcc443919d..745686c31e 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.17.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml
@@ -1,8 +1,8 @@
---
-# file: host_vars/10.30.51.17.yaml
+# file: host_vars/10.30.51.27.yaml
-hostname: "s17-nomad"
-inventory_cimc_hostname: "10.30.50.17"
+hostname: "s27-nomad"
+inventory_cimc_hostname: "10.30.50.27"
# User management.
users:
@@ -10,21 +10,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
@@ -36,6 +38,11 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -45,21 +52,32 @@ nomad_servers:
- "10.30.51.24:4647"
- "10.30.51.25:4647"
-# Consul settigs.
-consul_nomad_integration: true
+# Consul settings.
+nomad_use_consul: true
consul_certificates:
- - src: "{{ file_consul_ca_pem }}"
+ - src: "{{ file_consul_agent_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
- "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
# Docker daemon settings.
docker_daemon:
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.18.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml
index cb643b9daa..5a3c8896fc 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.18.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml
@@ -1,8 +1,8 @@
---
-# file: host_vars/10.30.51.18.yaml
+# file: host_vars/10.30.51.28.yaml
-hostname: "s18-nomad"
-inventory_cimc_hostname: "10.30.50.18"
+hostname: "s28-nomad"
+inventory_cimc_hostname: "10.30.50.28"
# User management.
users:
@@ -10,21 +10,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
@@ -36,6 +38,11 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -45,21 +52,32 @@ nomad_servers:
- "10.30.51.24:4647"
- "10.30.51.25:4647"
-# Consul settigs.
-consul_nomad_integration: true
+# Consul settings.
+nomad_use_consul: true
consul_certificates:
- - src: "{{ file_consul_ca_pem }}"
+ - src: "{{ file_consul_agent_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
- "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
# Docker daemon settings.
docker_daemon:
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.19.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml
index 62edabfe0d..543f557d6e 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.19.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml
@@ -1,8 +1,17 @@
---
-# file: host_vars/10.30.51.19.yaml
+# file: host_vars/10.30.51.30.yaml
-hostname: "s19-nomad"
-inventory_cimc_hostname: "10.30.50.19"
+hostname: "s30-t15-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu: "on"
+ vfio.enable_unsafe_noiommu_mode: 1
+inventory_ipmi_hostname: "10.30.50.30"
+vfs_data_file: "csit-initialize-vfs-spr.sh"
+cpu_microarchitecture: "sapphirerapids"
+
+intel_800_matrix: "dpdk22.03"
# User management.
users:
@@ -10,25 +19,27 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
-nomad_node_class: "builder"
+nomad_node_class: "csit"
nomad_options:
driver.raw_exec.enable: 1
docker.cleanup.image: false
@@ -36,6 +47,11 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -46,23 +62,36 @@ nomad_servers:
- "10.30.51.25:4647"
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_verify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
- "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
-# Docker daemon settings.
+# Docker settings.
docker_daemon:
+ default-shm-size: "1073741824"
dns: ["172.17.0.1"]
dns-opts: []
- dns-search: ["{{ansible_hostname}}"]
+ dns-search: ["{{ ansible_hostname }}"]
+ host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.16.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml
index 90a7f1b2ee..1c80c5d4a6 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.16.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml
@@ -1,8 +1,17 @@
---
-# file: host_vars/10.30.51.16.yaml
+# file: host_vars/10.30.51.31.yaml
-hostname: "s16-nomad"
-inventory_cimc_hostname: "10.30.50.16"
+hostname: "s31-t16-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu: "on"
+ vfio.enable_unsafe_noiommu_mode: 1
+inventory_ipmi_hostname: "10.30.50.31"
+vfs_data_file: "csit-initialize-vfs-spr.sh"
+cpu_microarchitecture: "sapphirerapids"
+
+intel_800_matrix: "dpdk22.03"
# User management.
users:
@@ -10,25 +19,27 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
-nomad_node_class: "builder"
+nomad_node_class: "csit"
nomad_options:
driver.raw_exec.enable: 1
docker.cleanup.image: false
@@ -36,6 +47,11 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -46,22 +62,36 @@ nomad_servers:
- "10.30.51.25:4647"
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_verify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
- "10.30.51.25"
consul_service_mgr: "systemd"
-# Docker daemon settings.
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
docker_daemon:
+ default-shm-size: "1073741824"
dns: ["172.17.0.1"]
dns-opts: []
- dns-search: ["{{ansible_hostname}}"]
+ dns-search: ["{{ ansible_hostname }}"]
+ host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.32.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.32.yaml
new file mode 100644
index 0000000000..f7d9c092e5
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.32.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.32.yaml
+
+hostname: "s32-t31-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=16 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-19,21-39"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-19,21-39"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-19,21-39"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,20"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.32"
+cpu_microarchitecture: "icelake"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+      - "5.15.0-72"
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.33.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.33.yaml
new file mode 100644
index 0000000000..c91d5e8d35
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.33.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.33.yaml
+
+hostname: "s33-t31-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=16 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-19,21-39"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-19,21-39"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-19,21-39"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,20"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.33"
+cpu_microarchitecture: "icelake"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+      - "5.15.0-72"
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.34.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.34.yaml
new file mode 100644
index 0000000000..0c26db6084
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.34.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.34.yaml
+
+hostname: "s34-t32-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=16 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-19,21-39"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-19,21-39"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-19,21-39"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,20"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.34"
+cpu_microarchitecture: "icelake"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+      - "5.15.0-72"
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.35.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.35.yaml
new file mode 100644
index 0000000000..d60b46c52a
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.35.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.35.yaml
+
+hostname: "s35-t32-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=16 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-19,21-39"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-19,21-39"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-19,21-39"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,20"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.35"
+cpu_microarchitecture: "icelake"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+      - "5.15.0-72"
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml
index c11c421769..1db367bd67 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml
@@ -6,6 +6,7 @@ grub:
audit: "0"
default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
iommu.passthrough: "1"
+ vfio.enable_unsafe_noiommu_mode: 1
isolcpus: "1-10,17-26,33-42,49-58"
nmi_watchdog: "0"
nohz_full: "1-10,17-26,33-42,49-58"
@@ -20,6 +21,3 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.36"
cpu_microarchitecture: "taishan"
-
-intel_700_matrix: "dpdk22.03"
-
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml
index ab3176ef78..7d3188251a 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml
@@ -6,6 +6,7 @@ grub:
audit: "0"
default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
iommu.passthrough: "1"
+ vfio.enable_unsafe_noiommu_mode: 1
isolcpus: "1-10,17-26,33-42,49-58"
nmi_watchdog: "0"
nohz_full: "1-10,17-26,33-42,49-58"
@@ -20,6 +21,3 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.37"
cpu_microarchitecture: "taishan"
-
-intel_700_matrix: "dpdk22.03"
-
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml
index 8d84fbdd0d..8b95603594 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml
@@ -11,9 +11,10 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.44.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.44.yaml
deleted file mode 100644
index e1f4ebbe3f..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.44.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-# file: host_vars/10.30.51.44.yaml
-
-hostname: "s3-t21-sut1"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0,28,56,84"
- vm:
- nr_hugepages: 32768
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.50.41"
-cpu_microarchitecture: "skylake"
-
-intel_700_matrix: "dpdk22.03"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.45.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.45.yaml
deleted file mode 100644
index a38f7f0845..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.45.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-# file: host_vars/10.30.51.45.yaml
-
-hostname: "s4-t21-tg1"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0,28,56,84"
- vm:
- nr_hugepages: 8192
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.50.42"
-cpu_microarchitecture: "skylake"
-
-intel_700_matrix: "dpdk21.02"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.46.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.46.yaml
deleted file mode 100644
index 55d38a2fd5..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.46.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-# file: host_vars/10.30.51.46.yaml
-
-hostname: "s11-t31-sut1"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0,28,56,84"
- vm:
- nr_hugepages: 32768
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.50.43"
-cpu_microarchitecture: "skylake"
-
-intel_700_matrix: "dpdk22.03"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.47.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.47.yaml
deleted file mode 100644
index 718201b151..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.47.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-# file: host_vars/10.30.51.47.yaml
-
-hostname: "s12-t31-sut2"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0,28,56,84"
- vm:
- nr_hugepages: 32768
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.50.44"
-cpu_microarchitecture: "skylake"
-
-intel_700_matrix: "dpdk22.03"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.48.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.48.yaml
deleted file mode 100644
index caa0f54b16..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.48.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-# file: host_vars/10.30.51.48.yaml
-
-hostname: "s13-t31-tg1"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0,28,56,84"
- vm:
- nr_hugepages: 8192
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.50.45"
-cpu_microarchitecture: "skylake"
-
-intel_700_matrix: "dpdk21.02"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml
index 85184b5540..3b9b63dfd0 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml
@@ -30,4 +30,4 @@ inventory_ipmi_hostname: "10.30.50.46"
cpu_microarchitecture: "skylake"
docker_tg: true
-intel_700_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml
index 7115292b1a..117c6d2c31 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml
@@ -18,21 +18,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
@@ -44,30 +46,47 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- - "10.32.8.15"
- - "10.32.8.16"
- - "10.32.8.17"
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
nomad_servers:
- - "10.32.8.15:4647"
- - "10.32.8.16:4647"
- - "10.32.8.17:4647"
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- - src: "{{ file_consul_ca_pem }}"
+ - src: "{{ file_consul_agent_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_verify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- - "10.32.8.15"
- - "10.32.8.16"
- - "10.32.8.17"
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker settings.
docker_daemon:
default-shm-size: "1073741824"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml
index a21eb85dcd..875b759675 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml
@@ -18,21 +18,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
@@ -44,30 +46,47 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- - "10.32.8.15"
- - "10.32.8.16"
- - "10.32.8.17"
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
nomad_servers:
- - "10.32.8.15:4647"
- - "10.32.8.16:4647"
- - "10.32.8.17:4647"
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- - src: "{{ file_consul_ca_pem }}"
+ - src: "{{ file_consul_agent_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- - "10.32.8.15"
- - "10.32.8.16"
- - "10.32.8.17"
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker settings.
docker_daemon:
default-shm-size: "1073741824"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml
index 2b1eeff967..8e8d3d39c2 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml
@@ -1,32 +1,74 @@
---
# file: host_vars/10.30.51.52.yaml
-hostname: "s5-t22-sut1"
+hostname: "s52-t21-sut1"
grub:
audit: "0"
default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
hpet: "disable"
intel_idle.max_cstate: "1"
- intel_iommu: "on"
+ intel_iommu: "on,sm_on"
intel_pstate: "disable"
iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
+ isolcpus: "1-31,33-63,65-95,97-127"
mce: "off"
nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
+ nohz_full: "1-31,33-63,65-95,97-127"
nosoftlockup: true
numa_balancing: "disable"
processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
tsc: "reliable"
sysctl:
kernel:
- watchdog_cpumask: "0,28,56,84"
+ watchdog_cpumask: "0,32,64,96"
vm:
nr_hugepages: 32768
max_map_count: 20000
-inventory_ipmi_hostname: "10.30.50.49"
-cpu_microarchitecture: "skylake"
+inventory_ipmi_hostname: "10.30.50.52"
+cpu_microarchitecture: "sapphirerapids"
-intel_700_matrix: "dpdk22.03"
+mellanox_matrix: "dpdk23.11"
+intel_dsa_matrix: true
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72"
+
+docker_sut: true
+docker_volumes:
+ - source: "/usr/bin/ofed_info"
+ target: "/usr/bin/ofed_info"
+ - source: "/dev/hugepages"
+ target: "/dev/hugepages"
+ - source: "/dev/vfio"
+ target: "/dev/vfio"
+ - source: "/etc/sudoers"
+ target: "/etc/sudoers"
+ - source: "/dev/null"
+ target: "/etc/sysctl.d/80-vpp.conf"
+ - source: "/opt/boot/"
+ target: "/opt/boot/"
+ - source: "/usr/bin/iperf3"
+ target: "/usr/bin/iperf3"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ - source: "/var/run/docker.sock"
+ target: "/var/run/docker.sock"
+ - source: "/usr/lib/firmware/"
+ target: "/usr/lib/firmware/"
+ - source: "/usr/local/bin/adf_ctl"
+ target: "/usr/local/bin/adf_ctl"
+ - source: "/etc/4xxx_dev0.conf"
+ target: "/etc/4xxx_dev0.conf"
+ - source: "/etc/4xxx_dev1.conf"
+ target: "/etc/4xxx_dev1.conf"
+ - source: "/etc/4xxx_dev2.conf"
+ target: "/etc/4xxx_dev2.conf"
+ - source: "/etc/4xxx_dev3.conf"
+ target: "/etc/4xxx_dev3.conf" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml
index 5806993388..ce07968323 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml
@@ -1,32 +1,65 @@
---
# file: host_vars/10.30.51.53.yaml
-hostname: "s6-t22-tg1"
+hostname: "s53-t21-tg1"
grub:
audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
hpet: "disable"
intel_idle.max_cstate: "1"
- intel_iommu: "on"
+ intel_iommu: "on,sm_on"
intel_pstate: "disable"
iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
+ isolcpus: "1-31,33-63,65-95,97-127"
mce: "off"
nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
+ nohz_full: "1-31,33-63,65-95,97-127"
nosoftlockup: true
numa_balancing: "disable"
processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
tsc: "reliable"
sysctl:
kernel:
- watchdog_cpumask: "0,28,56,84"
+ watchdog_cpumask: "0,32,64,96"
vm:
- nr_hugepages: 8192
+ nr_hugepages: 32768
max_map_count: 20000
-inventory_ipmi_hostname: "10.30.50.50"
-cpu_microarchitecture: "skylake"
+inventory_ipmi_hostname: "10.30.50.53"
+cpu_microarchitecture: "sapphirerapids"
-intel_700_matrix: "dpdk21.02"
+mellanox_matrix: "dpdk23.07"
+intel_dsa_matrix: true
+intel_qat_matrix: true
+
+docker_tg: true
+docker_volumes:
+ - source: "/usr/bin/ofed_info"
+ target: "/usr/bin/ofed_info"
+ - source: "/dev/hugepages"
+ target: "/dev/hugepages"
+ - source: "/dev/vfio"
+ target: "/dev/vfio"
+ - source: "/etc/sudoers"
+ target: "/etc/sudoers"
+ - source: "/opt/"
+ target: "/opt/"
+ - source: "/usr/bin/iperf3"
+ target: "/usr/bin/iperf3"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ - source: "/usr/lib/firmware/"
+ target: "/usr/lib/firmware/"
+ - source: "/usr/local/bin/adf_ctl"
+ target: "/usr/local/bin/adf_ctl"
+ - source: "/etc/4xxx_dev0.conf"
+ target: "/etc/4xxx_dev0.conf"
+ - source: "/etc/4xxx_dev1.conf"
+ target: "/etc/4xxx_dev1.conf"
+ - source: "/etc/4xxx_dev2.conf"
+ target: "/etc/4xxx_dev2.conf"
+ - source: "/etc/4xxx_dev3.conf"
+ target: "/etc/4xxx_dev3.conf" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml
index 50908d7df8..1fd8edd5dd 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml
@@ -1,32 +1,67 @@
---
# file: host_vars/10.30.51.54.yaml
-hostname: "s7-t23-sut1"
+hostname: "s54-t22-sut1"
grub:
audit: "0"
default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
hpet: "disable"
intel_idle.max_cstate: "1"
- intel_iommu: "on"
+ intel_iommu: "on,sm_on"
intel_pstate: "disable"
iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
+ isolcpus: "1-31,33-63,65-95,97-127"
mce: "off"
nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
+ nohz_full: "1-31,33-63,65-95,97-127"
nosoftlockup: true
numa_balancing: "disable"
processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
tsc: "reliable"
sysctl:
kernel:
- watchdog_cpumask: "0,28,56,84"
+ watchdog_cpumask: "0,32,64,96"
vm:
nr_hugepages: 32768
max_map_count: 20000
-inventory_ipmi_hostname: "10.30.50.51"
-cpu_microarchitecture: "skylake"
+inventory_ipmi_hostname: "10.30.50.54"
+cpu_microarchitecture: "sapphirerapids"
-intel_700_matrix: "dpdk22.03"
+intel_800_matrix: "dpdk23.11"
+intel_dsa_matrix: true
+intel_qat_matrix: true
+
+docker_sut: true
+docker_volumes:
+ - source: "/dev/hugepages"
+ target: "/dev/hugepages"
+ - source: "/dev/vfio"
+ target: "/dev/vfio"
+ - source: "/etc/sudoers"
+ target: "/etc/sudoers"
+ - source: "/dev/null"
+ target: "/etc/sysctl.d/80-vpp.conf"
+ - source: "/opt/boot/"
+ target: "/opt/boot/"
+ - source: "/usr/bin/iperf3"
+ target: "/usr/bin/iperf3"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ - source: "/var/run/docker.sock"
+ target: "/var/run/docker.sock"
+ - source: "/usr/lib/firmware/"
+ target: "/usr/lib/firmware/"
+ - source: "/usr/local/bin/adf_ctl"
+ target: "/usr/local/bin/adf_ctl"
+ - source: "/etc/4xxx_dev0.conf"
+ target: "/etc/4xxx_dev0.conf"
+ - source: "/etc/4xxx_dev1.conf"
+ target: "/etc/4xxx_dev1.conf"
+ - source: "/etc/4xxx_dev2.conf"
+ target: "/etc/4xxx_dev2.conf"
+ - source: "/etc/4xxx_dev3.conf"
+ target: "/etc/4xxx_dev3.conf" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml
index 9404e18ae0..1b9f9a56d3 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml
@@ -1,32 +1,63 @@
---
# file: host_vars/10.30.51.55.yaml
-hostname: "s8-t23-tg1"
+hostname: "s55-t22-tg1"
grub:
audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
hpet: "disable"
intel_idle.max_cstate: "1"
- intel_iommu: "on"
+ intel_iommu: "on,sm_on"
intel_pstate: "disable"
iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
+ isolcpus: "1-31,33-63,65-95,97-127"
mce: "off"
nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
+ nohz_full: "1-31,33-63,65-95,97-127"
nosoftlockup: true
numa_balancing: "disable"
processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
tsc: "reliable"
sysctl:
kernel:
- watchdog_cpumask: "0,28,56,84"
+ watchdog_cpumask: "0,32,64,96"
vm:
- nr_hugepages: 8192
+ nr_hugepages: 32768
max_map_count: 20000
-inventory_ipmi_hostname: "10.30.50.52"
-cpu_microarchitecture: "skylake"
+inventory_ipmi_hostname: "10.30.50.55"
+cpu_microarchitecture: "sapphirerapids"
-intel_700_matrix: "dpdk21.02"
+intel_800_matrix: "dpdk22.07"
+intel_dsa_matrix: true
+intel_qat_matrix: true
+
+docker_tg: true
+docker_volumes:
+ - source: "/dev/hugepages"
+ target: "/dev/hugepages"
+ - source: "/dev/vfio"
+ target: "/dev/vfio"
+ - source: "/etc/sudoers"
+ target: "/etc/sudoers"
+ - source: "/opt/"
+ target: "/opt/"
+ - source: "/usr/bin/iperf3"
+ target: "/usr/bin/iperf3"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ - source: "/usr/lib/firmware/"
+ target: "/usr/lib/firmware/"
+ - source: "/usr/local/bin/adf_ctl"
+ target: "/usr/local/bin/adf_ctl"
+ - source: "/etc/4xxx_dev0.conf"
+ target: "/etc/4xxx_dev0.conf"
+ - source: "/etc/4xxx_dev1.conf"
+ target: "/etc/4xxx_dev1.conf"
+ - source: "/etc/4xxx_dev2.conf"
+ target: "/etc/4xxx_dev2.conf"
+ - source: "/etc/4xxx_dev3.conf"
+ target: "/etc/4xxx_dev3.conf" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.56.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.56.yaml
new file mode 100644
index 0000000000..e8f5c55393
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.56.yaml
@@ -0,0 +1,36 @@
+---
+# file: host_vars/10.30.51.56.yaml
+
+hostname: "s56-t23-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.56"
+cpu_microarchitecture: "sapphirerapids"
+docker_sut: true
+
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
+intel_dsa_matrix: true
+intel_qat_matrix: true \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.57.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.57.yaml
new file mode 100644
index 0000000000..03817cdef9
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.57.yaml
@@ -0,0 +1,36 @@
+---
+# file: host_vars/10.30.51.57.yaml
+
+hostname: "s57-t23-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.57"
+cpu_microarchitecture: "sapphirerapids"
+docker_tg: true
+
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
+intel_dsa_matrix: true
+intel_qat_matrix: true \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml
index a100bcca8a..ecfced1823 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml
@@ -1,32 +1,36 @@
---
# file: host_vars/10.30.51.58.yaml
-hostname: "s14-t32-sut1"
+hostname: "s58-t24-sut1"
grub:
audit: "0"
default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
hpet: "disable"
intel_idle.max_cstate: "1"
- intel_iommu: "on"
+ intel_iommu: "on,sm_on"
intel_pstate: "disable"
iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
+ isolcpus: "1-31,33-63,65-95,97-127"
mce: "off"
nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
+ nohz_full: "1-31,33-63,65-95,97-127"
nosoftlockup: true
numa_balancing: "disable"
processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
tsc: "reliable"
sysctl:
kernel:
- watchdog_cpumask: "0,28,56,84"
+ watchdog_cpumask: "0,32,64,96"
vm:
nr_hugepages: 32768
max_map_count: 20000
-inventory_ipmi_hostname: "10.30.50.55"
-cpu_microarchitecture: "skylake"
+inventory_ipmi_hostname: "10.30.50.58"
+cpu_microarchitecture: "sapphirerapids"
+docker_sut: true
-intel_700_matrix: "dpdk22.03"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
+intel_dsa_matrix: true
+#intel_qat_matrix: true \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml
index be5d0b09ae..5b9cd9b98c 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml
@@ -1,32 +1,41 @@
---
# file: host_vars/10.30.51.59.yaml
-hostname: "s15-t32-sut2"
+hostname: "s59-t24-tg1"
grub:
audit: "0"
default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
hpet: "disable"
intel_idle.max_cstate: "1"
- intel_iommu: "on"
+ intel_iommu: "on,sm_on"
intel_pstate: "disable"
iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
+ isolcpus: "1-31,33-63,65-95,97-127"
mce: "off"
nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
+ nohz_full: "1-31,33-63,65-95,97-127"
nosoftlockup: true
numa_balancing: "disable"
processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
tsc: "reliable"
sysctl:
kernel:
- watchdog_cpumask: "0,28,56,84"
+ watchdog_cpumask: "0,32,64,96"
vm:
nr_hugepages: 32768
max_map_count: 20000
-inventory_ipmi_hostname: "10.30.50.56"
-cpu_microarchitecture: "skylake"
+inventory_ipmi_hostname: "10.30.50.59"
+cpu_microarchitecture: "sapphirerapids"
+docker_tg: true
-intel_700_matrix: "dpdk22.03"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
+intel_dsa_matrix: true
+#intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.60.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.60.yaml
deleted file mode 100644
index d340e39102..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.60.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-# file: host_vars/10.30.51.60.yaml
-
-hostname: "s16-t32-tg1"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0,28,56,84"
- vm:
- nr_hugepages: 8192
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.50.57"
-cpu_microarchitecture: "skylake"
-
-intel_700_matrix: "dpdk21.02"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml
index 07eb7be548..3b5bb0be8a 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml
@@ -28,13 +28,15 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
- username: testuser
groups: [adm, sudo]
password: "$6$zpBUdQ4q$P2zKclumvCndWujgP/qQ8eMk3YZk7ESAom04Fqp26hJH2jWkMXEX..jqxzMdDLJKiDaDHIaSkQMVjHzd3cRLs1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
-intel_700_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml
index 66df09b8bf..2337277144 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml
@@ -1,14 +1,16 @@
---
# file: host_vars/10.30.51.70.yaml
-hostname: "s55-t13-sut1"
-inventory_ipmi_hostname: "10.30.50.70"
-vfs_data_file: "csit-initialize-vfs-tx2.sh"
+hostname: "s70-t13-sut1"
grub:
hugepagesz: "2M"
hugepages: 32768
iommu.passthrough: "1"
-cpu_microarchitecture: "thunderx2"
+inventory_ipmi_hostname: "10.30.50.70"
+vfs_data_file: "csit-initialize-vfs-alt.sh"
+cpu_microarchitecture: "altra"
+
+intel_700_matrix: "dpdk22.07"
# User management.
users:
@@ -16,23 +18,25 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDJIAgAKD47mDur3n5jeHXzId2uUFHKa5kBiF649YQsdBKeQyfMVysvN9immKSTvwo4BUlwqeKQq5aIWWpjKMJU2/WXe4WU1YVyKLYCAlbkYJ3WuIBKQ/fm2wb8M4oXtgkYb+wEr5RkP48WqtIo3Cm/L+1j6k5jiu5E1hKBmdaY1er5OG9nCpOHfN3e+VkWwIjqdHFphB9NIMu2X+1iKDwOq4+sIX6POweVvcGFZJ8djB4RRtnkuH5W89x7k8IM4e2w0SK/5yKfxNfN3CzWSQ1dsqpQFPbry7z8Oy+56mlRs15bv5TU9IJ78aDpp/FbSZPfVfmTfwFLUBIHMtEjLUGBrGPQN8p32ap+6a9st5Qfh7rVhIGyB/4npLmar9Nw0lJNX9nmKiD119bkwyuWZjk4s2ELvCAw9RBJCHP8AxXnLgieqkBebn00zoGL/gdQTxXKDJGe3SEbOk56AkkIynB6I7prERvnbIhGI/ObwrNKtfKliiIKq3iWTdBP6BfCgAOqgD6320G2VdZyXyh3oXyM2AlFXzuA8zc8wpZraUCX9J/iMoxhELcL0gpDFO4HUKxTt+uU45uNNK0DkXw3GDF/lr+oYvzJ45jX0qMExF6EHaKfplZxW0Nt9rPT8pKi9BC8dzdSHXuunA1PshvEfc7mLMtz0QdOXOvomtM2Jv84lw== jieqiang.wang@arm.com"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDPsm7Ny+8QYyJ5JEECF0sntRbsF3jRqdSItPTgcbBEFwfNBVd0ulmmkwPUVrcJRMfGuzp3vA3Ss/BgutfvNo3WD5G+WECnOWXiTzroM34oZQ6awoZujxlQsNGBRsiGTPNay6oFoS2hIaW5OB/QHZwZH8HVYcc53oyM0uC72ItnCg5cvSS5v1XaoQby0pUsu2v5uSOm35XV/N2ishcF3sxfCjTMZEODCwYdcb1xOflzIWlIk7ZSDNzOlpmG/jZNDfc7V2GHvGz7WnBFkjkcVH86SEVcQmsc7yyQD1UUG/EZ5AA75vbH4vFye4cISTWpBZik5CbkElxvX9XrfFxtYEM/ tianyu.li@arm.com"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
@@ -44,34 +48,51 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- - "10.32.8.15"
- - "10.32.8.16"
- - "10.32.8.17"
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
nomad_servers:
- - "10.32.8.15:4647"
- - "10.32.8.16:4647"
- - "10.32.8.17:4647"
-nomad_cpu_total_compute: "40000"
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- - "10.32.8.15"
- - "10.32.8.16"
- - "10.32.8.17"
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker settings.
docker_daemon:
- dns: ["172.17.0.1"]
- dns-opts: []
- dns-search: ["{{ansible_hostname}}"]
- storage-driver: "overlay2"
+ default-shm-size: "1073741824"
+# dns: ["172.17.0.1"]
+# dns-opts: []
+# dns-search: ["{{ansible_hostname}}"]
+# host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml
index 9642b69efb..1d414b32c7 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml
@@ -1,14 +1,16 @@
---
# file: host_vars/10.30.51.71.yaml
-hostname: "s56-t14-sut1"
-inventory_ipmi_hostname: "10.30.50.71"
-vfs_data_file: "csit-initialize-vfs-tx2.sh"
+hostname: "s71-t14-sut1"
grub:
hugepagesz: "2M"
hugepages: 32768
iommu.passthrough: "1"
-cpu_microarchitecture: "thunderx2"
+inventory_ipmi_hostname: "10.30.50.71"
+vfs_data_file: "csit-initialize-vfs-alt.sh"
+cpu_microarchitecture: "altra"
+
+intel_700_matrix: "dpdk22.07"
# User management.
users:
@@ -16,23 +18,25 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDJIAgAKD47mDur3n5jeHXzId2uUFHKa5kBiF649YQsdBKeQyfMVysvN9immKSTvwo4BUlwqeKQq5aIWWpjKMJU2/WXe4WU1YVyKLYCAlbkYJ3WuIBKQ/fm2wb8M4oXtgkYb+wEr5RkP48WqtIo3Cm/L+1j6k5jiu5E1hKBmdaY1er5OG9nCpOHfN3e+VkWwIjqdHFphB9NIMu2X+1iKDwOq4+sIX6POweVvcGFZJ8djB4RRtnkuH5W89x7k8IM4e2w0SK/5yKfxNfN3CzWSQ1dsqpQFPbry7z8Oy+56mlRs15bv5TU9IJ78aDpp/FbSZPfVfmTfwFLUBIHMtEjLUGBrGPQN8p32ap+6a9st5Qfh7rVhIGyB/4npLmar9Nw0lJNX9nmKiD119bkwyuWZjk4s2ELvCAw9RBJCHP8AxXnLgieqkBebn00zoGL/gdQTxXKDJGe3SEbOk56AkkIynB6I7prERvnbIhGI/ObwrNKtfKliiIKq3iWTdBP6BfCgAOqgD6320G2VdZyXyh3oXyM2AlFXzuA8zc8wpZraUCX9J/iMoxhELcL0gpDFO4HUKxTt+uU45uNNK0DkXw3GDF/lr+oYvzJ45jX0qMExF6EHaKfplZxW0Nt9rPT8pKi9BC8dzdSHXuunA1PshvEfc7mLMtz0QdOXOvomtM2Jv84lw== jieqiang.wang@arm.com"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDPsm7Ny+8QYyJ5JEECF0sntRbsF3jRqdSItPTgcbBEFwfNBVd0ulmmkwPUVrcJRMfGuzp3vA3Ss/BgutfvNo3WD5G+WECnOWXiTzroM34oZQ6awoZujxlQsNGBRsiGTPNay6oFoS2hIaW5OB/QHZwZH8HVYcc53oyM0uC72ItnCg5cvSS5v1XaoQby0pUsu2v5uSOm35XV/N2ishcF3sxfCjTMZEODCwYdcb1xOflzIWlIk7ZSDNzOlpmG/jZNDfc7V2GHvGz7WnBFkjkcVH86SEVcQmsc7yyQD1UUG/EZ5AA75vbH4vFye4cISTWpBZik5CbkElxvX9XrfFxtYEM/ tianyu.li@arm.com"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
@@ -44,34 +48,51 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- - "10.32.8.15"
- - "10.32.8.16"
- - "10.32.8.17"
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
nomad_servers:
- - "10.32.8.15:4647"
- - "10.32.8.16:4647"
- - "10.32.8.17:4647"
-nomad_cpu_total_compute: "40000"
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
# Consul settigs.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- src: "{{ file_consul_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_verify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- - "10.32.8.15"
- - "10.32.8.16"
- - "10.32.8.17"
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
consul_service_mgr: "systemd"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker settings.
docker_daemon:
- dns: ["172.17.0.1"]
- dns-opts: []
- dns-search: ["{{ansible_hostname}}"]
- storage-driver: "overlay2"
+ default-shm-size: "1073741824"
+# dns: ["172.17.0.1"]
+# dns-opts: []
+# dns-search: ["{{ansible_hostname}}"]
+# host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml
index a8193343e0..b7c8c26aae 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml
@@ -22,4 +22,5 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.72"
cpu_microarchitecture: "altra"
-intel_700_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml
index ebddd86462..0811b038b7 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml
@@ -22,4 +22,5 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.73"
cpu_microarchitecture: "altra"
-intel_700_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml
index 40dfa1e6b3..473e4a9a5e 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml
@@ -29,5 +29,5 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.74"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml
index ed651e973d..a96f087643 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.75"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml
index 6d9f0f4940..c1ddcf5a58 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.76"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml
index 1f98f1c8aa..e447ed2c81 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.77"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml
index 71d4245580..88f36b3880 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.78"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml
index d57803d525..37d6a18b2c 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.79"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml
index b1a5404498..dcb87d1a7c 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.80"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml
index f70c121bc3..e984947235 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.81"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml
index 828189d45d..5e6160e3ec 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.82"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml
index a1c5a81233..ac936cd89d 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.83"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml
index c9e998b8fa..05877b59e9 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.84"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml
index 31373e558c..0d61c87e4f 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.85"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml
index 24f0f517af..f40b86bd6b 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.86"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml
index a00eab9c26..2c767d6795 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.87"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml
index 3ab76b6952..313c316752 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml
@@ -29,5 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.88"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml
index ba44916ab0..7ab0d8ab68 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml
@@ -29,5 +29,5 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.89"
cpu_microarchitecture: "icelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.90.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.90.yaml
new file mode 100644
index 0000000000..dfc36904f8
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.90.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.90.yaml
+
+hostname: "s90-t31t32-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.90"
+cpu_microarchitecture: "icelake"
+docker_tg: true
+
+intel_800_matrix: "dpdk23.07"
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml
index d06284d5a4..53239492ef 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml
@@ -11,21 +11,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
@@ -37,6 +39,11 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -47,14 +54,23 @@ nomad_servers:
- "10.30.51.25:4647"
# Consul settings.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- - src: "{{ file_consul_ca_pem }}"
+ - src: "{{ file_consul_agent_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_verify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -62,6 +78,9 @@ consul_retry_servers:
consul_service_mgr: "systemd"
#consul_package_version: "1.5.2+dfsg2-14"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker daemon settings.
docker_daemon:
dns: ["172.17.0.1"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml
index 705409fb6b..19ec70ce83 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml
@@ -11,21 +11,23 @@ users:
groups: [adm, sudo]
password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
ssh_key:
- - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
sshd_disable_password_login: true
# Nomad settings.
+nomad_version: "1.6.1"
nomad_certificates:
- src: "{{ file_nomad_ca_pem }}"
- dest: "{{ nomad_ca_file }}"
+ dest: "{{ nomad_tls_ca_file }}"
- src: "{{ file_nomad_client_pem }}"
- dest: "{{ nomad_cert_file }}"
+ dest: "{{ nomad_tls_cert_file }}"
- src: "{{ file_nomad_client_key_pem }}"
- dest: "{{ nomad_key_file }}"
+ dest: "{{ nomad_tls_key_file }}"
nomad_datacenter: "yul1"
nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
nomad_node_role: "client"
@@ -37,6 +39,11 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
nomad_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -47,14 +54,23 @@ nomad_servers:
- "10.30.51.25:4647"
# Consul settings.
-consul_nomad_integration: true
+nomad_use_consul: true
consul_certificates:
- - src: "{{ file_consul_ca_pem }}"
+ - src: "{{ file_consul_agent_ca_pem }}"
dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_verify_server_hostname: false
+consul_allow_tls: false
consul_datacenter: "yul1"
-consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
-consul_node_name: "{{ hostname }}"
consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
consul_retry_servers:
- "10.30.51.23"
- "10.30.51.24"
@@ -62,6 +78,9 @@ consul_retry_servers:
consul_service_mgr: "systemd"
#consul_package_version: "1.5.2+dfsg2-14"
+# Vault settings.
+vault_version: "1.13.1"
+
# Docker daemon settings.
docker_daemon:
dns: ["172.17.0.1"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml
index 624945341b..cf4816a5f8 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml
@@ -29,5 +29,5 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.93"
cpu_microarchitecture: "snowridge"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml
index 02bf62e674..d663cda18b 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml
@@ -29,5 +29,5 @@ sysctl:
inventory_ipmi_hostname: "10.30.50.94"
cpu_microarchitecture: "snowridge"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.10.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.10.yaml
deleted file mode 100644
index 49abf934b5..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.10.yaml
+++ /dev/null
@@ -1,33 +0,0 @@
----
-# file: host_vars/10.32.8.10.yaml
-
-hostname: "s28-t26t35-tg1"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=16484"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-27,29-55,57-83,85-111"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-27,29-55,57-83,85-111"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-27,29-55,57-83,85-111"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0,28,56,84"
- vm:
- nr_hugepages: 16384
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.55.10"
-cpu_microarchitecture: "skylake"
-docker_tg: true
-
-intel_700_matrix: "dpdk21.02"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.11.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.11.yaml
deleted file mode 100644
index e21568d8ca..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.11.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-# file: host_vars/10.32.8.11.yaml
-
-hostname: "s29-t26-sut1"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-5"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-5"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-5"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0"
- vm:
- nr_hugepages: 8192
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.55.11"
-cpu_microarchitecture: "denverton"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.12.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.12.yaml
deleted file mode 100644
index 9c6c0b7571..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.12.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-# file: host_vars/10.32.8.12.yaml
-
-hostname: "s30-t35-sut1"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-5"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-5"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-5"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0"
- vm:
- nr_hugepages: 8192
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.55.12"
-cpu_microarchitecture: "denverton"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.13.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.13.yaml
deleted file mode 100644
index 90554042cc..0000000000
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.13.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-# file: host_vars/10.32.8.13.yaml
-
-hostname: "s31-t35-sut2"
-grub:
- audit: "0"
- default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
- hpet: "disable"
- intel_idle.max_cstate: "1"
- intel_iommu: "on"
- intel_pstate: "disable"
- iommu: "pt"
- isolcpus: "1-5"
- mce: "off"
- nmi_watchdog: "0"
- nohz_full: "1-5"
- nosoftlockup: true
- numa_balancing: "disable"
- processor.max_cstate: "1"
- rcu_nocbs: "1-5"
- tsc: "reliable"
-sysctl:
- kernel:
- watchdog_cpumask: "0"
- vm:
- nr_hugepages: 8192
- max_map_count: 20000
-
-inventory_ipmi_hostname: "10.30.55.13"
-cpu_microarchitecture: "denverton"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml
index 18ba2bfbfb..4423a36f56 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml
@@ -29,6 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.55.18"
cpu_microarchitecture: "cascadelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
-mellanox_cx5_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml
index 5ff676eb21..22210ae4bd 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml
@@ -29,6 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.55.19"
cpu_microarchitecture: "cascadelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
-mellanox_cx5_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml
index 947242ed92..36ba5c15f8 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml
@@ -29,6 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.55.20"
cpu_microarchitecture: "cascadelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
-mellanox_cx5_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml
index c4b626897e..61c6f51b06 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml
@@ -29,6 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.55.21"
cpu_microarchitecture: "cascadelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
-mellanox_cx5_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml
index 5c51159813..ae2947f54a 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml
@@ -29,6 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.55.22"
cpu_microarchitecture: "cascadelake"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
-mellanox_cx5_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml
index cfa84c784e..a3f19b4249 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml
@@ -29,6 +29,6 @@ sysctl:
inventory_ipmi_hostname: "10.30.55.23"
cpu_microarchitecture: "cascadelake"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
-mellanox_cx5_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml
index c0046ec862..accb8c5dc9 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml
@@ -26,6 +26,5 @@ sysctl:
inventory_ipmi_hostname: "10.30.55.24"
cpu_microarchitecture: "epyc"
-intel_700_matrix: "dpdk22.03"
-intel_800_matrix: "dpdk22.03"
-mellanox_cx5_matrix: "dpdk22.03"
+intel_700_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml
index 639e37bb20..629538fa34 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml
@@ -26,6 +26,5 @@ sysctl:
inventory_ipmi_hostname: "10.30.55.25"
cpu_microarchitecture: "epyc"
-intel_700_matrix: "dpdk21.02"
-intel_800_matrix: "dpdk21.02"
-mellanox_cx5_matrix: "dpdk21.02"
+intel_700_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/hosts b/fdio.infra.ansible/inventories/lf_inventory/hosts
index 76261d70e3..b7109a1261 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/hosts
+++ b/fdio.infra.ansible/inventories/lf_inventory/hosts
@@ -2,12 +2,11 @@ all:
children:
tg:
hosts:
- 10.30.51.45: #s4-t21-tg1 - skylake
- 10.30.51.48: #s13-t31-tg1 - skylake
10.30.51.49: #s19-t33t211-tg1 - skylake
- 10.30.51.53: #s6-t22-tg1 - skylake
- 10.30.51.55: #s8-t23-tg1 - skylake
- 10.30.51.60: #s16-t32-tg1 - skylake
+ 10.30.51.53: #s53-t21-tg1 - sapphirerapids
+ 10.30.51.55: #s55-t22-tg1 - sapphirerapids
+ 10.30.51.57: #s57-t23-tg1 - sapphirerapids
+ 10.30.51.59: #s59-t24-tg1 - sapphirerapids
10.30.51.74: #s64-t34-tg1 - icelake
10.30.51.77: #s67-t37-tg1 - icelake
10.30.51.80: #s80-t38-tg1 - icelake
@@ -16,70 +15,60 @@ all:
10.30.51.86: #s86-t214-tg1 - icelake
10.30.51.88: #s88-t215-tg1 - icelake
10.30.51.89: #s89-t39t310-tg1 - icelake
- 10.32.8.10: #s28-t26t35-tg1 - skylake
+ 10.30.51.90: #s90-t31t32-tg1 - icelake
10.32.8.19: #s34-t27-tg1 - cascadelake
10.32.8.21: #s36-t28-tg1 - cascadelake
10.32.8.23: #s38-t29-tg1 - cascadelake
10.32.8.25: #s61-t210-tg1 - epyc
sut:
hosts:
+      10.30.51.32: #s32-t31-sut1 - icelake
+      10.30.51.33: #s33-t31-sut2 - icelake
+      10.30.51.34: #s34-t32-sut1 - icelake
+      10.30.51.35: #s35-t32-sut2 - icelake
10.30.51.36: #s17-t33-sut1 - taishan
10.30.51.37: #s18-t33-sut2 - taishan
- 10.30.51.44: #s3-t21-sut1 - skylake
- 10.30.51.46: #s11-t31-sut1 - skylake
- 10.30.51.47: #s12-t31-sut2 - skylake
- 10.30.51.52: #s5-t22-sut1 - skylake
- 10.30.51.54: #s7-t23-sut1 - skylake
- 10.30.51.58: #s14-t32-sut1 - skylake
- 10.30.51.59: #s15-t32-sut2 - skylake
+ 10.30.51.52: #s52-t21-sut1 - sapphirerapids
+ 10.30.51.54: #s54-t22-sut1 - sapphirerapids
+ 10.30.51.56: #s56-t23-sut1 - sapphirerapids
+ 10.30.51.58: #s58-t24-sut1 - sapphirerapids
10.30.51.72: #s62-t34-sut1 - altra
10.30.51.73: #s63-t34-sut2 - altra
10.30.51.75: #s65-t37-sut1 - icelake
10.30.51.76: #s66-t37-sut2 - icelake
10.30.51.78: #s78-t38-sut1 - icelake
10.30.51.79: #s79-t38-sut2 - icelake
- 10.30.51.81: #s71-t212-sut1 - icelake
+ 10.30.51.81: #s81-t212-sut1 - icelake
10.30.51.83: #s83-t213-sut1 - icelake
10.30.51.85: #s85-t214-sut1 - icelake
10.30.51.87: #s87-t215-sut1 - icelake
10.30.51.93: #s93-t39-sut1 - snowridge
10.30.51.94: #s94-t39-sut2 - snowridge
- 10.32.8.11: #s29-t26-sut1 - denverton
- 10.32.8.12: #s30-t35-sut1 - denverton
- 10.32.8.13: #s31-t35-sut2 - denverton
10.32.8.18: #s33-t27-sut1 - cascadelake
10.32.8.20: #s35-t28-sut1 - cascadelake
10.32.8.22: #s37-t29-sut1 - cascadelake
10.32.8.24: #s60-t210-sut1 - epyc
10.30.51.69: #s27-t211-sut1 - thunderx2 9975
vpp_device:
- # Note: vpp_device hosts are also nomad client hosts
hosts:
+ 10.30.51.30: #s30-t15-sut1 - sapphirerapids
+ 10.30.51.31: #s31-t16-sut1 - sapphirerapids
10.30.51.50: #s1-t11-sut1 - skylake
10.30.51.51: #s2-t12-sut1 - skylake
10.30.51.70: #s55-t13-sut1 - thunderx2 9980
10.30.51.71: #s56-t14-sut1 - thunderx2 9980
nomad:
hosts:
- 10.30.51.16: #s16-nomad - haswell
- 10.30.51.17: #s17-nomad - haswell
- 10.30.51.18: #s18-nomad - haswell
- 10.30.51.19: #s19-nomad - haswell
- 10.30.51.20: #s20-nomad - haswell
- 10.30.51.21: #s21-nomad - haswell
- 10.30.51.22: #s22-nomad - haswell
+ 10.30.51.21: #s21-nomad - sapphirerapids
+ 10.30.51.22: #s22-nomad - sapphirerapids
10.30.51.23: #s23-nomad - skylake
10.30.51.24: #s24-nomad - skylake
10.30.51.25: #s25-nomad - skylake
10.30.51.26: #s26-nomad - skylake
- 10.30.51.39: #s53-nomad - thunderx 88xx
- 10.30.51.40: #s54-nomad - thunderx 88xx
- 10.30.51.65: #s52-nomad - thunderx 88xx
- 10.30.51.66: #s51-nomad - thunderx 88xx
- 10.30.51.67: #s49-nomad - thunderx 88xx
- 10.30.51.68: #s50-nomad - thunderx 88xx
+ 10.30.51.27: #s27-nomad - skylake
+ 10.30.51.28: #s28-nomad - skylake
10.30.51.91: #s58-nomad - neoverse n1
10.30.51.92: #s59-nomad - neoverse n1
dev:
hosts:
- 10.30.51.38: #fdio-marvell-dev - thunderx 88xx
+ 10.30.51.38: #fdio-marvell-dev - thunderx 88xx \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml b/fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml
index f9938d20da..3a5ab66d8b 100644
--- a/fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml
+++ b/fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml
@@ -1,5 +1,2 @@
---
# file: sample_inventory/group_vars/all.yaml
-
-# Ansible interpreter (for PIP)
-ansible_python_interpreter: "python3"
diff --git a/fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml b/fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml
index c17eae8fb4..af44a3354d 100644
--- a/fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml
+++ b/fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml
@@ -1,5 +1,2 @@
---
# file: vagrant_inventory/group_vars/all.yaml
-
-# Ansible interpreter (for PIP)
-ansible_python_interpreter: "/usr/bin/python3"
diff --git a/fdio.infra.ansible/roles/ab/defaults/main.yaml b/fdio.infra.ansible/roles/ab/defaults/main.yaml
index 6e9dfbcfce..adabf1464c 100644
--- a/fdio.infra.ansible/roles/ab/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/ab/defaults/main.yaml
@@ -8,8 +8,6 @@ packages_base:
packages_by_distro:
ubuntu:
- focal:
- - "apache2-utils"
jammy:
- "apache2-utils"
diff --git a/fdio.infra.ansible/roles/ab/tasks/main.yaml b/fdio.infra.ansible/roles/ab/tasks/main.yaml
index c69ed25a75..2a70fd1d1d 100644
--- a/fdio.infra.ansible/roles/ab/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/ab/tasks/main.yaml
@@ -2,7 +2,7 @@
# file: roles/ab/tasks/main.yaml
- name: Inst - Update Package Cache (APT)
- apt:
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -11,7 +11,7 @@
- ab-inst-prerequisites
- name: Inst - Apache ab tools
- package:
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: present
tags:
diff --git a/fdio.infra.ansible/roles/aws/defaults/main.yaml b/fdio.infra.ansible/roles/aws/defaults/main.yaml
index d4ea91afd4..5b6978da51 100644
--- a/fdio.infra.ansible/roles/aws/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/aws/defaults/main.yaml
@@ -1,2 +1,26 @@
---
-# file: roles/aws/defaults/main.yaml
+# file: defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower] + packages_by_arch[ansible_machine] }}"
+
+packages_repo:
+ ubuntu:
+ aarch64:
+ "http://ports.ubuntu.com/"
+ x86_64:
+ "http://archive.ubuntu.com/ubuntu"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ - "linux-image-5.4.0-1009-aws"
+ - "linux-headers-5.4.0-1009-aws"
+ - "linux-tools-5.4.0-1009-aws"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - [] \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/aws/files/get-vfio-with-wc.sh b/fdio.infra.ansible/roles/aws/files/get-vfio-with-wc.sh
new file mode 100644
index 0000000000..02a3139b66
--- /dev/null
+++ b/fdio.infra.ansible/roles/aws/files/get-vfio-with-wc.sh
@@ -0,0 +1,203 @@
+#!/usr/bin/env bash
+# Enable WC in VFIO-PCI driver
+# Tested on:
+# * Amazon Linux 2 AMI (HVM), SSD Volume Type - ami-0bb3fad3c0286ebd5
+# * Amazon Linux AMI 2018.03.0 (HVM), SSD Volume Type - ami-015232c01a82b847b
+# * Red Hat Enterprise Linux 8 (HVM), SSD Volume Type - ami-08f4717d06813bf00
+# * Ubuntu Server 20.04 LTS (HVM), SSD Volume Type - ami-06fd8a495a537da8b
+# * Ubuntu Server 18.04 LTS (HVM), SSD Volume Type - ami-0823c236601fef765
+
+set -e
+
+TMP_DIR="tmp"
+
+# Kernel modules location:
+P1="/usr/lib/modules/`uname -r`/kernel/drivers/vfio"
+P2="/lib/modules/`uname -r`/kernel/drivers/vfio"
+
+# This may return an error if executed from inside the script
+set +e
+RED="$(tput setaf 1)"
+GREEN="$(tput setaf 2)"
+
+BOLD="$(tput bold)"
+NORMAL="$(tput sgr0)"
+set -e
+
+function bold {
+ echo -e "${BOLD}${@}${NORMAL}"
+}
+
+function err {
+ bold "${RED}ERROR: ${@}"
+}
+
+function green {
+ bold "${GREEN}${@}"
+}
+
+function get_kernel_version {
+ local ver=$(uname -r | cut -f 1 -d '-')
+ local ver_major=$(echo $ver | cut -f1 -d '.')
+ local ver_minor=$(echo $ver | cut -f2 -d '.')
+ local ver_subminor=$(echo $ver | cut -f3 -d '.')
+
+ printf "%d%02d%04d" "${ver_major}" "${ver_minor}" "${ver_subminor}"
+}
+
+function download_kernel_src_yum {
+ echo "Use yum to get the kernel sources"
+
+ bold "\nInstall required applications and kernel headers"
+ yum install -y gcc "kernel-$(uname -r)" "kernel-devel-$(uname -r)" \
+ git make elfutils-libelf-devel patch yum-utils
+ green Done
+
+ # Download kernel source
+ bold "\nDownload kernel source with vfio"
+ yumdownloader --source "kernel-devel-$(uname -r)"
+ rpm2cpio kernel*.src.rpm | cpio -idmv
+ green Done
+
+ rm -f *patches.tar
+ tar xf linux-*.tar*
+ rm -f linux-*.tar* linux-*.patch
+}
+
+function download_kernel_src_apt {
+ echo "Use apt-get to get the kernel sources"
+ apt-get -q -y update
+ green Done
+
+ bold "\nInstall required applications"
+ apt-get -q -y install dpkg-dev build-essential git
+ green Done
+
+ bold "\nDownload Linux kernel source with vfio"
+ if ! apt-get -q -y source -t focal linux-image-$(uname -r); then
+ err "Cannot download Linux kernel source.\nPlease uncomment appropriate 'deb-src' line in the /etc/apt/sources.list file"
+ exit 1
+ fi
+ green Done
+
+ rm -f linux-*.dsc linux-*.gz
+}
+
+function download_kernel_src {
+ bold "[1] Downloading prerequisites..."
+ rm -rf "${TMP_DIR}"
+ mkdir -p "${TMP_DIR}"
+ cd "${TMP_DIR}"
+
+ if apt-get -v >/dev/null 2>/dev/null; then
+ download_kernel_src_apt
+ else
+ download_kernel_src_yum
+ fi
+ cd linux-*
+}
+
+function apply_wc_patch {
+ echo "Using patch for kernel version 4.10"
+ local wc_patch="${BASE_PATH}/patches/linux-4.10-vfio-wc.patch"
+
+ if ! patch --ignore-whitespace -p1 < "${wc_patch}"; then
+ err "Cannot apply patch: ${wc_patch}!"
+ exit 1
+ fi
+}
+
+function compile_vfio_driver {
+ bold "\n[2] Patch and build the vfio driver"
+ # Adjust VFIO-PCI driver
+
+ bold "Apply patch for the write combining to the vfio-pci"
+ apply_wc_patch
+ green Done
+
+ cd drivers/vfio
+ # Configure Makefile - build VFIO with support for NOIOMMU mode
+ bold "\nConfigure Makefile for standalone vfio build and noiommu mode support"
+ echo "ccflags-y := -DCONFIG_VFIO_NOIOMMU=1" >> Makefile
+ echo 'all:' >> Makefile
+ echo ' make -C /lib/modules/$(shell uname -r)/build M=$(PWD) modules' >> Makefile
+ green Done
+
+ bold "\nBuild the driver"
+ if ! make; then
+ err "Compilation error."
+ exit 1
+ fi
+ green Done
+}
+
+function get_module_location {
+ for p in ${P1} ${P2}; do
+ if find "${p}" -name "vfio.*" >/dev/null 2>/dev/null; then
+ MOD_PATH="${p}"
+ break
+ fi
+ done
+
+ if [ -z "${MOD_PATH}" ]; then
+ err "Cannot find kernel modules location..."
+ exit
+ fi
+}
+
+function get_module_compression {
+ if ls "${MOD_PATH}/vfio.ko.xz" >/dev/null 2>/dev/null; then
+ XZ=".xz"
+ else
+ XZ=""
+ fi
+}
+
+function replace_module {
+ local installed=0
+
+ bold "\n[3] Install module"
+ get_module_location
+ get_module_compression
+
+ for name in "pci/vfio-pci.ko" "pci/vfio-pci-core.ko" "vfio.ko"; do
+ if test -e "${MOD_PATH}/${name}${XZ}"; then
+ if [ -n "${XZ}" ]; then
+ xz "${name}" -c > "${name}${XZ}"
+ fi
+ mv "${MOD_PATH}/${name}${XZ}" "${MOD_PATH}/${name}${XZ}_no_wc"
+ cp "${name}${XZ}" "${MOD_PATH}/${name}${XZ}"
+ bold "Installing: ${MOD_PATH}/${name}${XZ}"
+ installed=1
+ fi
+ done
+ if [ "${installed}" -eq 1 ]; then
+ green "Module installed at: ${MOD_PATH}"
+ else
+    err "Failure during vfio-pci module installation. Perhaps it's not provided as a kernel module!"
+ exit 1
+ fi
+}
+
+###############################################
+# Main script code
+###############################################
+
+if [ "$(id -u)" -ne 0 ]; then
+ err 'Please execute script as a root'
+ exit 1
+fi
+
+cd $(dirname ${0})
+BASE_PATH=$(pwd)
+
+KERNEL_VERSION=$(get_kernel_version)
+
+if [ "${KERNEL_VERSION}" -lt 4100000 ]; then
+ err "Kernel version: $(uname -r) is not supported by the script. Please upgrade kernel to at least v4.10."
+ exit 1
+fi
+
+download_kernel_src
+compile_vfio_driver
+replace_module
diff --git a/fdio.infra.ansible/roles/aws/handlers/main.yaml b/fdio.infra.ansible/roles/aws/handlers/main.yaml
index 7363dc2c34..d55db1c22f 100644
--- a/fdio.infra.ansible/roles/aws/handlers/main.yaml
+++ b/fdio.infra.ansible/roles/aws/handlers/main.yaml
@@ -1,15 +1,20 @@
---
# file: roles/aws/handlers/main.yaml
-- name: Reboot server
- reboot:
- reboot_timeout: 3600
- tags:
- - reboot-server
-
-- name: AWS - Reload systemd-modules
+- name: Reload systemd-modules
systemd:
name: "systemd-modules-load"
state: "restarted"
tags:
- reload-systemd-modules
+
+- name: Update GRUB
+ ansible.builtin.command: update-grub
+ tags:
+ - update-grub
+
+- name: Reboot Server
+ ansible.builtin.reboot:
+ reboot_timeout: 3600
+ tags:
+ - reboot-server
diff --git a/fdio.infra.ansible/roles/aws/tasks/main.yaml b/fdio.infra.ansible/roles/aws/tasks/main.yaml
index 2b8c22ccce..b5132c1909 100644
--- a/fdio.infra.ansible/roles/aws/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/aws/tasks/main.yaml
@@ -1,60 +1,43 @@
---
-# file: roles/aws/tasks/main.yaml
+# file: tasks/main.yaml
- name: Edit repositories
include_tasks: "{{ ansible_distribution|lower }}_{{ ansible_distribution_release }}.yaml"
tags:
- aws-edit-repo
-- name: Get vfio-pci With WC Patcher
- get_url:
- url: "https://github.com/amzn/amzn-drivers/raw/master/userspace/dpdk/enav2-vfio-patch/get-vfio-with-wc.sh"
- dest: "/opt/get-vfio-with-wc.sh"
- mode: "744"
+- name: Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: "latest"
tags:
- - aws-vfio-patch
-
-- name: Create vfio-pci Patch Directory
- file:
- path: "/opt/patches/"
- state: "directory"
- tags:
- - aws-vfio-patch
-
-- name: Get vfio-pci WC Patch >=4.10
- get_url:
- url: "https://github.com/amzn/amzn-drivers/raw/master/userspace/dpdk/enav2-vfio-patch/patches/linux-4.10-vfio-wc.patch"
- dest: "/opt/patches/linux-4.10-vfio-wc.patch"
- mode: "744"
- tags:
- - aws-vfio-patch
+ - aws-inst-prerequisites
-- name: Get vfio-pci WC Patch >=5.8
- get_url:
- url: "https://github.com/amzn/amzn-drivers/raw/master/userspace/dpdk/enav2-vfio-patch/patches/linux-5.8-vfio-wc.patch"
- dest: "/opt/patches/linux-5.8-vfio-wc.patch"
- mode: "744"
+- name: Switch Kernel At Boot
+ ansible.builtin.lineinfile:
+ path: "/etc/default/grub"
+ state: "present"
+ line: "GRUB_DEFAULT=\"1>2\""
+ notify:
+ - "Update GRUB"
tags:
- - aws-vfio-patch
+ - perf-conf-grub
-- name: Compile vfio-pci With WC Patch
- shell: "/bin/bash /opt/get-vfio-with-wc.sh"
- tags:
- - aws-vfio-patch
+- meta: flush_handlers
- name: Load Kernel Modules By Default
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/modules"
state: "present"
line: "{{ item }}"
with_items:
- - "vfio-pci"
- "igb_uio"
+ - "vfio-pci"
tags:
- aws-load-kernel-modules
- name: Add Kernel Modules Options (igb_uio)
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/modprobe.d/igb_uio.conf"
state: "present"
line: "{{ item }}"
@@ -65,7 +48,7 @@
- aws-load-kernel-modules
- name: Add Kernel Modules Options (vfio-pci)
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/modprobe.d/vfio-noiommu.conf"
state: "present"
line: "{{ item }}"
@@ -75,24 +58,67 @@
tags:
- aws-load-kernel-modules
+#- name: Get vfio-pci With WC Patcher
+# ansible.builtin.get_url:
+# url: "https://github.com/amzn/amzn-drivers/raw/master/userspace/dpdk/enav2-vfio-patch/get-vfio-with-wc.sh"
+# dest: "/opt/get-vfio-with-wc.sh"
+# mode: 0744
+# tags:
+# - aws-vfio-patch
+
+- name: Create vfio-pci Patch Directory
+ ansible.builtin.file:
+ path: "/opt/patches/"
+ state: "directory"
+ tags:
+ - aws-vfio-patch
+
+- name: Get vfio-pci WC Patch
+ ansible.builtin.get_url:
+ url: "https://github.com/amzn/amzn-drivers/raw/master/userspace/dpdk/enav2-vfio-patch/patches/{{ item }}"
+ dest: "/opt/patches/{{ item }}"
+ mode: 0744
+ with_items:
+ - "linux-4.10-vfio-wc.patch"
+ - "linux-5.8-vfio-wc.patch"
+ - "linux-5.15-vfio-wc.patch"
+ tags:
+ - aws-vfio-patch
+
+- name: Copy vfio-pci WC Patch
+ ansible.builtin.copy:
+ src: "files/get-vfio-with-wc.sh"
+ dest: "/opt"
+ mode: 0744
+ tags:
+ - aws-vfio-patch
+
+- name: Compile vfio-pci With WC Patch
+ ansible.builtin.shell: "/bin/bash /opt/get-vfio-with-wc.sh"
+ environment:
+ DEBIAN_FRONTEND: "noninteractive"
+ TERM: "vt100"
+ tags:
+ - aws-vfio-patch
+
- name: Reload systemd-modules
- systemd:
+ ansible.builtin.systemd:
name: "systemd-modules-load"
state: "restarted"
tags:
- aws-reload-systemd-modules
- name: Adjust nr_hugepages
- sysctl:
+ ansible.builtin.sysctl:
name: "vm.nr_hugepages"
value: "8192"
state: "present"
sysctl_file: "/etc/sysctl.d/90-csit.conf"
- reload: "yes"
+ reload: true
tags:
- aws-set-hugepages
- name: Shutdown host with delay
- command: "/sbin/shutdown -P +720"
+ ansible.builtin.command: "/sbin/shutdown -P +720"
tags:
- aws-set-self-terminate
diff --git a/fdio.infra.ansible/roles/aws/tasks/ubuntu_focal.yaml b/fdio.infra.ansible/roles/aws/tasks/ubuntu_focal.yaml
deleted file mode 100644
index 60302f2309..0000000000
--- a/fdio.infra.ansible/roles/aws/tasks/ubuntu_focal.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
----
-# file: roles/aws/tasks/ubuntu_focal.yaml.yaml
-
-- name: Enable deb-src APT Repository
- apt_repository:
- repo: "deb-src http://archive.ubuntu.com/ubuntu focal main"
- state: "present"
- update_cache: true
- tags:
- - aws-enable-src-repo
diff --git a/fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml b/fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml
index 4ee1545baf..c589239f61 100644
--- a/fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml
+++ b/fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml
@@ -1,10 +1,35 @@
---
-# file: roles/aws/tasks/ubuntu_jammy.yaml.yaml
+# file: tasks/ubuntu_jammy.yaml
- name: Enable deb-src APT Repository
- apt_repository:
- repo: "deb-src http://archive.ubuntu.com/ubuntu jammy main"
+ ansible.builtin.apt_repository:
+ repo: "deb-src {{ packages_repo[ansible_distribution|lower][ansible_machine] }} jammy main"
state: "present"
update_cache: true
tags:
- aws-enable-src-repo
+
+- name: Enable deb APT Repository Focal
+ ansible.builtin.apt_repository:
+ repo: "deb {{ packages_repo[ansible_distribution|lower][ansible_machine] }} focal main"
+ state: "present"
+ update_cache: true
+ tags:
+ - aws-enable-src-repo
+
+- name: Enable deb-src APT Repository Focal Src
+ ansible.builtin.apt_repository:
+ repo: "deb-src {{ packages_repo[ansible_distribution|lower][ansible_machine] }} focal main"
+ state: "present"
+ update_cache: true
+ tags:
+ - aws-enable-src-repo
+
+- name: Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution == 'Ubuntu'
+ tags:
+ - aws-enable-src-repo \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/calibration/defaults/main.yaml b/fdio.infra.ansible/roles/calibration/defaults/main.yaml
index bb904622e4..5dc3330e08 100644
--- a/fdio.infra.ansible/roles/calibration/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/calibration/defaults/main.yaml
@@ -9,9 +9,6 @@ packages_base:
packages_by_distro:
ubuntu:
- focal:
- - "build-essential"
- - "dmidecode"
jammy:
- "build-essential"
- "dmidecode"
@@ -27,20 +24,13 @@ kernel_version: "{{ kernel_version_by_distro_by_arch[ansible_distribution | lowe
kernel_version_by_distro_by_arch:
ubuntu:
- focal:
- x86_64:
- - "5.4.0-65-generic"
- - "5.3.0-1020-azure"
- - "5.4.0-1035-aws"
- aarch64:
- - "5.4.0-65-generic"
jammy:
x86_64:
- - "5.15.0-0-generic" # Placeholder
- - "5.3.0-1020-azure" # Placeholder
+ - "5.15.0-46-generic" # Placeholder
- "5.15.0-1000-aws" # Placeholder
+ - "5.4.0-1009-aws" # Placeholder
aarch64:
- - "5.15.0-0-generic" # Placeholder
+ - "5.15.0-46-generic" # Placeholder
pma_directory: "/tmp/pma_tools"
jitter_core: 7
diff --git a/fdio.infra.ansible/roles/calibration/tasks/main.yaml b/fdio.infra.ansible/roles/calibration/tasks/main.yaml
index 62470b4d7f..5807d7e2a4 100644
--- a/fdio.infra.ansible/roles/calibration/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/calibration/tasks/main.yaml
@@ -2,7 +2,7 @@
# file: roles/calibration/tasks/main.yaml
- name: Inst - Update Package Cache (APT)
- apt:
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -11,21 +11,21 @@
- calibration-inst-prerequisites
- name: Inst - Prerequisites
- package:
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: latest
tags:
- calibration-inst-prerequisites
- name: Check CPU Power States
- shell: "lscpu"
+ ansible.builtin.shell: "lscpu"
register: current_lscpu
changed_when: false
tags:
- check-cpu-frequency
- name: Check CPU Power States
- assert:
+ ansible.builtin.assert:
that:
- "'CPU min MHz' not in current_lscpu.stdout or 'Intel(R) Xeon(R)' not in ansible_processor"
fail_msg: "CPU configuration!"
@@ -34,7 +34,7 @@
- check-cpu-frequency
- name: Check Kernel Parameters
- assert:
+ ansible.builtin.assert:
that:
- item in ansible_cmdline
fail_msg: "Kernel parameters!"
@@ -46,7 +46,7 @@
- check-kernel-params
- name: Check Kernel Version
- assert:
+ ansible.builtin.assert:
that:
- ansible_kernel not in kernel_version_by_distro_by_arch
fail_msg: "Kernel version!"
@@ -55,14 +55,14 @@
- check-kernel-version
- name: Spectre Meltdown Checker Status
- stat:
+ ansible.builtin.stat:
path: "/opt/spectre-meltdown-checker.sh"
register: spectre_meltdown_status
tags:
- check-spectre-meltdown
- name: Get Spectre Meltdown Checker
- get_url:
+ ansible.builtin.get_url:
url: "https://meltdown.ovh"
dest: "/opt/spectre-meltdown-checker.sh"
mode: "744"
@@ -72,7 +72,7 @@
- check-spectre-meltdown
- name: Run Spectre Meltdown Checker
- shell: "/opt/spectre-meltdown-checker.sh --no-color --sysfs-only || true"
+ ansible.builtin.shell: "/opt/spectre-meltdown-checker.sh --no-color --sysfs-only || true"
ignore_errors: true
register: spectre_meltdown_sync
tags:
diff --git a/fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml b/fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml
index cb98c00c51..2d28f92ae3 100644
--- a/fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml
+++ b/fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml
@@ -2,19 +2,19 @@
# file: roles/calibration/tasks/x86_64.yaml
- name: Calibration - Clone PMA Tool
- git:
+ ansible.builtin.git:
repo: "https://gerrit.fd.io/r/pma_tools"
dest: "{{ pma_directory }}"
tags:
- check-jitter-tool
- name: Calibration - Compile PMA Tool
- raw: "cd {{ pma_directory }}/jitter && make"
+ ansible.builtin.raw: "cd {{ pma_directory }}/jitter && make"
tags:
- check-jitter-tool
- name: Calibration - Run Jitter Tool
- shell: "{{ pma_directory }}/jitter/jitter -c {{ jitter_core }} -i {{ jitter_iterations }} -f"
+ ansible.builtin.shell: "{{ pma_directory }}/jitter/jitter -c {{ jitter_core }} -i {{ jitter_iterations }} -f"
become: true
async: 60
poll: 0
@@ -24,12 +24,12 @@
- check-jitter-tool
- name: Check sync status
- async_status:
+ ansible.builtin.async_status:
jid: "{{ jitter_async.ansible_job_id }}"
register: "jitter_poll_results"
until: jitter_poll_results.finished
retries: 30
-- debug: var=jitter_poll_results.stdout_lines
+- ansible.builtin.debug: var=jitter_poll_results.stdout_lines
tags:
- check-jitter-tool
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml b/fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml
index 7558ee0fd7..76704ab50d 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml
@@ -1,10 +1,10 @@
---
-# file: roles/cleanup/tasks/clean_images.yaml
+# file: tasks/clean_images.yaml
- name: Clean Docker Images
block:
- name: Clean Images - Prefetch Docker Images
- cron:
+ ansible.builtin.cron:
name: "Prefetch docker image {{ item }}"
minute: "10"
hour: "7"
@@ -15,7 +15,7 @@
- prefetch-docker-images
- name: Clean Images - Remove Dangling Docker Images
- cron:
+ ansible.builtin.cron:
name: "Remove dangling docker images"
minute: "10"
hour: "5"
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml b/fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml
index 750572feb9..dc739eb954 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml
@@ -1,38 +1,38 @@
---
-# file: roles/cleanup/tasks/kill_containers.yaml
+# file: tasks/kill_containers.yaml
- name: Kill Docker Containers
block:
- - name: Kill Container - Get Running Docker Containers
- shell: "docker ps -aq"
+ - name: Get Running Docker Containers
+ ansible.builtin.shell: "docker ps -a --filter name=DUT -q"
register: running_containers
changed_when: false
tags:
- kill-containers
- - name: Kill Container - Remove All Docker Containers
- shell: "docker rm --force {{ item }}"
+ - name: Remove All Docker Containers
+ ansible.builtin.shell: "docker rm --force {{ item }}"
with_items: "{{ running_containers.stdout_lines }}"
tags:
- kill-containers
rescue:
- name: Restart Docker Daemon
- systemd:
+ ansible.builtin.systemd:
name: "docker"
state: "restarted"
- name: Kill LXC Containers
block:
- - name: Kill Container - Get Running LXC Containers
- shell: "lxc-ls"
+ - name: Get Running LXC Containers
+ ansible.builtin.shell: "lxc-ls"
register: running_containers
changed_when: false
tags:
- kill-containers
- - name: Kill Container - Remove All LXC Containers
- shell: "lxc-destroy --force -n {{ item }}"
+ - name: Remove All LXC Containers
+ ansible.builtin.shell: "lxc-destroy --force -n {{ item }}"
with_items: "{{ running_containers.stdout_lines }}"
tags:
- kill-containers
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml b/fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml
index d6c6ed5118..9ab98a8e57 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml
@@ -1,10 +1,10 @@
---
-# file: roles/cleanup/tasks/kill_process.yaml
+# file: tasks/kill_process.yaml
- name: Kill Process - {{ process }}
block:
- name: Get PID Of {{ process }}
- shell: "ps -ef | grep -v grep | grep -w {{ process }} | awk '{print $2}'"
+ ansible.builtin.shell: "ps -ef | grep -v grep | grep -w {{ process }} | awk '{print $2}'"
when:
- process is defined and process != ""
register: running_processes
@@ -12,8 +12,9 @@
- kill-process
- name: Safe Kill {{ process }}
- shell: "kill {{ item }}"
+ ansible.builtin.shell: "kill {{ item }}"
with_items: "{{ running_processes.stdout_lines }}"
+ ignore_errors: true
tags:
- kill-process
@@ -27,7 +28,7 @@
- kill-process
- name: Kill Process - Force Kill {{ process }}
- shell: "kill -9 {{ item }}"
+ ansible.builtin.shell: "kill -9 {{ item }}"
with_items: "{{ killed_processes.results | select('failed') | map(attribute='item') | list }}"
tags:
- kill-process
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/main.yaml b/fdio.infra.ansible/roles/cleanup/tasks/main.yaml
index eeda0139b3..c97b9c5d7e 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/main.yaml
@@ -1,22 +1,5 @@
---
-# file: roles/cleanup/tasks/main.yaml
-# purpose: Structured per server cleanup tasks.
-# - main:
-# - tg:
-# - Run tasks on TG servers only.
-# - Cleanup processes (T-Rex).
-# - sut:
-# - Run tasks on SUT servers only.
-# - Cleanup file leftovers (logs).
-# - Cleanup packages (VPP, Honeycomb).
-# - Cleanup processes (qemu, l3fwd, testpmd, docker, kubernetes)
-# - Cleanup interfaces.
-# - vpp_device
-# - Run tasks on vpp_device servers only.
-# - Reset SRIOV
-# - Docker image cleanup
-# - nomad
-# - Docker image cleanup
+# file: tasks/main.yaml
- name: tg specific
include_tasks: tg.yaml
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml b/fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml
index 3d8f322840..086a4eff7d 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/cleanup/tasks/nomad.yaml
+# file: tasks/nomad.yaml
- name: Host Cleanup
block:
@@ -8,15 +8,11 @@
vars:
images_to_prefetch_by_arch:
aarch64:
+ - "fdiotools/builder-ubuntu2204:prod-aarch64"
- "fdiotools/builder-ubuntu2004:prod-aarch64"
- - "fdiotools/builder-ubuntu1804:prod-aarch64"
- - "fdiotools/builder-centos8:prod-aarch64"
x86_64:
+ - "fdiotools/builder-ubuntu2204:prod-x86_64"
- "fdiotools/builder-ubuntu2004:prod-x86_64"
- - "fdiotools/builder-ubuntu1804:prod-x86_64"
- - "fdiotools/builder-debian10:prod-x86_64"
- - "fdiotools/builder-debian9:prod-x86_64"
- - "fdiotools/builder-centos8:prod-x86_64"
- - "fdiotools/builder-centos7:prod-x86_64"
+ - "fdiotools/builder-debian11:prod-x86_64"
tags:
- clean-images
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml b/fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml
index 484868e595..652729bc30 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml
@@ -1,15 +1,15 @@
---
-# file: roles/cleanup/tasks/remove_package.yaml
+# file: tasks/remove_package.yaml
-- name: Remove Package - Fix Corrupted APT
- shell: "dpkg --configure -a"
+- name: Fix Corrupted APT
+ ansible.builtin.shell: "dpkg --configure -a"
when:
- ansible_distribution == 'Ubuntu'
tags:
- remove-package
- name: Remove Package - {{ package }}
- apt:
+ ansible.builtin.apt:
name: "{{ package }}"
force: true
purge: true
@@ -18,4 +18,4 @@
when:
- ansible_distribution == 'Ubuntu'
tags:
- - remove-package
+  - remove-package
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/sut.yaml b/fdio.infra.ansible/roles/cleanup/tasks/sut.yaml
index 396e442b4e..22bf596369 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/sut.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/sut.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/cleanup/tasks/sut.yaml
+# file: tasks/sut.yaml
- name: Host Cleanup
block:
@@ -38,6 +38,13 @@
tags:
- kill-process
+ - name: Kill Processes - vpp
+ import_tasks: kill_process.yaml
+ vars:
+ process: "vpp"
+ tags:
+ - kill-process
+
- name: Kill Processes - vpp_echo
import_tasks: kill_process.yaml
vars:
@@ -46,7 +53,7 @@
- kill-process
- name: Find File Or Dir - Core Zip File
- find:
+ ansible.builtin.find:
paths: "/tmp/"
patterns: "*tar.lzo.lrz.xz*"
register: files_to_delete
@@ -54,7 +61,7 @@
- remove-file-dir
- name: Remove File Or Dir - Core Zip File
- file:
+ ansible.builtin.file:
path: "{{ item.path }}"
state: absent
with_items: "{{ files_to_delete.files }}"
@@ -62,7 +69,7 @@
- remove-file-dir
- name: Find File Or Dir - Core Dump File
- find:
+ ansible.builtin.find:
paths: "/tmp/"
patterns: "*core*"
register: files_to_delete
@@ -70,7 +77,7 @@
- remove-file-dir
- name: Remove File Or Dir - Core Dump File
- file:
+ ansible.builtin.file:
path: "{{ item.path }}"
state: absent
with_items: "{{ files_to_delete.files }}"
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/tg.yaml b/fdio.infra.ansible/roles/cleanup/tasks/tg.yaml
index fa2d2d2819..8c0162df2c 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/tg.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/tg.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/cleanup/tasks/tg.yaml
+# file: tasks/tg.yaml
- name: Host Cleanup
block:
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml b/fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml
index 23d001fe6f..c97fa0cde5 100644
--- a/fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml
+++ b/fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml
@@ -1,10 +1,10 @@
---
-# file: roles/cleanup/tasks/vpp_device.yaml
+# file: tasks/vpp_device.yaml
- name: Host Cleanup
block:
- name: Reset vpp_device Binary
- copy:
+ ansible.builtin.copy:
src: "files/reset_vppdevice.sh"
dest: "/usr/local/bin"
owner: "root"
diff --git a/fdio.infra.ansible/roles/common/defaults/main.yaml b/fdio.infra.ansible/roles/common/defaults/main.yaml
index 77a9c83740..9ded8fcba9 100644
--- a/fdio.infra.ansible/roles/common/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/common/defaults/main.yaml
@@ -19,35 +19,10 @@ packages_base:
packages_by_distro:
ubuntu:
- focal:
- - "bpfcc-tools"
- - "build-essential"
- - "libbpfcc"
- - "libbpfcc-dev"
- - "libclang1-9"
- - "libllvm9"
- - "libpcap-dev"
- - "net-tools"
- - "python3-all"
- - "python3-apt"
- - "python3-bpfcc"
- - "python3-cffi"
- - "python3-cffi-backend"
- - "python3-dev"
- - "python3-pip"
- - "python3-pyelftools"
- - "python3-setuptools"
jammy:
- "build-essential"
- "libpcap-dev"
- "net-tools"
- - "python-all"
- - "python-apt"
- - "python-cffi"
- - "python-cffi-backend"
- - "python-dev"
- - "python-pip"
- - "python-setuptools"
- "python3-all"
- "python3-apt"
- "python3-cffi"
diff --git a/fdio.infra.ansible/roles/common/handlers/main.yaml b/fdio.infra.ansible/roles/common/handlers/main.yaml
index bb317e8067..0a4944b4ca 100644
--- a/fdio.infra.ansible/roles/common/handlers/main.yaml
+++ b/fdio.infra.ansible/roles/common/handlers/main.yaml
@@ -2,7 +2,7 @@
# file: roles/common/handlers/main.yaml
- name: Reboot Server
- reboot:
+ ansible.builtin.reboot:
reboot_timeout: 3600
tags:
- reboot-server
diff --git a/fdio.infra.ansible/roles/common/tasks/main.yaml b/fdio.infra.ansible/roles/common/tasks/main.yaml
index 7418709c99..e47a1fc7a8 100644
--- a/fdio.infra.ansible/roles/common/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/common/tasks/main.yaml
@@ -2,7 +2,7 @@
# file: roles/common/tasks/main.yaml
- name: Conf - Add permanent proxy settings
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/environment"
state: "present"
line: "{{ item.key }}={{ item.value }}"
@@ -12,7 +12,7 @@
- common-conf-proxy
- name: Inst - Update package cache (apt)
- apt:
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -21,21 +21,22 @@
- common-inst-prerequisites
- name: Inst - Prerequisites
- package:
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
- state: latest
+ state: "latest"
tags:
- common-inst-prerequisites
- name: Inst - Meson (DPDK)
- pip:
+ ansible.builtin.pip:
name:
- - "meson==0.49.2"
+ - "meson==0.64.1"
+ state: "forcereinstall"
tags:
- common-inst-meson
- name: Conf - sudoers admin
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/sudoers"
state: "present"
regexp: "^%admin ALL="
@@ -45,7 +46,7 @@
- common-conf-sudoers
- name: Conf - sudoers nopasswd
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/sudoers"
state: "present"
regexp: "^%sudo"
diff --git a/fdio.infra.ansible/roles/consul/defaults/main.yaml b/fdio.infra.ansible/roles/consul/defaults/main.yaml
index 503857de92..9ea38efb56 100644
--- a/fdio.infra.ansible/roles/consul/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/consul/defaults/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/consul/defaults/main.yaml
+# file: defaults/main.yaml
# Inst - Prerequisites.
packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
@@ -24,7 +24,7 @@ consul_architecture_map:
32-bit: "386"
64-bit: "amd64"
consul_architecture: "{{ consul_architecture_map[ansible_architecture] }}"
-consul_version: "1.12.2"
+consul_version: "1.16.1"
consul_pkg: "consul_{{ consul_version }}_linux_{{ consul_architecture }}.zip"
consul_zip_url: "https://releases.hashicorp.com/consul/{{ consul_version }}/{{ consul_pkg }}"
consul_force_update: false
@@ -47,41 +47,32 @@ consul_service_mgr: ""
# Conf - User and group.
consul_group: "consul"
-consul_group_state: "present"
consul_user: "consul"
-consul_user_state: "present"
# Conf - base.hcl
+consul_allow_tls: true
consul_bind_addr: "{{ ansible_default_ipv4.address }}"
+consul_bootstrap_expect: 1
consul_client_addr: "0.0.0.0"
consul_datacenter: "dc1"
consul_disable_update_check: true
consul_enable_debug: false
consul_enable_syslog: true
+consul_encrypt: ""
consul_log_level: "INFO"
consul_node_name: "{{ inventory_hostname }}"
-consul_retry_join: true
-consul_bootstrap_expect: 2
-consul_encrypt: ""
-consul_ca_file: "{{ consul_ssl_dir }}/ca.pem"
-consul_cert_file: "{{ consul_ssl_dir }}/consul.pem"
-consul_key_file: "{{ consul_ssl_dir }}/consul-key.pem"
-consul_verify_incoming: false
-consul_verify_outgoing: false
-consul_vefify_server_hostname: false
-consul_allow_tls: false
-consul_ui_config:
- enabled: true
consul_recursors:
- 1.1.1.1
- 8.8.8.8
-consul_certificates:
- - src: "{{ file_consul_ca_pem }}"
- dest: "{{ consul_ca_file }}"
- - src: "{{ file_consul_server_0_pem }}"
- dest: "{{ consul_cert_file }}"
- - src: "{{ file_consul_server_0_key_pem }}"
- dest: "{{ consul_key_file }}"
+consul_retry_join: false
+consul_ui_config:
+ enabled: true
+consul_verify_incoming: true
+consul_verify_outgoing: true
+consul_vefify_server_hostname: false
+consul_ca_file: "{{ consul_ssl_dir }}/ca.pem"
+consul_cert_file: "{{ consul_ssl_dir }}/consul.pem"
+consul_key_file: "{{ consul_ssl_dir }}/consul-key.pem"
# Conf - ports.hcl
consul_port_dns: 53
diff --git a/fdio.infra.ansible/roles/consul/handlers/main.yaml b/fdio.infra.ansible/roles/consul/handlers/main.yaml
index a88ae45d27..a9de4d1439 100644
--- a/fdio.infra.ansible/roles/consul/handlers/main.yaml
+++ b/fdio.infra.ansible/roles/consul/handlers/main.yaml
@@ -1,5 +1,5 @@
---
-# file roles/consul/handlers/main.yaml
+# file handlers/main.yaml
- name: Restart Nomad
ansible.builtin.systemd:
diff --git a/fdio.infra.ansible/roles/consul/meta/main.yaml b/fdio.infra.ansible/roles/consul/meta/main.yaml
index bc6d6a1c57..673c3b738d 100644
--- a/fdio.infra.ansible/roles/consul/meta/main.yaml
+++ b/fdio.infra.ansible/roles/consul/meta/main.yaml
@@ -1,18 +1,21 @@
---
-# file: roles/consul/meta/main.yaml
+# file: meta/main.yaml
dependencies: []
+
galaxy_info:
- role_name: consul
- author: fd.io
- description: Hashicrop Consul.
- company: none
+ role_name: "consul"
+ author: "pmikus"
+ description: "Hashicorp Consul."
+ company: "none"
license: "license (Apache)"
- min_ansible_version: 2.9
+ min_ansible_version: "2.9"
platforms:
- - name: Ubuntu
+ - name: "Ubuntu"
versions:
- - focal
- - jammy
+ - "focal"
+ - "jammy"
+ - "kinetic"
galaxy_tags:
- - consul
+ - "consul"
+ - "hashicorp"
diff --git a/fdio.infra.ansible/roles/consul/tasks/main.yaml b/fdio.infra.ansible/roles/consul/tasks/main.yaml
index 1d6bcc0b0b..6dd430754b 100644
--- a/fdio.infra.ansible/roles/consul/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/consul/tasks/main.yaml
@@ -1,16 +1,16 @@
---
-# file: roles/consul/tasks/main.yaml
+# file: tasks/main.yaml
-- name: Inst - Update Repositories Cache
- apt:
+- name: Update Repositories Cache
+ ansible.builtin.apt:
update_cache: true
when:
- ansible_os_family == 'Debian'
tags:
- consul-inst-package
-- name: Inst - Dependencies
- apt:
+- name: Dependencies
+ ansible.builtin.apt:
name: "{{ packages | flatten(levels=1) }}"
state: "present"
cache_valid_time: 3600
@@ -20,31 +20,31 @@
tags:
- consul-inst-dependencies
-- name: Conf - Add Consul Group
- group:
+- name: Add Consul Group
+ ansible.builtin.group:
name: "{{ consul_group }}"
- state: "{{ consul_group_state }}"
+ state: "present"
tags:
- consul-conf-user
-- name: Conf - Add Consul user
- user:
+- name: Add Consul user
+ ansible.builtin.user:
name: "{{ consul_user }}"
group: "{{ consul_group }}"
- state: "{{ consul_user_state }}"
+ state: "present"
system: true
tags:
- consul-conf-user
-- name: Inst - Download Consul
- get_url:
+- name: Download Consul
+ ansible.builtin.get_url:
url: "{{ consul_zip_url }}"
dest: "{{ consul_inst_dir }}/{{ consul_pkg }}"
tags:
- consul-inst-package
-- name: Inst - Clean Consul
- file:
+- name: Clean Consul
+ ansible.builtin.file:
path: "{{ consul_inst_dir }}/consul"
state: "absent"
when:
@@ -52,16 +52,16 @@
tags:
- consul-inst-package
-- name: Inst - Unarchive Consul
- unarchive:
+- name: Unarchive Consul
+ ansible.builtin.unarchive:
src: "{{ consul_inst_dir }}/{{ consul_pkg }}"
dest: "{{ consul_inst_dir }}/"
remote_src: true
tags:
- consul-inst-package
-- name: Inst - Consul
- copy:
+- name: Consul
+ ansible.builtin.copy:
src: "{{ consul_inst_dir }}/consul"
dest: "{{ consul_bin_dir }}"
owner: "{{ consul_user }}"
@@ -72,78 +72,38 @@
tags:
- consul-inst-package
-- name: Conf - Create Directories "{{ consul_data_dir }}"
- file:
- dest: "{{ consul_data_dir }}"
- state: directory
- owner: "{{ consul_user }}"
- group: "{{ consul_group }}"
- tags:
- - consul-conf
-
-- name: Conf - Create Directories "{{ consul_ssl_dir }}"
- file:
- dest: "{{ consul_ssl_dir }}"
- state: directory
- owner: "{{ consul_user }}"
- group: "{{ consul_group }}"
- tags:
- - consul-conf
-
-- name: Conf - Create Config Directory
- file:
- dest: "{{ consul_config_dir }}"
- state: directory
+- name: Create Directories
+ ansible.builtin.file:
+ dest: "{{ item }}"
+ state: "directory"
owner: "{{ consul_user }}"
group: "{{ consul_group }}"
mode: 0755
+ with_items:
+ - "{{ consul_config_dir }}"
+ - "{{ consul_ssl_dir }}"
+ - "{{ consul_data_dir }}"
+ - "{{ nomad_config_dir }}"
+ - "{{ nomad_ssl_dir }}"
tags:
- consul-conf
-- name: Conf - Base Configuration
- template:
- src: base.hcl.j2
- dest: "{{ consul_config_dir }}/base.hcl"
+- name: Base Configuration
+ ansible.builtin.template:
+ src: "{{ item }}.hcl.j2"
+ dest: "{{ consul_config_dir }}/{{ item }}.hcl"
owner: "{{ consul_user }}"
group: "{{ consul_group }}"
mode: 0644
+ with_items:
+ - "base"
+ - "ports"
+ - "telemetry"
tags:
- consul-conf
-- name: Conf - Ports Configuration
- template:
- src: ports.hcl.j2
- dest: "{{ consul_config_dir }}/ports.hcl"
- owner: "{{ consul_user }}"
- group: "{{ consul_group }}"
- mode: 0644
- tags:
- - consul-conf
-
-- name: Conf - Telemetry Configuration
- template:
- src: telemetry.hcl.j2
- dest: "{{ consul_config_dir }}/telemetry.hcl"
- owner: "{{ consul_user }}"
- group: "{{ consul_group }}"
- mode: 0644
- tags:
- - consul-conf
-
-- name: Conf - Services Configuration
- template:
- src: services.json.j2
- dest: "{{ consul_config_dir }}/services.json"
- owner: "{{ consul_user }}"
- group: "{{ consul_group }}"
- mode: 0644
- when:
- - consul_services
- tags:
- - consul-conf
-
-- name: Conf - Copy Certificates And Keys
- copy:
+- name: Copy Certificates And Keys
+ ansible.builtin.copy:
content: "{{ item.src }}"
dest: "{{ item.dest }}"
owner: "{{ consul_user }}"
@@ -156,8 +116,8 @@
tags:
- consul-conf
-- name: Conf - Stop Systemd-resolved
- systemd:
+- name: Stop Systemd-resolved
+ ansible.builtin.systemd:
daemon_reload: true
enabled: false
name: "systemd-resolved"
@@ -167,8 +127,8 @@
tags:
- consul-conf
-- name: Conf - System.d Script
- template:
+- name: System.d Script
+ ansible.builtin.template:
src: "consul_systemd.service.j2"
dest: "/lib/systemd/system/consul.service"
owner: "root"
@@ -176,11 +136,10 @@
mode: 0644
notify:
- "Restart Consul"
- - "Restart Nomad"
when:
- consul_service_mgr == "systemd"
tags:
- consul-conf
-- name: Meta - Flush handlers
- meta: flush_handlers
+- name: Flush handlers
+ ansible.builtin.meta: flush_handlers
diff --git a/fdio.infra.ansible/roles/consul/templates/ports.hcl.j2 b/fdio.infra.ansible/roles/consul/templates/ports.hcl.j2
index a658060ce8..02932bf6dc 100644
--- a/fdio.infra.ansible/roles/consul/templates/ports.hcl.j2
+++ b/fdio.infra.ansible/roles/consul/templates/ports.hcl.j2
@@ -2,7 +2,7 @@ ports {
dns = {{ consul_port_dns }}
http = {{ consul_port_http }}
https = {{ consul_port_https }}
- grpc = {{ consul_port_grpc }}
+ grpc_tls = {{ consul_port_grpc }}
serf_lan = {{ consul_port_serf_lan }}
serf_wan = {{ consul_port_serf_wan }}
server = {{ consul_port_server }}
diff --git a/fdio.infra.ansible/roles/consul/templates/services.json.j2 b/fdio.infra.ansible/roles/consul/templates/services.json.j2
deleted file mode 100644
index 3245ba92a4..0000000000
--- a/fdio.infra.ansible/roles/consul/templates/services.json.j2
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "services": [
-{% for item in consul_services %}
- {
- "name": "{{ item.name }}",
- "port": {{ item.port }}
- }
-{%- if not loop.last %},
-{% endif %}
-{% endfor %}
-
- ]
-}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/consul/vars/main.yaml b/fdio.infra.ansible/roles/consul/vars/main.yaml
index b46333a7a7..5d813dffc7 100644
--- a/fdio.infra.ansible/roles/consul/vars/main.yaml
+++ b/fdio.infra.ansible/roles/consul/vars/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/consul/vars/main.yaml
+# file: vars/main.yaml
consul_node_client: "{{ (consul_node_role == 'client') or (consul_node_role == 'both') }}"
consul_node_server: "{{ (consul_node_role == 'server') or (consul_node_role == 'both') }}"
diff --git a/fdio.infra.ansible/roles/csit_sut_image/files/Dockerfile b/fdio.infra.ansible/roles/csit_sut_image/files/Dockerfile
deleted file mode 100644
index a955799b63..0000000000
--- a/fdio.infra.ansible/roles/csit_sut_image/files/Dockerfile
+++ /dev/null
@@ -1,166 +0,0 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-FROM ubuntu:20.04
-LABEL Description="CSIT vpp-device ubuntu 20.04 SUT image"
-LABEL Version="master"
-
-# Setup the environment
-ENV DEBIAN_FRONTEND=noninteractive
-
-# Configure locales
-RUN apt-get update -qq \
- && apt-get install -y \
- apt-utils \
- locales \
- && sed -i 's/# \(en_US\.UTF-8 .*\)/\1/' /etc/locale.gen \
- && locale-gen en_US.UTF-8 \
- && dpkg-reconfigure --frontend=noninteractive locales \
- && update-locale LANG=en_US.UTF-8 \
- && TZ=Etc/UTC && ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone \
- && rm -r /var/lib/apt/lists/*
-ENV LANG="en_US.UTF-8" LANGUAGE="en_US" LC_ALL="en_US.UTF-8"
-
-# Install packages and Docker
-RUN apt-get -q update \
- && apt-get install -y -qq \
- apt-transport-https \
- bridge-utils \
- ca-certificates \
- cloud-init \
- cmake \
- curl \
- dkms \
- gdb \
- gfortran \
- libapr1 \
- libblas-dev \
- libffi-dev \
- liblapack-dev \
- libmbedcrypto3 \
- libmbedtls12 \
- libmbedx509-0 \
- libnuma1 \
- libnuma-dev \
- libpcap-dev \
- libpixman-1-dev \
- libssl-dev \
- locales \
- net-tools \
- openssh-server \
- pciutils \
- python3-all \
- python3-apt \
- python3-cffi \
- python3-cffi-backend \
- python3-dev \
- python3-pip \
- python3-setuptools \
- python3-virtualenv \
- qemu-system \
- rsyslog \
- socat \
- software-properties-common \
- strongswan \
- ssh \
- sshpass \
- sudo \
- supervisor \
- tar \
- tcpdump \
- unzip \
- vim \
- wget \
- zlib1g-dev \
- && curl -fsSL https://get.docker.com | sh \
- && rm -rf /var/lib/apt/lists/*
-
-# Fix permissions
-RUN chown root:syslog /var/log \
- && chmod 755 /etc/default
-
-# Create directory structure
-RUN mkdir -p /tmp/dumps \
- && mkdir -p /var/cache/vpp/python \
- && mkdir -p /var/run/sshd
-
-# CSIT PIP pre-cache
-RUN pip3 install \
- ecdsa==0.13.3 \
- paramiko==2.6.0 \
- pycrypto==2.6.1 \
- pypcap==1.2.3 \
- PyYAML==5.1.1 \
- requests==2.22.0 \
- robotframework==3.1.2 \
- scapy==2.4.3 \
- scp==0.13.2 \
- ansible==2.10.7 \
- dill==0.2.8.2 \
- numpy==1.17.3 \
- hdrhistogram==0.6.1 \
- plotly==4.1.1 \
- PTable==0.9.2 \
- Sphinx==2.2.1 \
- sphinx-rtd-theme==0.4.0 \
- sphinxcontrib-programoutput==0.15 \
- sphinxcontrib-robotdoc==0.11.0 \
- ply==3.11 \
- alabaster==0.7.12 \
- Babel==2.7.0 \
- bcrypt==3.1.7 \
- certifi==2019.9.11 \
- cffi==1.13.2 \
- chardet==3.0.4 \
- cryptography==2.8 \
- docutils==0.15.2 \
- future==0.18.2 \
- idna==2.8 \
- imagesize==1.1.0 \
- Jinja2==2.10.3 \
- MarkupSafe==1.1.1 \
- packaging==19.2 \
- pbr==5.4.3 \
- pycparser==2.19 \
- Pygments==2.4.2 \
- PyNaCl==1.3.0 \
- pyparsing==2.4.4 \
- python-dateutil==2.8.2 \
- pytz==2019.3 \
- retrying==1.3.3 \
- six==1.13.0 \
- snowballstemmer==2.0.0 \
- sphinxcontrib-applehelp==1.0.1 \
- sphinxcontrib-devhelp==1.0.1 \
- sphinxcontrib-htmlhelp==1.0.2 \
- sphinxcontrib-jsmath==1.0.1 \
- sphinxcontrib-qthelp==1.0.2 \
- sphinxcontrib-serializinghtml==1.1.3 \
- urllib3==1.25.6
-
-# ARM workaround
-RUN pip3 install \
- pandas==0.25.3 \
- scipy==1.5.4
-
-# SSH settings
-RUN echo 'root:Csit1234' | chpasswd \
- && sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config \
- && sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd \
- && echo "export VISIBLE=now" >> /etc/profile
-
-EXPOSE 2222
-
-COPY supervisord.conf /etc/supervisor/supervisord.conf
-
-CMD ["sh", "-c", "rm -f /dev/shm/db /dev/shm/global_vm /dev/shm/vpe-api; /usr/bin/supervisord -c /etc/supervisor/supervisord.conf; /usr/sbin/sshd -D -p 2222"] \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/csit_sut_image/tasks/main.yaml b/fdio.infra.ansible/roles/csit_sut_image/tasks/main.yaml
deleted file mode 100644
index 9dddbe435b..0000000000
--- a/fdio.infra.ansible/roles/csit_sut_image/tasks/main.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-# file: roles/csit_sut_image/tasks/main.yaml
-
-- name: Create a directory if it does not exist
- file:
- path: "/opt/csit-sut/"
- state: "directory"
- mode: 0755
- tags:
- - csit-sut-image
-
-- name: Copy Build Items
- copy:
- src: "{{ item }}"
- dest: "/opt/csit-sut/"
- owner: "root"
- group: "root"
- mode: 0755
- with_items:
- - Dockerfile
- - supervisord.conf
- tags:
- - csit-sut-image
-
-- name: Build CSIT SUT Docker Image
- shell: "docker build -t csit_sut-ubuntu2004:local ."
- args:
- chdir: "/opt/csit-sut"
- tags:
- - csit-sut-image
diff --git a/fdio.infra.ansible/roles/docker/defaults/main.yaml b/fdio.infra.ansible/roles/docker/defaults/main.yaml
index e493d1c9b5..bf97b4a192 100644
--- a/fdio.infra.ansible/roles/docker/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/docker/defaults/main.yaml
@@ -10,7 +10,7 @@ docker_package_state: latest
docker_service_state: started
docker_service_enabled: true
docker_restart_handler_state: restarted
-docker_service_mgr: ""
+docker_service_mgr: "systemd"
# Used only for Debian/Ubuntu.
docker_apt_release_channel: "stable"
diff --git a/fdio.infra.ansible/roles/docker/meta/main.yaml b/fdio.infra.ansible/roles/docker/meta/main.yaml
index ac6c0a9980..7bef656eb5 100644
--- a/fdio.infra.ansible/roles/docker/meta/main.yaml
+++ b/fdio.infra.ansible/roles/docker/meta/main.yaml
@@ -13,7 +13,6 @@ galaxy_info:
platforms:
- name: Ubuntu
versions:
- - focal
- jammy
galaxy_tags:
- docker
diff --git a/fdio.infra.ansible/roles/docker/tasks/focal.yaml b/fdio.infra.ansible/roles/docker/tasks/focal.yaml
deleted file mode 100644
index 27fee6285c..0000000000
--- a/fdio.infra.ansible/roles/docker/tasks/focal.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-# file: roles/docker/tasks/ubuntu_focal.yaml
-
-- name: Inst - Dependencies
- ansible.builtin.apt:
- name:
- - "apt-transport-https"
- - "ca-certificates"
- - "gpg-agent"
- - "software-properties-common"
- state: "present"
- cache_valid_time: 3600
- install_recommends: false
- tags:
- - docker-inst-dependencies
-
-- name: Conf - Add APT Key
- ansible.builtin.apt_key:
- url: "{{ docker_apt_gpg_key }}"
- state: "{{ docker_apt_gpg_key_state }}"
- tags:
- - docker-conf-apt
-
-- name: Conf - Install APT Repository
- ansible.builtin.apt_repository:
- repo: "{{ docker_apt_repository }}"
- state: "{{ docker_apt_repository_state }}"
- update_cache: true
- tags:
- - docker-conf-apt
diff --git a/fdio.infra.ansible/roles/docker_images/files/base/Dockerfile b/fdio.infra.ansible/roles/docker_images/files/base/Dockerfile
new file mode 100644
index 0000000000..88af96bfa8
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/base/Dockerfile
@@ -0,0 +1,140 @@
+FROM ubuntu:22.04
+
+# Setup the environment
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Configure locales
+RUN apt-get update -qq \
+ && apt-get install -y \
+ apt-utils \
+ locales \
+ && sed -i 's/# \(en_US\.UTF-8 .*\)/\1/' /etc/locale.gen \
+ && locale-gen en_US.UTF-8 \
+ && dpkg-reconfigure --frontend=noninteractive locales \
+ && update-locale LANG=en_US.UTF-8 \
+ && TZ=Etc/UTC && ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone \
+ && rm -r /var/lib/apt/lists/*
+ENV LANG="en_US.UTF-8" LANGUAGE="en_US" LC_ALL="en_US.UTF-8"
+
+# Install packages and Docker
+RUN apt-get -q update \
+ && apt-get install -y -qq \
+ apt-transport-https \
+ bridge-utils \
+ ca-certificates \
+ cgroup-tools \
+ cloud-init \
+ cmake \
+ curl \
+ dkms \
+ ethtool \
+ gdb \
+ gfortran \
+ libapr1 \
+ libblas-dev \
+ libffi-dev \
+ libibverbs-dev \
+ liblapack-dev \
+ libmbedcrypto7 \
+ libmbedtls14 \
+ libmbedx509-1 \
+ libnuma1 \
+ libnuma-dev \
+ libpcap-dev \
+ libpixman-1-dev \
+ libsctp-dev \
+ libssl-dev \
+ net-tools \
+ ninja-build \
+ openssh-server \
+ pciutils \
+ python3-all \
+ python3-apt \
+ python3-cffi \
+ python3-cffi-backend \
+ python3-dev \
+ python3-pip \
+ python3-pyelftools \
+ python3-setuptools \
+ python3-virtualenv \
+ qemu-system \
+ rdma-core \
+ rsyslog \
+ screen \
+ socat \
+ software-properties-common \
+ strace \
+ strongswan \
+ ssh \
+ sshpass \
+ sudo \
+ supervisor \
+ tar \
+ tcpdump \
+ unzip \
+ vim \
+ wget \
+ zlib1g-dev \
+ && ln -s -f /usr/lib/x86_64-linux-gnu/libc.a /usr/lib/x86_64-linux-gnu/liblibc.a \
+ && curl -fsSL https://get.docker.com | sh \
+ && rm -rf /var/lib/apt/lists/*
+
+# Fix permissions
+RUN chown root:syslog /var/log \
+ && chmod 755 /etc/default
+
+# Create directory structure
+RUN mkdir -p /tmp/dumps \
+ && mkdir -p /var/cache/vpp/python \
+ && mkdir -p /var/run/sshd \
+ && mkdir -p /var/log/vpp
+
+# CSIT PIP pre-cache
+RUN pip3 install \
+ ecdsa==0.18.0 \
+ paramiko==3.3.1 \
+ pycrypto==2.6.1 \
+ python-dateutil==2.8.2 \
+ PyYAML==6.0.1 \
+ requests==2.31.0 \
+ robotframework==6.1.1 \
+ scapy==2.4.5 \
+ scp==0.14.5 \
+ ansible==8.2.0 \
+ ansible-core==2.15.2 \
+ dill==0.3.7 \
+ numpy==1.25.2 \
+ scipy==1.11.1 \
+ ply==3.11 \
+ jsonschema==4.18.4 \
+ rfc3339-validator==0.1.4 \
+ rfc3987==1.3.8 \
+ attrs==23.1.0 \
+ bcrypt==4.0.1 \
+ certifi==2023.7.22 \
+ cffi==1.15.1 \
+ charset-normalizer==3.2.0 \
+ cryptography==41.0.3 \
+ idna==3.4 \
+ Jinja2==3.1.2 \
+ jsonschema-specifications==2023.7.1 \
+ MarkupSafe==2.1.3 \
+ packaging==23.1 \
+ pycparser==2.21 \
+ PyNaCl==1.5.0 \
+ referencing==0.30.0 \
+ resolvelib==1.0.1 \
+ rpds-py==0.9.2 \
+ six==1.16.0 \
+ urllib3==2.0.4 \
+ meson==0.64.1
+
+RUN groupadd -g 1000 testuser \
+ && useradd -rm -d /home/testuser -s /bin/bash -g testuser -G sudo -u 1000 testuser \
+ && echo 'testuser:Csit1234' | chpasswd
+
+RUN echo 'root:Csit1234' | chpasswd \
+ && sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config \
+ && echo "export VISIBLE=now" >> /etc/profile
+
+RUN service ssh start
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-sut.service b/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-sut.service
new file mode 100644
index 0000000000..431387c95c
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-sut.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=CSIT Initialize Docker SUT
+After=network.target
+
+[Service]
+Type=oneshot
+RemainAfterExit=True
+ExecStart=docker compose -f /opt/csit-docker-images/docker-compose-sut.yaml up --detach
+ExecStop=docker compose -f /opt/csit-docker-images/docker-compose-sut.yaml down
+
+[Install]
+WantedBy=default.target
diff --git a/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-tg.service b/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-tg.service
new file mode 100644
index 0000000000..2c93724a4c
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-tg.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=CSIT Initialize Docker TG
+After=network.target
+
+[Service]
+Type=oneshot
+RemainAfterExit=True
+ExecStart=docker compose -f /opt/csit-docker-images/docker-compose-tg.yaml up --detach
+ExecStop=docker compose -f /opt/csit-docker-images/docker-compose-tg.yaml down
+
+[Install]
+WantedBy=default.target
diff --git a/fdio.infra.ansible/roles/docker_images/files/csit-sut/Dockerfile b/fdio.infra.ansible/roles/docker_images/files/csit-sut/Dockerfile
new file mode 100644
index 0000000000..85537bc32f
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/csit-sut/Dockerfile
@@ -0,0 +1,7 @@
+FROM base-ubuntu2204:local
+
+EXPOSE 2222
+
+COPY supervisord.conf /etc/supervisor/supervisord.conf
+
+CMD ["sh", "-c", "rm -f /dev/shm/db /dev/shm/global_vm /dev/shm/vpe-api; /usr/bin/supervisord -c /etc/supervisor/supervisord.conf; /usr/sbin/sshd -D -p 2222"] \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/csit_sut_image/files/supervisord.conf b/fdio.infra.ansible/roles/docker_images/files/csit-sut/supervisord.conf
index 22a36be5c6..22a36be5c6 100644
--- a/fdio.infra.ansible/roles/csit_sut_image/files/supervisord.conf
+++ b/fdio.infra.ansible/roles/docker_images/files/csit-sut/supervisord.conf
diff --git a/fdio.infra.ansible/roles/docker_images/handlers/main.yaml b/fdio.infra.ansible/roles/docker_images/handlers/main.yaml
new file mode 100644
index 0000000000..766eec432a
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/handlers/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: handlers/main.yaml
+
+- name: "Start csit-initialize-docker-sut.service"
+ ansible.builtin.systemd:
+ enabled: true
+ state: "started"
+ name: "csit-initialize-docker-sut.service"
+ tags:
+ - docker-sut
+
+- name: "Start csit-initialize-docker-tg.service"
+ ansible.builtin.systemd:
+ enabled: true
+ state: "started"
+ name: "csit-initialize-docker-tg.service"
+ tags:
+ - docker-tg \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/tasks/base.yaml b/fdio.infra.ansible/roles/docker_images/tasks/base.yaml
new file mode 100644
index 0000000000..69b3f6217d
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/tasks/base.yaml
@@ -0,0 +1,63 @@
+---
+# file: tasks/base.yaml
+
+- name: "Create a Directory For Docker Images"
+ ansible.builtin.file:
+ path: "/opt/csit-docker-images/"
+ state: "directory"
+ mode: 0755
+ tags:
+ - docker-base
+
+- name: "Copy Build Items"
+ ansible.builtin.copy:
+ src: "{{ item }}"
+ dest: "/opt/csit-docker-images/{{ item }}"
+ owner: "root"
+ group: "root"
+ mode: 0755
+ with_items:
+ - "base/"
+ - "csit-sut/"
+ tags:
+ - docker-base
+
+- name: "Build CSIT Base Docker Image"
+ ansible.builtin.shell: "docker build -t base-ubuntu2204:local ."
+ args:
+ chdir: "/opt/csit-docker-images/base"
+ async: 3000
+ poll: 0
+ register: "docker_built"
+ tags:
+ - docker-base
+
+- name: "Check if CSIT Base Docker Image is Built"
+  ansible.builtin.async_status:
+ jid: "{{ docker_built.ansible_job_id }}"
+ register: "docker_built"
+ until: "docker_built.finished"
+ delay: 10
+ retries: 300
+ tags:
+ - docker-base
+
+- name: "Build CSIT OLD Docker Image"
+ ansible.builtin.shell: "docker build -t csit_sut-ubuntu2204:local ."
+ args:
+ chdir: "/opt/csit-docker-images/csit-sut"
+ async: 3000
+ poll: 0
+ register: "docker_built"
+ tags:
+ - docker-base
+
+- name: "Check if CSIT OLD Docker Image is Built"
+  ansible.builtin.async_status:
+ jid: "{{ docker_built.ansible_job_id }}"
+ register: "docker_built"
+ until: "docker_built.finished"
+ delay: 10
+ retries: 300
+ tags:
+ - docker-base \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/tasks/main.yaml b/fdio.infra.ansible/roles/docker_images/tasks/main.yaml
new file mode 100644
index 0000000000..1005e024f2
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/tasks/main.yaml
@@ -0,0 +1,21 @@
+---
+# file: tasks/main.yaml
+
+- name: "Build Base Docker Images"
+ import_tasks: "base.yaml"
+ tags:
+ - docker-base
+
+- name: "Docker Orchestration for TG"
+ import_tasks: "tg.yaml"
+ when: >
+ docker_tg is defined
+ tags:
+ - docker-tg
+
+- name: "Docker Orchestration for SUT"
+ import_tasks: "sut.yaml"
+ when: >
+ docker_sut is defined
+ tags:
+ - docker-sut \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/tasks/sut.yaml b/fdio.infra.ansible/roles/docker_images/tasks/sut.yaml
new file mode 100644
index 0000000000..8ac179573d
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/tasks/sut.yaml
@@ -0,0 +1,28 @@
+---
+# file: tasks/sut.yaml
+
+- name: "Template Compose File"
+ ansible.builtin.template:
+ src: "{{ item }}.j2"
+ dest: "/opt/csit-docker-images/{{ item }}"
+ owner: "root"
+ group: "root"
+ mode: 0755
+ with_items:
+ - "docker-compose-sut.yaml"
+ tags:
+ - docker-sut
+
+- name: "Copy csit-initialize-docker-sut.service"
+ ansible.builtin.copy:
+ src: "files/csit-initialize-docker-sut.service"
+ dest: "/etc/systemd/system/"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ notify:
+ - "Start csit-initialize-docker-sut.service"
+ tags:
+ - docker-sut
+
+- meta: flush_handlers \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/tasks/tg.yaml b/fdio.infra.ansible/roles/docker_images/tasks/tg.yaml
new file mode 100644
index 0000000000..0623616073
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/tasks/tg.yaml
@@ -0,0 +1,28 @@
+---
+# file: tasks/tg.yaml
+
+- name: "Template Compose File"
+ ansible.builtin.template:
+ src: "{{ item }}.j2"
+ dest: "/opt/csit-docker-images/{{ item }}"
+ owner: "root"
+ group: "root"
+ mode: 0755
+ with_items:
+ - "docker-compose-tg.yaml"
+ tags:
+ - docker-tg
+
+- name: "Copy csit-initialize-docker-tg.service"
+ ansible.builtin.copy:
+ src: "files/csit-initialize-docker-tg.service"
+ dest: "/etc/systemd/system/"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ notify:
+ - "Start csit-initialize-docker-tg.service"
+ tags:
+ - docker-tg
+
+- meta: flush_handlers \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/templates/docker-compose-sut.yaml.j2 b/fdio.infra.ansible/roles/docker_images/templates/docker-compose-sut.yaml.j2
new file mode 100644
index 0000000000..b4713d8552
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/templates/docker-compose-sut.yaml.j2
@@ -0,0 +1,42 @@
+version: "3"
+services:
+ numa-0:
+ build:
+ context: "base/"
+ dockerfile: "Dockerfile"
+ cap_add:
+ - NET_RAW
+ command: ["/usr/sbin/sshd","-D", "-p", "6001"]
+ expose:
+ - "6001"
+ hostname: "{{ ansible_hostname[:-1] }}1"
+ network_mode: "host"
+ privileged: true
+ restart: "always"
+ shm_size: "4G"
+ volumes:
+{% for volume in docker_volumes %}
+ - type: "bind"
+ source: "{{ volume.source }}"
+ target: "{{ volume.target }}"
+{% endfor %}
+ numa-1:
+ build:
+ context: "base/"
+ dockerfile: "Dockerfile"
+ cap_add:
+ - NET_RAW
+ command: ["/usr/sbin/sshd","-D", "-p", "6002"]
+ expose:
+ - "6002"
+ hostname: "{{ ansible_hostname[:-1] }}2"
+ network_mode: "host"
+ privileged: true
+ restart: "always"
+ shm_size: "4G"
+ volumes:
+{% for volume in docker_volumes %}
+ - type: "bind"
+ source: "{{ volume.source }}"
+ target: "{{ volume.target }}"
+{% endfor %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/templates/docker-compose-tg.yaml.j2 b/fdio.infra.ansible/roles/docker_images/templates/docker-compose-tg.yaml.j2
new file mode 100644
index 0000000000..2cee85e169
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/templates/docker-compose-tg.yaml.j2
@@ -0,0 +1,38 @@
+version: "3"
+services:
+ tg-0:
+ build:
+ context: "base/"
+ dockerfile: "Dockerfile"
+ command: ["/usr/sbin/sshd","-D", "-p", "6001"]
+ expose:
+ - "6001"
+ hostname: "{{ ansible_hostname }}"
+ network_mode: "host"
+ privileged: true
+ restart: "always"
+ shm_size: "4G"
+ volumes:
+{% for volume in docker_volumes %}
+ - type: "bind"
+ source: "{{ volume.source }}"
+ target: "{{ volume.target }}"
+{% endfor %}
+ tg-1:
+ build:
+ context: "base/"
+ dockerfile: "Dockerfile"
+ command: ["/usr/sbin/sshd","-D", "-p", "6002"]
+ expose:
+ - "6002"
+ hostname: "{{ ansible_hostname }}"
+ network_mode: "host"
+ privileged: true
+ restart: "always"
+ shm_size: "4G"
+ volumes:
+{% for volume in docker_volumes %}
+ - type: "bind"
+ source: "{{ volume.source }}"
+ target: "{{ volume.target }}"
+{% endfor %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/dpdk/defaults/main.yaml b/fdio.infra.ansible/roles/dpdk/defaults/main.yaml
index b9602bc2d2..d94e9ac91f 100644
--- a/fdio.infra.ansible/roles/dpdk/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/dpdk/defaults/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/dpdk/defaults/main.yaml
+# file: defaults/main.yaml
packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
@@ -8,9 +8,6 @@ packages_base:
packages_by_distro:
ubuntu:
- focal:
- - "build-essential"
- - "libnuma-dev"
jammy:
- "build-essential"
- "libnuma-dev"
@@ -23,5 +20,5 @@ packages_by_arch:
dpdk_target_dir: "/opt"
dpdk_version:
- - "22.03"
+ - "23.11"
dpdk_url: "https://fast.dpdk.org/rel"
diff --git a/fdio.infra.ansible/roles/dpdk/meta/main.yaml b/fdio.infra.ansible/roles/dpdk/meta/main.yaml
index e52ecb87c1..3ca2918d36 100644
--- a/fdio.infra.ansible/roles/dpdk/meta/main.yaml
+++ b/fdio.infra.ansible/roles/dpdk/meta/main.yaml
@@ -1,24 +1,18 @@
---
-# file: roles/dpdk/meta/main.yaml
-
-# desc: Install DPDK from stable branch and configure service.
-# inst: DPDK
-# conf: ?
-# info: 1.0 - added role
+# file: meta/main.yaml
dependencies: []
galaxy_info:
- role_name: dpdk
- author: fd.io
- description: DPDK for Linux.
- company: none
+ role_name: "dpdk"
+ author: "fd.io"
+ description: "DPDK for Linux."
+ company: "none"
license: "license (Apache)"
min_ansible_version: 2.9
platforms:
- - name: Ubuntu
+ - name: "Ubuntu"
versions:
- - focal
- - jammy
+ - "jammy"
galaxy_tags:
- - dpdk
+ - "dpdk"
diff --git a/fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml b/fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml
index 1910c4780c..060f842db7 100644
--- a/fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml
+++ b/fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml
@@ -9,8 +9,8 @@ lint: |
yamllint .
ansible-lint
platforms:
- - name: ${DISTRO:-ubuntu-20.04}
- image: "pmikus/docker-${MOLECULE_DISTRO:-ubuntu-20.04}-ansible:latest"
+ - name: ${DISTRO:-ubuntu-22.04}
+ image: "pmikus/docker-${MOLECULE_DISTRO:-ubuntu-22.04}-ansible:latest"
volumes:
- /sys/fs/cgroup:/sys/fs/cgroup:ro
privileged: true
diff --git a/fdio.infra.ansible/roles/dpdk/tasks/deploy_block.yaml b/fdio.infra.ansible/roles/dpdk/tasks/deploy_block.yaml
new file mode 100644
index 0000000000..1f972f5320
--- /dev/null
+++ b/fdio.infra.ansible/roles/dpdk/tasks/deploy_block.yaml
@@ -0,0 +1,33 @@
+---
+# file: tasks/deploy_block.yaml
+
+- name: Download Release {{ item }}
+ ansible.builtin.get_url:
+ url: "{{ dpdk_url }}/dpdk-{{ item }}.tar.xz"
+ dest: "{{ dpdk_target_dir }}/dpdk-{{ item }}.tar.xz"
+ mode: 0644
+ register: dpdk_downloaded
+
+- name: Extract Release {{ item }}
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ dpdk_target_dir }}/dpdk-{{ item }}.tar.xz"
+ dest: "{{ dpdk_target_dir }}/"
+ creates: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
+  when: dpdk_downloaded is succeeded
+ register: dpdk_extracted
+
+- name: Compile Release I
+ ansible.builtin.command: "meson -Dexamples=l3fwd build"
+ args:
+ chdir: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
+ environment:
+ CFLAGS: "-DRTE_LIBRTE_I40E_16BYTE_RX_DESC=y"
+ register: dpdk_compiled
+
+- name: Compile Release II
+ ansible.builtin.command: "ninja -C build"
+ args:
+ chdir: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
+ environment:
+ CFLAGS: "-DRTE_LIBRTE_I40E_16BYTE_RX_DESC=y" \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/dpdk/tasks/main.yaml b/fdio.infra.ansible/roles/dpdk/tasks/main.yaml
index 9052baff74..4f6c9ec9f2 100644
--- a/fdio.infra.ansible/roles/dpdk/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/dpdk/tasks/main.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/dpdk/tasks/main.yaml
+# file: tasks/main.yaml
-- name: Inst - Update Package Cache (APT)
- apt:
+- name: Update Package Cache (APT)
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -10,53 +10,15 @@
tags:
- dpdk-inst-prerequisites
-- name: Inst - Prerequisites
- package:
+- name: Prerequisites
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: "latest"
tags:
- dpdk-inst-prerequisites
-- name: Inst - Download Release Archive
- get_url:
- url: "{{ dpdk_url }}/dpdk-{{ item }}.tar.xz"
- dest: "{{ dpdk_target_dir }}/dpdk-{{ item }}.tar.xz"
- mode: 0644
+- name: Multiple DPDK Versions
+ include_tasks: deploy_block.yaml
loop: "{{ dpdk_version }}"
- register: "dpdk_downloaded"
- tags:
- - dpdk-inst
-
-- name: Inst - Extract Release Archive
- unarchive:
- remote_src: true
- src: "{{ dpdk_target_dir }}/dpdk-{{ item }}.tar.xz"
- dest: "{{ dpdk_target_dir }}/"
- creates: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
- loop: "{{ dpdk_version }}"
- when: "dpdk_downloaded"
- register: "dpdk_extracted"
- tags:
- - dpdk-inst
-
-- name: Inst - Compile Release I
- command: "meson -Dexamples=l3fwd build"
- args:
- chdir: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
- environment:
- CFLAGS: "-DRTE_LIBRTE_I40E_16BYTE_RX_DESC=y"
- loop: "{{ dpdk_version }}"
- register: "dpdk_compiled"
- tags:
- - dpdk-inst
-
-- name: Inst - Compile Release II
- command: "ninja -C build"
- args:
- chdir: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
- environment:
- CFLAGS: "-DRTE_LIBRTE_I40E_16BYTE_RX_DESC=y"
- loop: "{{ dpdk_version }}"
- register: "dpdk_compiled"
tags:
- dpdk-inst
diff --git a/fdio.infra.ansible/roles/intel/defaults/main.yaml b/fdio.infra.ansible/roles/intel/defaults/main.yaml
index 7f35be3d89..9a3c5c0f0c 100644
--- a/fdio.infra.ansible/roles/intel/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/intel/defaults/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/intel/defaults/main.yaml
+# file: defaults/main.yaml
packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
@@ -8,7 +8,34 @@ packages_base:
packages_by_distro:
ubuntu:
- - []
+ - "build-essential"
+ - "autoconf"
+ - "automake"
+ - "autotools-dev"
+ - "libtool"
+ - "pkgconf"
+ - "asciidoc"
+ - "xmlto"
+ - "uuid-dev"
+ - "libjson-c-dev"
+ - "libkeyutils-dev"
+ - "libz-dev"
+ - "libssl-dev"
+ - "debhelper"
+ - "devscripts"
+ - "debmake"
+ - "quilt"
+ - "fakeroot"
+ - "lintian"
+ - "asciidoctor"
+ - "file"
+ - "gnupg"
+ - "patch"
+ - "patchutils"
+ - "libboost-dev"
+ - "libboost-regex-dev"
+# - "libudev-dev" http://security.ubuntu.com/ubuntu/pool/main/s/systemd/
+ - "yasm"
packages_by_arch:
aarch64:
@@ -21,43 +48,64 @@ intel_download_url: "https://downloadmirror.intel.com"
intel_extract_dir: "/opt"
intel_700_compatibility_matrix:
- dpdk21.02:
- # https://doc.dpdk.org/guides/rel_notes/release_21_02.html
- i40e: "2.14.13"
- iavf: "4.1.1"
- nvm: "8.00"
- dpdk22.03:
- # https://doc.dpdk.org/guides/rel_notes/release_22_03.html
- i40e: "2.17.15"
- iavf: "4.3.19"
- nvm: "8.30"
+ dpdk22.07:
+ # https://doc.dpdk.org/guides/rel_notes/release_22_07.html
+ i40e: "2.19.3"
+ iavf: "4.5.3"
+ nvm: "8.70"
+ dpdk23.11:
+ # https://doc.dpdk.org/guides/rel_notes/release_23_11.html
+ i40e: "2.23.17"
+ iavf: "4.9.5"
+ nvm: "9.30"
intel_800_compatibility_matrix:
- dpdk21.02:
- # https://doc.dpdk.org/guides/rel_notes/release_21_02.html
- ice: "1.4.11"
- ddp: "1.3.28.0"
- iavf: "4.1.1"
- nvm: "2.40"
dpdk22.03:
- # https://doc.dpdk.org/guides/rel_notes/release_22_03.html
- ice: "1.8.3"
- ddp: "1.3.35.0"
- iavf: "4.3.19"
- nvm: "3.20"
+ # custom for vpp_device
+ ice: "1.13.7"
+ ddp: "1.3.45.0"
+ iavf: "4.9.5"
+ nvm: "4.40"
+ dpdk22.07:
+ # https://doc.dpdk.org/guides/rel_notes/release_22_07.html
+ ice: "1.9.7"
+ ddp: "1.3.37.0"
+ iavf: "4.5.3"
+ nvm: "4.00"
+ dpdk23.11:
+ # https://doc.dpdk.org/guides/rel_notes/release_23_11.html
+ ice: "1.13.7"
+ ddp: "1.3.45.0"
+ iavf: "4.9.5"
+ nvm: "4.40"
+
+intel_dsa_compatibility_matrix:
+ dsa: "4.0"
+
+intel_qat_compatibility_matrix:
+ qat2: "1.0.20-00008"
+ qat1: "4.22.0-00001"
intel_i40e_url:
- "2.14.13": "i40e%20stable/2.14.13/i40e-2.14.13.tar.gz/download"
- "2.17.15": "i40e%20stable/2.17.15/i40e-2.17.15.tar.gz/download"
+ "2.19.3": "i40e%20stable/2.19.3/i40e-2.19.3.tar.gz/download"
+ "2.23.17": "i40e%20stable/2.23.17/i40e-2.23.17.tar.gz/download"
intel_ice_url:
- "1.4.11": "ice%20stable/1.4.11/ice-1.4.11.tar.gz/download"
- "1.8.3": "ice%20stable/1.8.3/ice-1.8.3.tar.gz/download"
+ "1.9.7": "ice%20stable/1.9.7/ice-1.9.7.tar.gz/download"
+ "1.13.7": "ice%20stable/1.13.7/ice-1.13.7.tar.gz/download"
intel_iavf_url:
- "4.1.1": "iavf%20stable/4.1.1/iavf-4.1.1.tar.gz/download"
"4.3.19": "iavf%20stable/4.3.19/iavf-4.3.19.tar.gz/download"
+ "4.5.3": "iavf%20stable/4.5.3/iavf-4.5.3.tar.gz/download"
+ "4.9.5": "iavf%20stable/4.9.5/iavf-4.9.5.tar.gz/download"
intel_ddp_url:
- "1.3.28.0": "30467/eng/800%20series%20comms%20binary%20package%201.3.28.0.zip"
- "1.3.35.0": "727568/ice_comms-1.3.35.0.zip"
+ "1.3.37.0": "738733/800%20Series%20DDP%20Comms%20Package%201.3.37.0.zip"
+ "1.3.45.0": "785846/738693_ice_comms-1.3.45.0.zip"
+
+intel_dsa_url:
+ "4.0": "https://github.com/intel/idxd-config/archive/refs/tags/accel-config-v4.0.tar.gz"
+
+intel_qat_url:
+ "1.0.20-00008": "777529/QAT20.L.1.0.20-00008.tar.gz"
+ "4.22.0-00001": "780675/QAT.L.4.22.0-00001.tar.gz"
diff --git a/fdio.infra.ansible/roles/intel/tasks/dsa.yaml b/fdio.infra.ansible/roles/intel/tasks/dsa.yaml
new file mode 100644
index 0000000000..2f038b0e9f
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/dsa.yaml
@@ -0,0 +1,39 @@
+---
+# file: tasks/dsa.yaml
+
+- name: Get DSA Driver
+ ansible.builtin.uri:
+ url: "{{ intel_dsa_url[dsa] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/accel-config-v{{ dsa }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Extract DSA Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/accel-config-v{{ dsa }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/"
+ creates: "{{ intel_extract_dir }}/idxd-config-accel-config-v{{ dsa }}"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install DSA Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/idxd-config-accel-config-v{{ dsa }}"
+ become: true
+ with_items:
+ - "./autogen.sh"
+ - "./configure CFLAGS='-g -O2' --prefix=/usr --sysconfdir=/etc --libdir=/usr/lib64"
+ - "make"
+ - "make check"
+ - "make install"
+ when:
+    - intel_driver_extracted is succeeded
+ tags:
+ - intel-inst
diff --git a/fdio.infra.ansible/roles/intel/tasks/i40e.yaml b/fdio.infra.ansible/roles/intel/tasks/i40e.yaml
index 2299a3b1b8..8b069bf9b2 100644
--- a/fdio.infra.ansible/roles/intel/tasks/i40e.yaml
+++ b/fdio.infra.ansible/roles/intel/tasks/i40e.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/intel/tasks/i40e.yaml
+# file: tasks/i40e.yaml
-- name: Inst - Get i40e Network Adapter Driver
- uri:
+- name: Get i40e Network Adapter Driver
+ ansible.builtin.uri:
url: "{{ intel_sourceforge_download_url }}/{{ intel_i40e_url[i40e] }}"
follow_redirects: "all"
force: true
@@ -12,8 +12,8 @@
tags:
- intel-inst
-- name: Inst - Extract i40e Network Adapter Driver
- unarchive:
+- name: Extract i40e Network Adapter Driver
+ ansible.builtin.unarchive:
remote_src: true
src: "{{ intel_extract_dir }}/i40e-{{ i40e }}.tar.gz"
dest: "{{ intel_extract_dir }}/"
@@ -22,14 +22,14 @@
tags:
- intel-inst
-- name: Inst - i40e Network Adapter Driver
- command: "{{ item }}"
+- name: Install i40e Network Adapter Driver
+ ansible.builtin.command: "{{ item }}"
args:
chdir: "{{ intel_extract_dir }}/i40e-{{ i40e }}/src"
become: true
with_items:
- "make install"
- - "modprobe -r i40e"
+ #- "modprobe -r i40e"
- "modprobe i40e"
when:
- intel_driver_extracted
diff --git a/fdio.infra.ansible/roles/intel/tasks/iavf.yaml b/fdio.infra.ansible/roles/intel/tasks/iavf.yaml
index 44040cfd7e..127e31bee2 100644
--- a/fdio.infra.ansible/roles/intel/tasks/iavf.yaml
+++ b/fdio.infra.ansible/roles/intel/tasks/iavf.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/intel/tasks/iavf.yaml
+# file: tasks/iavf.yaml
-- name: Inst - Get iavf Network Adapter Driver
- uri:
+- name: Get iavf Network Adapter Driver
+ ansible.builtin.uri:
url: "{{ intel_sourceforge_download_url }}/{{ intel_iavf_url[iavf] }}"
follow_redirects: "all"
force: true
@@ -12,8 +12,8 @@
tags:
- intel-inst
-- name: Inst - Extract iavf Network Adapter Driver
- unarchive:
+- name: Extract iavf Network Adapter Driver
+ ansible.builtin.unarchive:
remote_src: true
src: "{{ intel_extract_dir }}/iavf-{{ iavf }}.tar.gz"
dest: "{{ intel_extract_dir }}/"
@@ -22,8 +22,8 @@
tags:
- intel-inst
-- name: Inst - iavf Network Adapter Driver
- command: "{{ item }}"
+- name: Install iavf Network Adapter Driver
+ ansible.builtin.command: "{{ item }}"
args:
chdir: "{{ intel_extract_dir }}/iavf-{{ iavf }}/src"
become: true
diff --git a/fdio.infra.ansible/roles/intel/tasks/ice.yaml b/fdio.infra.ansible/roles/intel/tasks/ice.yaml
index 2386c8390c..c773a65a34 100644
--- a/fdio.infra.ansible/roles/intel/tasks/ice.yaml
+++ b/fdio.infra.ansible/roles/intel/tasks/ice.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/intel/tasks/ice.yaml
+# file: tasks/ice.yaml
-- name: Inst - Get ice Network Adapter Driver
- uri:
+- name: Get ice Network Adapter Driver
+ ansible.builtin.uri:
url: "{{ intel_sourceforge_download_url }}/{{ intel_ice_url[ice] }}"
follow_redirects: "all"
force: true
@@ -12,8 +12,8 @@
tags:
- intel-inst
-- name: Inst - Extract ice Network Adapter Driver
- unarchive:
+- name: Extract ice Network Adapter Driver
+ ansible.builtin.unarchive:
remote_src: true
src: "{{ intel_extract_dir }}/ice-{{ ice }}.tar.gz"
dest: "{{ intel_extract_dir }}/"
@@ -22,30 +22,30 @@
tags:
- intel-inst
-- name: Inst - ice Network Adapter Driver
- command: "{{ item }}"
+- name: Install ice Network Adapter Driver
+ ansible.builtin.command: "{{ item }}"
args:
chdir: "{{ intel_extract_dir }}/ice-{{ ice }}/src"
become: true
with_items:
- "make install"
- - "modprobe -r ice"
+ #- "modprobe -r ice"
- "modprobe ice"
when:
- intel_driver_extracted
tags:
- intel-inst
-- name: Inst - Get Dynamic Device Personalization (DDP) Package
- get_url:
+- name: Get Dynamic Device Personalization (DDP) Package
+ ansible.builtin.get_url:
url: "{{ intel_download_url }}/{{ intel_ddp_url[ddp] }}"
dest: "{{ intel_extract_dir }}/800-Series-Comms-Binary-Package-{{ ddp }}.zip"
mode: 0644
tags:
- intel-inst
-- name: Inst - Extract Dynamic Device Personalization (DDP) Package
- unarchive:
+- name: Extract Dynamic Device Personalization (DDP) Package
+ ansible.builtin.unarchive:
remote_src: true
src: "{{ intel_extract_dir }}/800-Series-Comms-Binary-Package-{{ ddp }}.zip"
dest: "{{ intel_extract_dir }}/"
@@ -54,17 +54,17 @@
tags:
- intel-inst
-#- name: Inst - Extract Dynamic Device Personalization (DDP) Package
-# unarchive:
-# remote_src: true
-# src: "{{ intel_extract_dir }}/ice_comms-{{ ddp }}.zip"
-# dest: "{{ intel_extract_dir }}/"
-# register: intel_driver_extracted
-# tags:
-# - intel-inst
-#
-- name: Inst - Copy Dynamic Device Personalization (DDP) Package
- copy:
+- name: Extract Dynamic Device Personalization (DDP) Package
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/ice_comms-{{ ddp }}.zip"
+ dest: "{{ intel_extract_dir }}/"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Copy Dynamic Device Personalization (DDP) Package
+ ansible.builtin.copy:
src: "{{ intel_extract_dir }}/ice_comms-{{ ddp }}.pkg"
dest: "/lib/firmware/updates/intel/ice/ddp/ice-{{ ddp }}.pkg"
remote_src: true
@@ -72,16 +72,16 @@
tags:
- intel-inst
-- name: Inst - Link Dynamic Device Personalization (DDP) Package
- file:
+- name: Link Dynamic Device Personalization (DDP) Package
+ ansible.builtin.file:
src: "ice-{{ ddp }}.pkg"
dest: "/lib/firmware/updates/intel/ice/ddp/ice.pkg"
state: link
tags:
- intel-inst
-- name: Inst - Extract Dynamic Device Personalization (DDP) Package (cleanup)
- file:
+- name: Extract Dynamic Device Personalization (DDP) Package (cleanup)
+ ansible.builtin.file:
path: "{{ item }}"
state: absent
with_items:
diff --git a/fdio.infra.ansible/roles/intel/tasks/main.yaml b/fdio.infra.ansible/roles/intel/tasks/main.yaml
index 62d8d90499..d7598deca7 100644
--- a/fdio.infra.ansible/roles/intel/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/intel/tasks/main.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/intel/tasks/main.yaml
+# file: tasks/main.yaml
-- name: Inst - Update Package Cache (APT)
- apt:
+- name: Update Package Cache (APT)
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -10,31 +10,55 @@
tags:
- intel-inst-drivers
-- name: Inst - Prerequisites
- package:
+- name: Install Prerequisites
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: latest
tags:
- intel-inst-drivers
-- name: Inst - Check Presence of Intel Ethernet 700 Series
- shell: "lspci -d 8086:1583; lspci -d 8086:1585; lspci -d 8086:1572; lspci -d 8086:158a; lspci -d 8086:158b"
+- name: Check Presence of Intel Ethernet 700 Series
+ ansible.builtin.shell: "lspci -d 8086:1583; lspci -d 8086:1585; lspci -d 8086:1572; lspci -d 8086:158a; lspci -d 8086:158b"
register: intel_700_pcis
failed_when: false
changed_when: false
tags:
- intel-inst-drivers
-- name: Inst - Check Presence of Intel Ethernet 800 Series
- shell: "lspci -d 8086:1592; lspci -d 8086:1891"
+- name: Check Presence of Intel Ethernet 800 Series
+ ansible.builtin.shell: "lspci -d 8086:1592; lspci -d 8086:1891; lspci -d 8086:188c"
register: intel_800_pcis
failed_when: false
changed_when: false
tags:
- intel-inst-drivers
-- name: Inst - Get Intel Ethernet 700 Series driver versions
- set_fact:
+- name: Check Presence of Intel DSA
+ ansible.builtin.shell: "lspci -d 8086:0b25"
+ register: intel_dsa_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Check Presence of Intel C4XXX
+ ansible.builtin.shell: "lspci -d 8086:18a0"
+ register: intel_qat1_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Check Presence of Intel 4XXX
+ ansible.builtin.shell: "lspci -d 8086:4942"
+ register: intel_qat2_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Get Intel Ethernet 700 Series driver versions
+ ansible.builtin.set_fact:
i40e: "{{ intel_700_compatibility_matrix[intel_700_matrix]['i40e'] }}"
iavf: "{{ intel_700_compatibility_matrix[intel_700_matrix]['iavf'] }}"
nvm: "{{ intel_700_compatibility_matrix[intel_700_matrix]['nvm'] }}"
@@ -43,8 +67,8 @@
tags:
- intel-inst-drivers
-- name: Inst - Get Intel Ethernet 800 Series driver versions
- set_fact:
+- name: Get Intel Ethernet 800 Series driver versions
+ ansible.builtin.set_fact:
ice: "{{ intel_800_compatibility_matrix[intel_800_matrix]['ice'] }}"
ddp: "{{ intel_800_compatibility_matrix[intel_800_matrix]['ddp'] }}"
iavf: "{{ intel_800_compatibility_matrix[intel_800_matrix]['iavf'] }}"
@@ -54,7 +78,24 @@
tags:
- intel-inst-drivers
-- name: Inst - Driver Intel Ethernet 700 Series
+- name: Get Intel DSA driver versions
+ ansible.builtin.set_fact:
+ dsa: "{{ intel_dsa_compatibility_matrix['dsa'] }}"
+ when: >
+ intel_dsa_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Get Intel QAT driver versions
+ ansible.builtin.set_fact:
+ qat1: "{{ intel_qat_compatibility_matrix['qat1'] }}"
+ qat2: "{{ intel_qat_compatibility_matrix['qat2'] }}"
+ when: >
+ intel_qat_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel Ethernet 700 Series
import_tasks: i40e.yaml
when: >
intel_700_pcis.stdout_lines | length > 0 and
@@ -62,7 +103,7 @@
tags:
- intel-inst-drivers
-- name: Inst - Driver Intel Ethernet 800 Series
+- name: Driver Intel Ethernet 800 Series
import_tasks: ice.yaml
when: >
intel_800_pcis.stdout_lines | length > 0 and
@@ -70,12 +111,36 @@
tags:
- intel-inst-drivers
-- name: Inst - Driver Intel iAVF
+- name: Driver Intel iAVF
import_tasks: iavf.yaml
when: >
(intel_700_pcis.stdout_lines | length > 0 and
- intel_700_matrix is defined ) or
+ intel_700_matrix is defined) or
(intel_800_pcis.stdout_lines | length > 0 and
intel_800_matrix is defined)
tags:
- intel-inst-drivers
+
+- name: Driver Intel DSA
+ import_tasks: dsa.yaml
+ when: >
+ intel_dsa_pcis.stdout_lines | length > 0 and
+ intel_dsa_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel QAT 1.x
+ import_tasks: qat1.yaml
+ when: >
+ intel_qat1_pcis.stdout_lines | length > 0 and
+ intel_qat_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel QAT 2.x
+ import_tasks: qat2.yaml
+ when: >
+ intel_qat2_pcis.stdout_lines | length > 0 and
+ intel_qat_matrix is defined
+ tags:
+ - intel-inst-drivers \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/intel/tasks/qat1.yaml b/fdio.infra.ansible/roles/intel/tasks/qat1.yaml
new file mode 100644
index 0000000000..701c0c1bf1
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/qat1.yaml
@@ -0,0 +1,54 @@
+---
+# file: tasks/qat1.yaml
+
+- name: Get QAT 1.x Driver
+ ansible.builtin.uri:
+ url: "{{ intel_download_url }}/{{ intel_qat_url[qat1] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Create a Directory For QAT 1.x Driver
+ ansible.builtin.file:
+ path: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}/"
+ state: "directory"
+ mode: "0755"
+ tags:
+ - intel-inst
+
+- name: Extract QAT 1.x Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}/"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install QAT1.x Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}"
+ become: true
+ with_items:
+ - "./configure --enable-icp-sriov=host --enable-icp-sym-only"
+ - "make"
+ - "make install"
+ when:
+    - intel_driver_extracted is succeeded
+ tags:
+ - intel-inst
+
+- name: Load Kernel Modules By Default
+ ansible.builtin.lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "qat_c4xxx"
+ tags:
+ - intel-inst \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/intel/tasks/qat2.yaml b/fdio.infra.ansible/roles/intel/tasks/qat2.yaml
new file mode 100644
index 0000000000..a560f16b2c
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/qat2.yaml
@@ -0,0 +1,57 @@
+---
+# file: tasks/qat2.yaml
+
+- name: Get QAT 2.x Driver
+ ansible.builtin.uri:
+ url: "{{ intel_download_url }}/{{ intel_qat_url[qat2] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Create a Directory For QAT 2.x Driver
+ ansible.builtin.file:
+ path: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}/"
+ state: "directory"
+ mode: "0755"
+ tags:
+ - intel-inst
+
+- name: Extract QAT 2.x Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}/"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install QAT 2.x Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}"
+ become: true
+ with_items:
+ - "wget http://security.ubuntu.com/ubuntu/pool/main/s/systemd/libudev-dev_249.11-0ubuntu3.7_amd64.deb"
+ - "dpkg -i ./libudev-dev_249.11-0ubuntu3.7_amd64.deb"
+ - "./configure --enable-icp-sriov=host --enable-icp-sym-only"
+ - "make"
+ - "make install"
+ - "apt remove -y libudev-dev"
+ when:
+    - intel_driver_extracted is succeeded
+ tags:
+ - intel-inst
+
+- name: Load Kernel Modules By Default
+ ansible.builtin.lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "qat_4xxx"
+ tags:
+ - intel-inst \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/iperf/defaults/main.yaml b/fdio.infra.ansible/roles/iperf/defaults/main.yaml
index f014d83eed..f757b287b7 100644
--- a/fdio.infra.ansible/roles/iperf/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/iperf/defaults/main.yaml
@@ -8,12 +8,9 @@ packages_base:
packages_by_distro:
ubuntu:
- focal:
- - "build-essential"
- - "lib32z1"
jammy:
- "build-essential"
- - "lib32z1"
+ #- "lib32z1"
packages_by_arch:
aarch64:
diff --git a/fdio.infra.ansible/roles/iperf/tasks/main.yaml b/fdio.infra.ansible/roles/iperf/tasks/main.yaml
index a2b7709cef..6184ba25f1 100644
--- a/fdio.infra.ansible/roles/iperf/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/iperf/tasks/main.yaml
@@ -2,7 +2,7 @@
# file: roles/iperf/tasks/main.yaml
- name: Inst - Update Package Cache (APT)
- apt:
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -11,14 +11,14 @@
- iperf-inst-prerequisites
- name: Inst - Prerequisites
- package:
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: latest
tags:
- iperf-inst-prerequisites
- name: Get Release Archive
- get_url:
+ ansible.builtin.get_url:
url: "https://downloads.es.net/pub/iperf/iperf-{{ item }}.tar.gz"
dest: "{{ iperf_target_dir }}/iperf-{{ item }}.tar.gz"
validate_certs: false
@@ -28,7 +28,7 @@
- iperf-inst
- name: Extract Release Archive
- unarchive:
+ ansible.builtin.unarchive:
remote_src: true
src: "{{ iperf_target_dir }}/iperf-{{ item }}.tar.gz"
dest: "{{ iperf_target_dir }}/"
@@ -38,7 +38,7 @@
- iperf-inst
- name: Compile Release I
- command: "./configure"
+ ansible.builtin.command: "./configure"
args:
chdir: "{{ iperf_target_dir }}/iperf-{{ item }}/"
loop: "{{ iperf_version }}"
@@ -46,7 +46,7 @@
- iperf-inst
- name: Compile Release II
- command: "make"
+ ansible.builtin.command: "make"
args:
chdir: "{{ iperf_target_dir }}/iperf-{{ item }}/"
loop: "{{ iperf_version }}"
@@ -54,7 +54,7 @@
- iperf-inst
- name: Compile Release III
- command: "make install"
+ ansible.builtin.command: "make install"
args:
chdir: "{{ iperf_target_dir }}/iperf-{{ item }}/"
loop: "{{ iperf_version }}"
diff --git a/fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml b/fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml
index 73a0caecfa..ab54aac516 100644
--- a/fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml
@@ -8,22 +8,15 @@ req_timeout_sec: 30
bind_to: "0.0.0.0:9186"
last_builds: 10
jobs:
- - "vpp-csit-verify-api-crc-master-ubuntu2004-x86_64"
- - "vpp-beta-verify-master-ubuntu2004-aarch64"
- - "vpp-verify-master-centos8-aarch64"
- - "vpp-verify-master-ubuntu1804-aarch64"
- - "vpp-gcc-verify-master-ubuntu2004-x86_64"
- - "vpp-verify-master-centos8-x86_64"
- - "vpp-verify-master-debian10-x86_64"
- - "vpp-verify-master-ubuntu2004-x86_64"
- - "vpp-verify-master-ubuntu1804-x86_64"
- - "vpp-debug-verify-master-ubuntu2004-x86_64"
- - "vpp-checkstyle-verify-master-ubuntu2004-x86_64"
- - "vpp-sphinx-docs-verify-master-ubuntu1804-x86_64"
- - "vpp-docs-verify-master-ubuntu1804-x86_64"
- - "vpp-make-test-docs-verify-master-ubuntu1804-x86_64"
- - "vpp-csit-verify-device-master-ubuntu2004-x86_64-1n-skx"
- - "vpp-csit-verify-device-master-ubuntu2004-aarch64-1n-tx2"
+ - "vpp-csit-verify-api-crc-master-ubuntu2204-x86_64"
+ - "vpp-gcc-verify-master-ubuntu2204-x86_64"
+ - "vpp-verify-master-ubuntu2204-aarch64"
+ - "vpp-verify-master-ubuntu2204-x86_64"
+ - "vpp-debug-verify-master-ubuntu2204-x86_64"
+ - "vpp-checkstyle-verify-master-ubuntu2204-x86_64"
+ - "vpp-docs-verify-master-ubuntu2204-x86_64"
+ - "vpp-csit-verify-device-master-ubuntu2204-x86_64-1n-skx"
+ - "vpp-csit-verify-device-master-ubuntu2204-aarch64-1n-tx2"
# Conf - Service.
jenkins_job_health_exporter_restart_handler_state: "restarted"
diff --git a/fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml b/fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml
index 55cbea92de..efdc26a6d2 100644
--- a/fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml
@@ -9,7 +9,7 @@
group: "root"
mode: "0644"
when:
- - ansible_hostname == "s16-nomad"
+ - ansible_hostname == "s22-nomad"
tags:
- conf-jenkins-job-json
@@ -19,7 +19,7 @@
dest: "{{ jenkins_job_health_exporter_target_dir }}/jenkins-job-health-exporter"
mode: "0755"
when:
- - ansible_hostname == "s16-nomad"
+ - ansible_hostname == "s22-nomad"
tags:
- inst-jenkins-job-binary
@@ -31,7 +31,7 @@
group: "root"
mode: "0644"
when:
- - ansible_hostname == "s16-nomad"
+ - ansible_hostname == "s22-nomad"
notify:
- "Restart Jenkins Job Health Exporter"
tags:
diff --git a/fdio.infra.ansible/roles/kernel/defaults/main.yaml b/fdio.infra.ansible/roles/kernel/defaults/main.yaml
index 60fa91e970..ef628c93f5 100644
--- a/fdio.infra.ansible/roles/kernel/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/kernel/defaults/main.yaml
@@ -6,21 +6,13 @@ kernel_version: "{{ kernel_version_by_distro[ansible_distribution|lower][ansible
kernel_version_by_distro:
ubuntu:
- focal:
- - "5.4.0-65"
jammy:
- - "4.15.0-72"
+ - "5.15.0-46"
kernel_packages: "{{ kernel_packages_by_distro[ansible_distribution|lower][ansible_distribution_release] | flatten(levels=1) }}"
kernel_packages_by_distro:
ubuntu:
- focal:
- - "linux-image"
- - "linux-headers"
- - "linux-modules"
- - "linux-modules-extra"
- - "linux-tools"
jammy:
- "linux-image"
- "linux-headers"
@@ -33,10 +25,6 @@ absent_packages: "{{ absent_packages_by_distro[ansible_distribution|lower][ansib
absent_packages_by_distro:
ubuntu:
- focal:
- - "amd64-microcode"
- - "intel-microcode"
- - "iucode-tool"
jammy:
- "amd64-microcode"
- "intel-microcode"
diff --git a/fdio.infra.ansible/roles/kernel/handlers/main.yaml b/fdio.infra.ansible/roles/kernel/handlers/main.yaml
index 8e1239ab31..d0be276a5b 100644
--- a/fdio.infra.ansible/roles/kernel/handlers/main.yaml
+++ b/fdio.infra.ansible/roles/kernel/handlers/main.yaml
@@ -2,7 +2,7 @@
# file roles/kernel/handlers/main.yaml
- name: Reboot Server
- reboot:
+ ansible.builtin.reboot:
reboot_timeout: 3600
tags:
- reboot-server
diff --git a/fdio.infra.ansible/roles/kernel/tasks/ubuntu_focal.yaml b/fdio.infra.ansible/roles/kernel/tasks/ubuntu_focal.yaml
deleted file mode 100644
index 3e2686d565..0000000000
--- a/fdio.infra.ansible/roles/kernel/tasks/ubuntu_focal.yaml
+++ /dev/null
@@ -1,62 +0,0 @@
----
-# file: roles/kernel/tasks/ubuntu_focal.yaml
-
-- name: Get Available Kernel Versions
- command: "apt-cache showpkg linux-headers-*"
- changed_when: false
- register: apt_kernel_list
- tags:
- - kernel-inst
-
-- name: Get installed packages with APT
- command: "dpkg -l"
- changed_when: false
- register: apt_packages_list
- tags:
- - kernel-inst
-
-- name: Set target APT kernel version
- set_fact:
- _kernel: "{{ apt_kernel_list | deb_kernel(
- kernel_version, ansible_kernel) }}"
- tags:
- - kernel-inst
-
-- name: Disable APT auto upgrade
- lineinfile:
- path: "/etc/apt/apt.conf.d/20auto-upgrades"
- state: "present"
- regexp: "APT::Periodic::Unattended-Upgrade \"[0-9]\";"
- line: "APT::Periodic::Unattended-Upgrade \"0\";"
- create: true
- mode: 0644
- tags:
- - kernel-inst
-
-- name: Ensure Packages Versions
- apt:
- name: "{{ apt_kernel_list | deb_kernel_pkg(
- kernel_version, ansible_kernel, ansible_distribution,
- ansible_architecture, item) }}"
- loop: "{{ kernel_packages }}"
- tags:
- - kernel-inst
-
-- name: Ensure Any Other Kernel Packages Are Removed
- apt:
- name: "{{ apt_packages_list | deb_installed_kernel(
- apt_kernel_list, kernel_version, ansible_kernel) }}"
- state: absent
- purge: true
- notify:
- - "Reboot Server"
- tags:
- - kernel-inst
-
-- name: Ensure Any Microcode Is Absent
- apt:
- name: "{{ absent_packages }}"
- state: absent
- purge: true
- tags:
- - kernel-inst
diff --git a/fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml b/fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml
index 15ce221e66..af987d4e5a 100644
--- a/fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml
+++ b/fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml
@@ -2,28 +2,28 @@
# file: roles/kernel/tasks/ubuntu_jammy.yaml
- name: Get Available Kernel Versions
- command: "apt-cache showpkg linux-headers-*"
+ ansible.builtin.command: "apt-cache showpkg linux-headers-*"
changed_when: false
register: apt_kernel_list
tags:
- kernel-inst
- name: Get installed packages with APT
- command: "dpkg -l"
+ ansible.builtin.command: "dpkg -l"
changed_when: false
register: apt_packages_list
tags:
- kernel-inst
- name: Set target APT kernel version
- set_fact:
+ ansible.builtin.set_fact:
_kernel: "{{ apt_kernel_list | deb_kernel(
kernel_version, ansible_kernel) }}"
tags:
- kernel-inst
- name: Disable APT auto upgrade
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/apt/apt.conf.d/20auto-upgrades"
state: "present"
regexp: "APT::Periodic::Unattended-Upgrade \"[0-9]\";"
@@ -34,7 +34,7 @@
- kernel-inst
- name: Ensure Packages Versions
- apt:
+ ansible.builtin.apt:
name: "{{ apt_kernel_list | deb_kernel_pkg(
kernel_version, ansible_kernel, ansible_distribution,
ansible_architecture, item) }}"
@@ -43,10 +43,10 @@
- kernel-inst
- name: Ensure Any Other Kernel Packages Are Removed
- apt:
+ ansible.builtin.apt:
name: "{{ apt_packages_list | deb_installed_kernel(
apt_kernel_list, kernel_version, ansible_kernel) }}"
- state: absent
+ state: "absent"
purge: true
notify:
- "Reboot Server"
@@ -54,9 +54,9 @@
- kernel-inst
- name: Ensure Any Microcode Is Absent
- apt:
+ ansible.builtin.apt:
name: "{{ absent_packages }}"
- state: absent
+ state: "absent"
purge: true
tags:
- kernel-inst
diff --git a/fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml b/fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml
index 78bb822f80..cd8eb15c57 100644
--- a/fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml
@@ -2,7 +2,7 @@
# file: roles/kernel_vm/tasks/main.yaml
- name: Inst - Backup remote initramfs modules
- copy:
+ ansible.builtin.copy:
src: "/etc/initramfs-tools/modules"
dest: "/tmp/initramfs_modules.bkp"
remote_src: true
@@ -12,7 +12,7 @@
- kernel-inst-image
- name: Inst - Backup remote initramfs resume config
- copy:
+ ansible.builtin.copy:
src: "/etc/initramfs-tools/conf.d/resume"
dest: "/tmp/initramfs-resume.bkp"
remote_src: true
@@ -22,33 +22,33 @@
- kernel-inst-image
- name: Inst - Update remote initramfs modules
- copy:
+ ansible.builtin.copy:
src: "../files/initramfs_modules"
dest: "/etc/initramfs-tools/modules"
tags:
- kernel-inst-image
- name: Inst - Update remote initramfs resume config
- copy:
+ ansible.builtin.copy:
src: "../files/initramfs_resume"
dest: "/etc/initramfs-tools/conf.d/resume"
tags:
- kernel-inst-image
- name: Inst - Create target kernel dir
- file:
+ ansible.builtin.file:
path: "/opt/boot"
state: "directory"
tags:
- kernel-inst-image
- name: Inst - Build initrd image
- shell: "update-initramfs -k {{ ansible_kernel }} -c -b /opt/boot"
+ ansible.builtin.shell: "update-initramfs -k {{ ansible_kernel }} -c -b /opt/boot"
tags:
- kernel-inst-image
- name: Inst - Copy corresponding kernel img
- copy:
+ ansible.builtin.copy:
src: "/boot/vmlinuz-{{ ansible_kernel }}"
dest: "/opt/boot/vmlinuz-{{ ansible_kernel }}"
remote_src: true
@@ -56,7 +56,7 @@
- kernel-inst-image
- name: Inst - Restore remote initramfs modules
- copy:
+ ansible.builtin.copy:
src: "/tmp/initramfs_modules.bkp"
dest: "/etc/initramfs-tools/modules"
remote_src: true
@@ -66,7 +66,7 @@
- kernel-inst-image
- name: Inst - Remove remote backup initramfs modules
- file:
+ ansible.builtin.file:
path: "/tmp/initramfs_modules.bkp"
state: "absent"
when: __initramfs_modules_backuped
@@ -74,7 +74,7 @@
- kernel-inst-image
- name: Inst - Restore remote initramfs resume config
- copy:
+ ansible.builtin.copy:
src: "/tmp/initramfs-resume.bkp"
dest: "/etc/initramfs-tools/conf.d/resume"
remote_src: true
@@ -84,7 +84,7 @@
- kernel-inst-image
- name: Inst - Remove remote backup initramfs resume config
- file:
+ ansible.builtin.file:
path: "/tmp/initramfs-resume.bkp"
state: "absent"
when: __initramfs_resume_backuped
diff --git a/fdio.infra.ansible/roles/kubernetes/defaults/main.yaml b/fdio.infra.ansible/roles/kubernetes/defaults/main.yaml
deleted file mode 100644
index 1a2f773950..0000000000
--- a/fdio.infra.ansible/roles/kubernetes/defaults/main.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
----
-# file: roles/kubernetes/defaults/main.yaml
-
-# Version options.
-kubernetes_version: "1.11.0-00"
-kubernetes_apt_package_state: present
-
-# Service options.
-kubernetes_service_state: started
-kubernetes_service_enabled: true
-kubernetes_restart_handler_state: restarted
-
-# APT options.
-kubernetes_apt_repository: "deb http://apt.kubernetes.io/ kubernetes-xenial main"
-kubernetes_apt_repository_state: present
diff --git a/fdio.infra.ansible/roles/kubernetes/tasks/main.yaml b/fdio.infra.ansible/roles/kubernetes/tasks/main.yaml
deleted file mode 100644
index 160ffb8c06..0000000000
--- a/fdio.infra.ansible/roles/kubernetes/tasks/main.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-# file: roles/kubernetes/tasks/main.yaml
-
-- name: Kubernetes - Install distribution - release - machine prerequisites
- include_tasks: '{{ ansible_distribution|lower }}_{{ ansible_distribution_release }}.yaml'
- tags: install-kubernetes
-
-- name: Kubernetes - Apply kubelet parameter
- lineinfile:
- path: '/etc/default/kubelet'
- state: 'present'
- regexp: '^KUBELET_EXTRA_ARGS=*'
- line: 'KUBELET_EXTRA_ARGS=--feature-gates HugePages=false'
- tags: install-kubernetes
diff --git a/fdio.infra.ansible/roles/kubernetes/tasks/ubuntu_jammy.yaml b/fdio.infra.ansible/roles/kubernetes/tasks/ubuntu_jammy.yaml
deleted file mode 100644
index 5e155257ce..0000000000
--- a/fdio.infra.ansible/roles/kubernetes/tasks/ubuntu_jammy.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
----
-# file: roles/kubernetes/tasks/ubuntu_jammy.yaml
-
-- name: Kubernetes repository - Dependencies
- apt:
- name:
- - 'apt-transport-https'
- - 'ca-certificates'
- - 'software-properties-common'
- state: 'present'
- cache_valid_time: 3600
- install_recommends: false
- tags: install-kubernetes
-
-- name: Kubernetes repository - Add an Apt signing key
- apt_key:
- url: 'https://packages.cloud.google.com/apt/doc/apt-key.gpg'
- state: 'present'
- tags: install-kubernetes
-
-- name: Kubernetes repository - Install APT repository
- apt_repository:
- repo: '{{ kubernetes_apt_repository }}'
- state: '{{ kubernetes_apt_repository_state }}'
- update_cache: true
- tags: install-kubernetes
-
-- name: Kubernetes - Install
- apt:
- name:
- - 'kubernetes-cni=0.6.0-00'
- - 'kubeadm={{ kubernetes_version }}'
- - 'kubectl={{ kubernetes_version }}'
- - 'kubelet={{ kubernetes_version }}'
- state: '{{ kubernetes_apt_package_state }}'
- force: true
- tags: install-kubernetes
diff --git a/fdio.infra.ansible/roles/mellanox/defaults/main.yaml b/fdio.infra.ansible/roles/mellanox/defaults/main.yaml
index a12a613f74..de66be2d6b 100644
--- a/fdio.infra.ansible/roles/mellanox/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/mellanox/defaults/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/mellanox/defaults/main.yaml
+# file: defaults/main.yaml
packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
@@ -21,10 +21,10 @@ packages_by_arch:
mellanox_download_url: "http://content.mellanox.com/ofed"
mellanox_extract_dir: "/opt"
-mellanox_cx5_compatibility_matrix:
- dpdk21.02:
- # https://doc.dpdk.org/guides/rel_notes/release_21_02.html
- ofed: "5.3-1.0.5.0"
- dpdk22.03:
- # https://doc.dpdk.org/guides/rel_notes/release_22_03.html
- ofed: "5.5-1.0.3.2"
+mellanox_compatibility_matrix:
+ dpdk22.07:
+ # https://doc.dpdk.org/guides/rel_notes/release_22_07.html
+ ofed: "5.9-0.5.6.0"
+ dpdk23.11:
+ # https://doc.dpdk.org/guides/rel_notes/release_23_11.html
+ ofed: "23.07-0.5.0.0"
diff --git a/fdio.infra.ansible/roles/mellanox/tasks/main.yaml b/fdio.infra.ansible/roles/mellanox/tasks/main.yaml
index 81fefc8ae3..53376eb997 100644
--- a/fdio.infra.ansible/roles/mellanox/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/mellanox/tasks/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/mellanox/tasks/main.yaml
+# file: tasks/main.yaml
- name: Inst - Update Package Cache (APT)
apt:
@@ -13,11 +13,11 @@
- name: Inst - Prerequisites
package:
name: "{{ packages | flatten(levels=1) }}"
- state: latest
+ state: "latest"
tags:
- mellanox-inst-drivers
-- name: Inst - Check Presence of Mellanox CX-5
+- name: Inst - Check Presence of Mellanox
shell: "lspci | grep Mellanox | awk '{print $1}'"
register: mellanox_pcis
failed_when: false
@@ -25,19 +25,19 @@
tags:
- mellanox-inst-drivers
-- name: Inst - Get Mellanox CX-5 OFED driver versions
+- name: Inst - Get Mellanox OFED driver versions
set_fact:
- ofed: "{{ mellanox_cx5_compatibility_matrix[mellanox_cx5_matrix]['ofed'] }}"
+ ofed: "{{ mellanox_compatibility_matrix[mellanox_matrix]['ofed'] }}"
when: >
mellanox_pcis.stdout_lines | length > 0 and
- mellanox_cx5_matrix is defined
+ mellanox_matrix is defined
tags:
- mellanox-inst-drivers
-- name: Inst - Driver Mellanox CX-5
+- name: Inst - Driver Mellanox
import_tasks: ofed.yaml
when: >
mellanox_pcis.stdout_lines | length > 0 and
- mellanox_cx5_matrix is defined
+ mellanox_matrix is defined
tags:
- mellanox-inst-drivers
diff --git a/fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml b/fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml
index 2ad40b0afb..c39975bab6 100644
--- a/fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml
+++ b/fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/mellanox/tasks/ofed.yaml
+# file: tasks/ofed.yaml
- name: Inst - Get OFED
- get_url:
+ ansible.builtin.get_url:
url: "{{ mellanox_download_url }}/MLNX_OFED-{{ ofed }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}.tgz"
dest: "{{ mellanox_extract_dir }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}.tgz"
mode: "0644"
@@ -11,7 +11,7 @@
- mellanox-inst-drivers
- name: Inst - Extract OFED
- unarchive:
+ ansible.builtin.unarchive:
remote_src: true
src: "{{ mellanox_extract_dir }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}.tgz"
dest: "{{ mellanox_extract_dir }}/"
@@ -21,7 +21,7 @@
- mellanox-inst-drivers
- name: Inst - OFED
- command: "./mlnxofedinstall --with-mft --dpdk --force --upstream-libs --without-fw-update"
+ ansible.builtin.command: "./mlnxofedinstall --with-mft --dpdk --force --upstream-libs" # --without-fw-update
args:
chdir: "{{ mellanox_extract_dir }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}"
when: mellanox_firmware_extracted
@@ -29,14 +29,9 @@
- mellanox-inst-drivers
- name: Inst - Switch Infiniband to Ethernet
- command: "mlxconfig --yes --dev {{ item }} set LINK_TYPE_P1=2 LINK_TYPE_P2=2"
+ ansible.builtin.command: "mlxconfig --yes --dev {{ item }} set LINK_TYPE_P1=2 LINK_TYPE_P2=2"
with_items: "{{ mellanox_pcis.stdout_lines }}"
+ failed_when: false
+ changed_when: false
tags:
- - mellanox-inst-drivers
-
-- name: FIX qemu-system removal
- package:
- name: "qemu-system"
- state: latest
- tags:
- - mellanox-inst-drivers
+ - mellanox-inst-drivers
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/defaults/main.yaml b/fdio.infra.ansible/roles/nomad/defaults/main.yaml
index b4741f8d43..535db2bb2c 100644
--- a/fdio.infra.ansible/roles/nomad/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/nomad/defaults/main.yaml
@@ -16,7 +16,7 @@ packages_by_arch:
- []
# Package
-nomad_version: "{{ lookup('env','NOMAD_VERSION') | default('1.3.1', true) }}"
+nomad_version: "1.4.3"
nomad_architecture_map:
amd64: "amd64"
x86_64: "amd64"
@@ -28,12 +28,6 @@ nomad_architecture: "{{ nomad_architecture_map[ansible_architecture] }}"
nomad_pkg: "nomad_{{ nomad_version }}_linux_{{nomad_architecture}}.zip"
nomad_zip_url: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_{{nomad_architecture}}.zip"
nomad_checksum_file_url: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version}}_SHA256SUMS"
-nomad_podman_enable: false
-nomad_podman_version: "{{ lookup('env','NOMAD_PODMAN_VERSION') | default('0.1.0', true) }}"
-nomad_podman_pkg: "nomad-driver-podman_{{ nomad_podman_version }}_linux_{{nomad_architecture}}.zip"
-nomad_podman_url: "https://releases.hashicorp.com/nomad-driver-podman/{{ nomad_podman_version }}"
-nomad_podman_zip_url: "{{ nomad_podman_url }}/{{ nomad_podman_pkg }}"
-nomad_podman_checksum_file_url: "{{ nomad_podman_url }}/nomad-driver-podman_{{ nomad_podman_version }}_SHA256SUMS"
nomad_force_update: false
# Paths
@@ -47,23 +41,20 @@ nomad_run_dir: "/var/run/nomad"
nomad_ssl_dir: "/etc/nomad.d/ssl"
# Initialization and startup script templates
-nomad_restart_handler_state: "restarted"
nomad_service_mgr: ""
# System user and group
nomad_group: "nomad"
-nomad_group_state: "present"
nomad_user: "nomad"
-nomad_user_state: "present"
# Nomad settings
nomad_datacenter: "dc1"
nomad_region: "global"
nomad_log_level: "INFO"
nomad_syslog_enable: true
-nomad_iface: "{{ lookup('env','NOMAD_IFACE') | default(ansible_default_ipv4.interface, true) }}"
+nomad_iface: "{{ ansible_default_ipv4.interface }}"
nomad_node_name: "{{ inventory_hostname }}"
-nomad_node_role: "{{ lookup('env','NOMAD_NODE_ROLE') | default('server', true) }}"
+nomad_node_role: "server"
nomad_leave_on_terminate: true
nomad_leave_on_interrupt: false
nomad_disable_update_check: true
@@ -88,7 +79,7 @@ nomad_eval_gc_threshold: "1h"
# Specifies the minimum time a deployment must be in the terminal state before
# it is eligible for garbage collection.
nomad_deployment_gc_threshold: "1h"
-nomad_encrypt_enable: "{{ lookup('env','NOMAD_ENCRYPT_ENABLE') | default('false', true) }}"
+nomad_encrypt_enable: false
nomad_raft_protocol: 2
# Client settings
@@ -127,7 +118,7 @@ nomad_servers: "\
{% set _nomad_servers = [] %}\
{% for host in groups[nomad_group_name] %}\
{% set _nomad_node_role = hostvars[host]['nomad_node_role'] | default('client', true) %}\
- {% if ( _nomad_node_role == 'server' or _nomad_node_role == 'both') %}\
+ {% if (_nomad_node_role == 'server' or _nomad_node_role == 'both') %}\
{% if _nomad_servers.append(host) %}{% endif %}\
{% endif %}\
{% endfor %}\
@@ -144,56 +135,59 @@ nomad_consul_token: ""
nomad_consul_servers_service_name: "nomad"
nomad_consul_clients_service_name: "nomad-client"
nomad_consul_tags: {}
+nomad_consul_use_ssl: false
# ACLs
-nomad_acl_enabled: "{{ lookup('env', 'NOMAD_ACL_ENABLED') | default('no', true) }}"
+nomad_acl_enabled: false
nomad_acl_token_ttl: "30s"
nomad_acl_policy_ttl: "30s"
nomad_acl_replication_token: ""
-# Vault
-nomad_vault_enabled: "{{ lookup('env', 'NOMAD_VAULT_ENABLED') | default('no', true) }}"
-nomad_vault_address: "{{ vault_address | default('0.0.0.0', true) }}"
-nomad_vault_allow_unauthenticated: true
-nomad_vault_create_from_role: ""
-nomad_vault_task_token_ttl: ""
-nomad_vault_ca_file: ""
-nomad_vault_ca_path: ""
-nomad_vault_cert_file: ""
-nomad_vault_key_file: ""
-nomad_vault_tls_server_name: ""
-nomad_vault_tls_skip_verify: false
-nomad_vault_token: ""
-nomad_vault_namespace: ""
-
# Docker
-nomad_docker_enable: "{{ lookup('env','NOMAD_DOCKER_ENABLE') | default('false', true) }}"
+nomad_docker_enable: false
nomad_docker_dmsetup: true
-# TLS
-nomad_tls_enable: true
-nomad_ca_file: "{{ nomad_ssl_dir }}/nomad-ca.pem"
-nomad_cert_file: "{{ nomad_ssl_dir }}/nomad.pem"
-nomad_key_file: "{{ nomad_ssl_dir }}/nomad-key.pem"
-nomad_cli_cert_file: "{{ nomad_ssl_dir }}/nomad-cli.pem"
-nomad_cli_key_file: "{{ nomad_ssl_dir }}/nomad-cli-key.pem"
-nomad_http: false
-nomad_rpc: false
-nomad_rpc_upgrade_mode: false
-nomad_verify_server_hostname: false
-nomad_verify_https_client: false
-
-# Conf - autopilot.hcl
+# Autopilot
nomad_autopilot_cleanup_dead_servers: true
nomad_autopilot_last_contact_threshold: "200ms"
nomad_autopilot_max_trailing_logs: 250
nomad_autopilot_server_stabilization_time: "10s"
-# Telemetry
-nomad_telemetry: true
+# Telemetry.
+nomad_use_telemetry: true
nomad_telemetry_disable_hostname: false
-nomad_telemetry_collection_interval: 60s
+nomad_telemetry_collection_interval: "1s"
nomad_telemetry_use_node_name: false
nomad_telemetry_publish_allocation_metrics: true
nomad_telemetry_publish_node_metrics: true
nomad_telemetry_prometheus_metrics: true
+
+# TLS.
+nomad_use_tls: true
+nomad_tls_ca_file: "{{ nomad_ssl_dir }}/nomad-ca.pem"
+nomad_tls_cert_file: "{{ nomad_ssl_dir }}/nomad.pem"
+nomad_tls_key_file: "{{ nomad_ssl_dir }}/nomad-key.pem"
+nomad_tls_cli_cert_file: "{{ nomad_ssl_dir }}/nomad-cli.pem"
+nomad_tls_cli_key_file: "{{ nomad_ssl_dir }}/nomad-cli-key.pem"
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_tls_rpc_upgrade_mode: false
+nomad_tls_verify_https_client: false
+nomad_tls_verify_server_hostname: false
+
+# Vault
+nomad_use_vault: false
+nomad_vault_address: "http://vault.service.consul:8200"
+nomad_vault_allow_unauthenticated: true
+nomad_vault_enabled: false
+nomad_vault_create_from_role: ""
+nomad_vault_task_token_ttl: "72h"
+nomad_vault_use_ssl: false
+nomad_vault_ca_file: ""
+nomad_vault_ca_path: ""
+nomad_vault_cert_file: ""
+nomad_vault_key_file: ""
+nomad_vault_namespace: ""
+nomad_vault_tls_server_name: ""
+nomad_vault_tls_skip_verify: false
+nomad_vault_token: ""
diff --git a/fdio.infra.ansible/roles/nomad/handlers/main.yaml b/fdio.infra.ansible/roles/nomad/handlers/main.yaml
index 6263f3dda1..32e5798e3e 100644
--- a/fdio.infra.ansible/roles/nomad/handlers/main.yaml
+++ b/fdio.infra.ansible/roles/nomad/handlers/main.yaml
@@ -1,9 +1,9 @@
---
-# file roles/nomad/handlers/main.yaml
+# file handlers/main.yaml
- name: Restart Nomad
ansible.builtin.systemd:
daemon_reload: true
enabled: true
name: "nomad"
- state: "{{ nomad_restart_handler_state }}"
+ state: "restarted"
diff --git a/fdio.infra.ansible/roles/nomad/meta/main.yaml b/fdio.infra.ansible/roles/nomad/meta/main.yaml
index f7b25fe8eb..098aafe2fb 100644
--- a/fdio.infra.ansible/roles/nomad/meta/main.yaml
+++ b/fdio.infra.ansible/roles/nomad/meta/main.yaml
@@ -1,19 +1,21 @@
---
-# file: roles/nomad/meta/main.yaml
+# file: meta/main.yaml
-dependencies: [docker]
+dependencies: ["docker"]
galaxy_info:
- role_name: nomad
- author: fd.io
- description: Hashicorp Nomad.
- company: none
+ role_name: "nomad"
+ author: "pmikus"
+ description: "Hashicorp Nomad."
+ company: "none"
license: "license (Apache)"
- min_ansible_version: 2.9
+ min_ansible_version: "2.9"
platforms:
- - name: Ubuntu
- versions:
- - focal
- - jammy
+ - name: "Ubuntu"
+ release:
+ - "focal"
+ - "jammy"
+ - "kinetic"
galaxy_tags:
- - nomad
+ - "nomad"
+ - "hashicorp"
diff --git a/fdio.infra.ansible/roles/nomad/tasks/main.yaml b/fdio.infra.ansible/roles/nomad/tasks/main.yaml
index 0204c0591c..72b78458f8 100644
--- a/fdio.infra.ansible/roles/nomad/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/nomad/tasks/main.yaml
@@ -1,7 +1,7 @@
---
# file: tasks/main.yaml
-- name: Inst - Update Repositories Cache
+- name: Update Repositories Cache
ansible.builtin.apt:
update_cache: true
when:
@@ -9,7 +9,7 @@
tags:
- nomad-inst-package
-- name: Inst - Dependencies
+- name: Dependencies
ansible.builtin.apt:
name: "{{ packages | flatten(levels=1) }}"
state: "present"
@@ -20,30 +20,31 @@
tags:
- nomad-inst-dependencies
-- name: Conf - Add Nomad Group
+- name: Add Nomad Group
ansible.builtin.group:
name: "{{ nomad_group }}"
- state: "{{ nomad_user_state }}"
+ state: "present"
tags:
- nomad-conf-user
-- name: Conf - Add Nomad user
+- name: Add Nomad user
ansible.builtin.user:
name: "{{ nomad_user }}"
group: "{{ nomad_group }}"
- state: "{{ nomad_group_state }}"
+ state: "present"
system: true
tags:
- nomad-conf-user
-- name: Inst - Download Nomad
+- name: Download Nomad
ansible.builtin.get_url:
url: "{{ nomad_zip_url }}"
dest: "{{ nomad_inst_dir }}/{{ nomad_pkg }}"
+ mode: "0644"
tags:
- nomad-inst-package
-- name: Inst - Clean Nomad
+- name: Clean Nomad
ansible.builtin.file:
path: "{{ nomad_inst_dir }}/nomad"
state: "absent"
@@ -52,7 +53,7 @@
tags:
- nomad-inst-package
-- name: Inst - Unarchive Nomad
+- name: Unarchive Nomad
ansible.builtin.unarchive:
src: "{{ nomad_inst_dir }}/{{ nomad_pkg }}"
dest: "{{ nomad_inst_dir }}/"
@@ -60,7 +61,7 @@
tags:
- nomad-inst-package
-- name: Inst - Nomad
+- name: Nomad
ansible.builtin.copy:
src: "{{ nomad_inst_dir }}/nomad"
dest: "{{ nomad_bin_dir }}"
@@ -72,97 +73,35 @@
tags:
- nomad-inst-package
-- name: Conf - Create Directories "{{ nomad_data_dir }}"
- ansible.builtin.file:
- dest: "{{ nomad_data_dir }}"
- state: directory
- owner: "{{ nomad_user }}"
- group: "{{ nomad_group }}"
- mode: 0755
- tags:
- - nomad-conf
-
-- name: Conf - Create Directories "{{ nomad_ssl_dir }}"
- ansible.builtin.file:
- dest: "{{ nomad_ssl_dir }}"
- state: directory
- owner: "{{ nomad_user }}"
- group: "{{ nomad_group }}"
- mode: 0755
- tags:
- - nomad-conf
-
-- name: Conf - Create Config Directory
+- name: Create Directories
ansible.builtin.file:
- dest: "{{ nomad_config_dir }}"
- state: directory
+ dest: "{{ item }}"
+ state: "directory"
owner: "{{ nomad_user }}"
group: "{{ nomad_group }}"
mode: 0755
+ with_items:
+ - "{{ nomad_data_dir }}"
+ - "{{ nomad_config_dir }}"
+ - "{{ nomad_ssl_dir }}"
tags:
- nomad-conf
-- name: Conf - Base Configuration
- ansible.builtin.template:
- src: base.hcl.j2
- dest: "{{ nomad_config_dir }}/base.hcl"
- owner: "{{ nomad_user }}"
- group: "{{ nomad_group }}"
- mode: 0644
- tags:
- - nomad-conf
-
-- name: Conf - Server Configuration
+- name: Base Configuration
ansible.builtin.template:
- src: server.hcl.j2
- dest: "{{ nomad_config_dir }}/server.hcl"
- owner: "{{ nomad_user }}"
- group: "{{ nomad_group }}"
- mode: 0644
- when:
- - nomad_node_server | bool
- tags:
- - nomad-conf
-
-- name: Conf - Client Configuration
- ansible.builtin.template:
- src: client.hcl.j2
- dest: "{{ nomad_config_dir }}/client.hcl"
- owner: "{{ nomad_user }}"
- group: "{{ nomad_group }}"
- mode: 0644
- when:
- - nomad_node_client | bool
- tags:
- - nomad-conf
-
-- name: Conf - TLS Configuration
- ansible.builtin.template:
- src: tls.hcl.j2
- dest: "{{ nomad_config_dir }}/tls.hcl"
- owner: "{{ nomad_user }}"
- group: "{{ nomad_group }}"
- mode: 0644
- tags:
- - nomad-conf
-
-- name: Conf - Telemetry Configuration
- ansible.builtin.template:
- src: telemetry.hcl.j2
- dest: "{{ nomad_config_dir }}/telemetry.hcl"
- owner: "{{ nomad_user }}"
- group: "{{ nomad_group }}"
- mode: 0644
- tags:
- - nomad-conf
-
-- name: Conf - Consul Configuration
- ansible.builtin.template:
- src: consul.hcl.j2
- dest: "{{ nomad_config_dir }}/consul.hcl"
+ src: "{{ item }}.hcl.j2"
+ dest: "{{ nomad_config_dir }}/{{ item }}.hcl"
owner: "{{ nomad_user }}"
group: "{{ nomad_group }}"
mode: 0644
+ with_items:
+ - "base"
+ - "consul"
+ - "client"
+ - "server"
+ - "telemetry"
+ - "tls"
+ - "vault"
tags:
- nomad-conf
@@ -180,20 +119,21 @@
tags:
- nomad-conf
-- name: Conf - Nomad CLI Environment Variables
+- name: Nomad CLI Environment Variables
ansible.builtin.lineinfile:
path: "/etc/profile.d/nomad.sh"
line: "{{ item }}"
+ mode: "0644"
create: true
loop:
- - "export NOMAD_ADDR=http://nomad.service.consul:4646"
- - "export NOMAD_CACERT={{ nomad_ca_file }}"
-# - "export NOMAD_CLIENT_CERT={{ nomad_cli_cert_file }}"
-# - "export NOMAD_CLIENT_KEY={{ nomad_cli_key_file }}"
+ - "export NOMAD_ADDR=https://nomad-server.service.consul:4646"
+ - "export NOMAD_CACERT={{ nomad_tls_ca_file }}"
+ - "export NOMAD_CLIENT_CERT={{ nomad_tls_cli_cert_file }}"
+ - "export NOMAD_CLIENT_KEY={{ nomad_tls_cli_key_file }}"
tags:
- nomad-conf
-- name: Conf - System.d Script
+- name: System.d Script
ansible.builtin.template:
src: "nomad_systemd.service.j2"
dest: "/lib/systemd/system/nomad.service"
diff --git a/fdio.infra.ansible/roles/nomad/templates/cfssl.json b/fdio.infra.ansible/roles/nomad/templates/cfssl.json
new file mode 100644
index 0000000000..2b603e9b84
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/cfssl.json
@@ -0,0 +1,8 @@
+{
+ "signing": {
+ "default": {
+ "expiry": "87600h",
+ "usages": ["signing", "key encipherment", "server auth", "client auth"]
+ }
+ }
+}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/templates/consul.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/consul.hcl.j2
index 6d30676ca0..a9c1aff7b2 100644
--- a/fdio.infra.ansible/roles/nomad/templates/consul.hcl.j2
+++ b/fdio.infra.ansible/roles/nomad/templates/consul.hcl.j2
@@ -1,18 +1,63 @@
{% if nomad_use_consul | bool == True %}
consul {
- # The address to the Consul agent.
+ # Specifies the address to the local Consul agent, given in the format
+ # host:port.
address = "{{ nomad_consul_address }}"
- token = "{{ nomad_consul_token }}"
- # The service name to register the server and client with Consul.
- server_service_name = "{{ nomad_consul_servers_service_name }}"
- client_service_name = "{{ nomad_consul_clients_service_name }}"
- tags = {{ nomad_consul_tags | to_json }}
- # Enables automatically registering the services.
+ # Specifies if Nomad should advertise its services in Consul. The services
+ # are named according to server_service_name and client_service_name. Nomad
+ # servers and clients advertise their respective services, each tagged
+ # appropriately with either http or rpc tag. Nomad servers also advertise a
+ # serf tagged service.
auto_advertise = true
- # Enabling the server and client to bootstrap using Consul.
- server_auto_join = true
+ # Specifies if the Nomad clients should automatically discover servers in
+ # the same region by searching for the Consul service name defined in the
+ # server_service_name option. The search occurs if the client is not
+ # registered with any servers or it is unable to heartbeat to the leader of
+ # the region, in which case it may be partitioned and searches for other
+ # servers.
client_auto_join = true
+
+ # Specifies the name of the service in Consul for the Nomad clients.
+ client_service_name = "{{ nomad_consul_clients_service_name }}"
+
+ # Specifies the name of the service in Consul for the Nomad servers.
+ server_service_name = "{{ nomad_consul_servers_service_name }}"
+
+ # Specifies if the Nomad servers should automatically discover and join
+ # other Nomad servers by searching for the Consul service name defined in
+ # the server_service_name option. This search only happens if the server
+ # does not have a leader.
+ server_auto_join = true
+
+ # Specifies optional Consul tags to be registered with the Nomad server and
+ # agent services.
+ tags = {{ nomad_consul_tags | to_json }}
+
+ # Specifies the token used to provide a per-request ACL token. This option
+ # overrides the Consul Agent's default token. If the token is not set here
+ # or on the Consul agent, it will default to Consul's anonymous policy,
+ # which may or may not allow writes.
+ token = "{{ nomad_consul_token }}"
+
+ {% if nomad_consul_use_ssl | bool == True -%}
+ # Specifies if the transport scheme should use HTTPS to communicate with the
+ # Consul agent.
+ ssl = true
+
+ # Specifies an optional path to the CA certificate used for Consul
+ # communication. This defaults to the system bundle if unspecified.
+ ca_file = "{{ nomad_ca_file }}"
+
+ # Specifies the path to the certificate used for Consul communication. If
+ # this is set then you need to also set key_file.
+ cert_file = "{{ nomad_cert_file }}"
+
+ # Specifies the path to the private key used for Consul communication. If
+ # this is set then you need to also set cert_file.
+ key_file = "{{ nomad_key_file }}"
+ {% endif %}
+
}
{% endif %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j2 b/fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j2
index 61f07df5b6..564505781b 100644
--- a/fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j2
+++ b/fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j2
@@ -11,6 +11,11 @@ After=network-online.target
#After=consul.service
[Service]
+# Nomad server should be run as the nomad user. Nomad clients
+# should be run as root
+#User=nomad
+#Group=nomad
+
ExecReload=/bin/kill -HUP $MAINPID
ExecStart={{ nomad_bin_dir }}/nomad agent -config {{ nomad_config_dir }}
KillMode=process
diff --git a/fdio.infra.ansible/roles/nomad/templates/server.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/server.hcl.j2
index 570d92e0c1..e19dea9e6f 100644
--- a/fdio.infra.ansible/roles/nomad/templates/server.hcl.j2
+++ b/fdio.infra.ansible/roles/nomad/templates/server.hcl.j2
@@ -1,3 +1,4 @@
+{% if nomad_node_server | bool == True %}
server {
enabled = {{ nomad_node_server | bool | lower }}
@@ -54,3 +55,4 @@ server {
{%- endif %}
}
+{% endif %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j2
index 4ad5330d1b..14be0d9548 100644
--- a/fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j2
+++ b/fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j2
@@ -1,10 +1,26 @@
-{% if nomad_telemetry | bool == True %}
+{% if nomad_use_telemetry | bool == True %}
telemetry {
- disable_hostname = "{{ nomad_telemetry_disable_hostname | default(false) | bool | lower }}"
- collection_interval = "{{ nomad_telemetry_collection_interval | default("1s") }}"
- use_node_name = "{{ nomad_telemetry_use_node_name | default(false) | bool | lower }}"
- publish_allocation_metrics = "{{ nomad_telemetry_publish_allocation_metrics | default(false) | bool | lower }}"
- publish_node_metrics = "{{ nomad_telemetry_publish_node_metrics | default(false) | bool | lower }}"
- prometheus_metrics = "{{ nomad_telemetry_prometheus_metrics | default(false) | bool | lower }}"
+ # Specifies if gauge values should be prefixed with the local hostname.
+ disable_hostname = {{ nomad_telemetry_disable_hostname | bool | lower }}
+
+ # Specifies the time interval at which the Nomad agent collects telemetry
+ # data.
+ collection_interval = "{{ nomad_telemetry_collection_interval }}"
+
+ # Specifies if gauge values should be prefixed with the name of the node,
+ # instead of the hostname. If set it will override disable_hostname value.
+ use_node_name = {{ nomad_telemetry_use_node_name | bool | lower }}
+
+ # Specifies if Nomad should publish runtime metrics of allocations.
+ publish_allocation_metrics = {{ nomad_telemetry_publish_allocation_metrics | bool | lower }}
+
+ # Specifies if Nomad should publish runtime metrics of nodes.
+ publish_node_metrics = {{ nomad_telemetry_publish_node_metrics | bool | lower }}
+
+  # Specifies whether the agent should make Prometheus formatted metrics
+  # available at /v1/metrics?format=prometheus. When enabled, the agent
+  # serves metrics in the Prometheus exposition format in addition to the
+  # default JSON telemetry endpoint.
+ prometheus_metrics = {{ nomad_telemetry_prometheus_metrics | bool | lower }}
}
{% endif %}
diff --git a/fdio.infra.ansible/roles/nomad/templates/tls.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/tls.hcl.j2
index ceccdc8be5..0a1a5b20a4 100644
--- a/fdio.infra.ansible/roles/nomad/templates/tls.hcl.j2
+++ b/fdio.infra.ansible/roles/nomad/templates/tls.hcl.j2
@@ -1,12 +1,36 @@
-{% if nomad_tls_enable | bool %}
+{% if nomad_use_tls | bool %}
tls {
- http = {{ nomad_http | bool | lower }}
- rpc = {{ nomad_rpc | bool | lower }}
- ca_file = "{{ nomad_ca_file }}"
- cert_file = "{{ nomad_cert_file }}"
- key_file = "{{ nomad_key_file }}"
- rpc_upgrade_mode = {{ nomad_rpc_upgrade_mode | bool | lower }}
- verify_server_hostname = {{ nomad_verify_server_hostname | bool | lower }}
- verify_https_client = {{ nomad_verify_https_client | bool | lower }}
+ # Specifies the path to the CA certificate to use for Nomad's TLS
+ # communication.
+ ca_file = "{{ nomad_tls_ca_file }}"
+
+ # Specifies the path to the certificate file used for Nomad's TLS
+ # communication.
+ cert_file = "{{ nomad_tls_cert_file }}"
+
+ # Specifies the path to the key file to use for Nomad's TLS communication.
+ key_file = "{{ nomad_tls_key_file }}"
+
+ # Specifies if TLS should be enabled on the HTTP endpoints on the Nomad
+ # agent, including the API.
+ http = {{ nomad_tls_http | bool | lower }}
+
+ # Specifies if TLS should be enabled on the RPC endpoints and Raft traffic
+ # between the Nomad servers. Enabling this on a Nomad client makes the
+ # client use TLS for making RPC requests to the Nomad servers.
+ rpc = {{ nomad_tls_rpc | bool | lower }}
+
+ # This option should be used only when the cluster is being upgraded to
+ # TLS, and removed after the migration is complete. This allows the agent
+ # to accept both TLS and plaintext traffic.
+ rpc_upgrade_mode = {{ nomad_tls_rpc_upgrade_mode | bool | lower }}
+
+ # Specifies agents should require client certificates for all incoming
+ # HTTPS requests. The client certificates must be signed by the same CA
+ # as Nomad.
+ verify_https_client = {{ nomad_tls_verify_https_client | bool | lower }}
+
+ # Specifies if outgoing TLS connections should verify the server's hostname.
+ verify_server_hostname = {{ nomad_tls_verify_server_hostname | bool | lower }}
}
{% endif %}
diff --git a/fdio.infra.ansible/roles/nomad/templates/vault.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/vault.hcl.j2
new file mode 100644
index 0000000000..7911cbc5c4
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/vault.hcl.j2
@@ -0,0 +1,69 @@
+{% if nomad_use_vault | bool == True %}
+vault {
+ # Specifies the address to the Vault server. This must include the protocol,
+ # host/ip, and port given in the format protocol://host:port. If your Vault
+ # installation is behind a load balancer, this should be the address of the
+ # load balancer.
+ address = "{{ nomad_vault_address }}"
+
+ # Specifies if users submitting jobs to the Nomad server should be required
+ # to provide their own Vault token, proving they have access to the policies
+ # listed in the job. This option should be disabled in an untrusted
+ # environment.
+ allow_unauthenticated = {{ nomad_vault_allow_unauthenticated | bool | lower }}
+
+ # Specifies if the Vault integration should be activated.
+ enabled = {{ nomad_vault_enabled | bool | lower }}
+
+ # Specifies the role to create tokens from. The token given to Nomad does
+ # not have to be created from this role but must have "update" capability
+ # on "auth/token/create/<create_from_role>" path in Vault. If this value is
+ # unset and the token is created from a role, the value is defaulted to the
+ # role the token is from. This is largely for backwards compatibility. It
+ # is recommended to set the create_from_role field if Nomad is deriving
+ # child tokens from a role.
+ create_from_role = "{{ nomad_vault_create_from_role }}"
+
+ # Specifies the TTL of created tokens when using a root token. This is
+ # specified using a label suffix like "30s" or "1h".
+ task_token_ttl = "{{ nomad_vault_task_token_ttl }}"
+
+ {% if nomad_vault_use_ssl | bool == True -%}
+ # Specifies an optional path to the CA certificate used for Vault
+ # communication. If unspecified, this will fallback to the default system
+ # CA bundle, which varies by OS and version.
+ ca_file = "{{ nomad_vault_ca_file }}"
+
+ # Specifies an optional path to a folder containing CA certificates to be
+ # used for Vault communication. If unspecified, this will fallback to the
+ # default system CA bundle, which varies by OS and version.
+ ca_path = "{{ nomad_vault_ca_path }}"
+
+ # Specifies the path to the certificate used for Vault communication. This
+ # must be set if tls_require_and_verify_client_cert is enabled in Vault.
+ cert_file = "{{ nomad_vault_cert_file }}"
+
+ # Specifies the path to the private key used for Vault communication. If
+ # this is set then you need to also set cert_file. This must be set if
+ # tls_require_and_verify_client_cert is enabled in Vault.
+ key_file = "{{ nomad_vault_key_file }}"
+
+ # Specifies the Vault namespace used by the Vault integration. If non-empty,
+ # this namespace will be used on all Vault API calls.
+ namespace = "{{ nomad_vault_namespace }}"
+
+ # Specifies an optional string used to set the SNI host when connecting to
+ # Vault via TLS.
+ tls_server_name = "{{ nomad_vault_tls_server_name }}"
+
+ # Specifies if SSL peer validation should be enforced.
+ tls_skip_verify = {{ nomad_vault_tls_skip_verify | bool | lower }}
+ {% endif %}
+
+ # Specifies the parent Vault token to use to derive child tokens for jobs
+ # requesting tokens. Only required on Nomad servers. Nomad client agents
+ # use the allocation's token when contacting Vault. Visit the Vault
+ # Integration Guide to see how to generate an appropriate token in Vault.
+ token = "{{ nomad_vault_token }}"
+}
+{% endif %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/vars/main.yaml b/fdio.infra.ansible/roles/nomad/vars/main.yaml
index a72222c992..791eeadb06 100644
--- a/fdio.infra.ansible/roles/nomad/vars/main.yaml
+++ b/fdio.infra.ansible/roles/nomad/vars/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/nomad/vars/main.yaml
+# file: vars/main.yaml
nomad_node_client: "{{ (nomad_node_role == 'client') or (nomad_node_role == 'both') }}"
nomad_node_server: "{{ (nomad_node_role == 'server') or (nomad_node_role == 'both') }}"
diff --git a/fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml b/fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml
index d07a75d446..5a732e5539 100644
--- a/fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml
@@ -8,8 +8,6 @@ packages_base:
packages_by_distro:
ubuntu:
- focal:
- - []
jammy:
- []
diff --git a/fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml b/fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml
index fa2876b7ac..5a48fc37b4 100644
--- a/fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml
+++ b/fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml
@@ -1,13 +1,13 @@
---
-# file roles/performance_tuning/handlers/main.yaml
+# file handlers/main.yaml
- name: Update GRUB
- command: update-grub
+ ansible.builtin.command: update-grub
tags:
- update-grub
-- name: Reboot server
- reboot:
+- name: Reboot Server
+ ansible.builtin.reboot:
reboot_timeout: 3600
tags:
- reboot-server
diff --git a/fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml b/fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml
index e3e22d03ac..cc904e23e9 100644
--- a/fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml
@@ -2,7 +2,7 @@
# file: roles/performance_tuning/tasks/main.yaml
- name: Inst - Update Package Cache (APT)
- apt:
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -11,7 +11,7 @@
- perf-inst-prerequisites
- name: Inst - Machine Prerequisites
- package:
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: latest
tags:
@@ -22,7 +22,8 @@
when: >
cpu_microarchitecture == "skylake" or
cpu_microarchitecture == "cascadelake" or
- cpu_microarchitecture == "icelake"
+ cpu_microarchitecture == "icelake" or
+ cpu_microarchitecture == "sapphirerapids"
tags:
- perf-conf-turbo-boost
@@ -36,7 +37,7 @@
# programs, particularly malloc debuggers, may consume lots of them,
# e.g., up to one or two maps per allocation.
# must be greater than or equal to (2 * vm.nr_hugepages).
- sysctl:
+ ansible.builtin.sysctl:
name: "vm.max_map_count"
value: "{{ sysctl.vm.nr_hugepages * 4 }}"
state: "present"
@@ -48,7 +49,7 @@
- name: Conf - Adjust hugetlb_shm_group
# hugetlb_shm_group contains group id that is allowed to create sysv
# shared memory segment using hugetlb page.
- sysctl:
+ ansible.builtin.sysctl:
name: "vm.hugetlb_shm_group"
value: "1000"
state: "present"
@@ -63,7 +64,7 @@
# decrease the amount of swap. a value of 0 instructs the kernel not to
# initiate swap until the amount of free and file-backed pages is less
# than the high water mark in a zone.
- sysctl:
+ ansible.builtin.sysctl:
name: "vm.swappiness"
value: "0"
state: "present"
@@ -78,7 +79,7 @@
# if the existing kernel.shmmax setting (cat /sys/proc/kernel/shmmax)
# is greater than the calculated totalhugepagesize then set this parameter
# to current shmmax value.
- sysctl:
+ ansible.builtin.sysctl:
name: "kernel.shmmax"
value: "{{ sysctl.vm.nr_hugepages * 2 * 1024 * 1024 }}"
state: "present"
@@ -98,7 +99,7 @@
# typically this value would only be touched in the nohz_full case
# to re-enable cores that by default were not running the watchdog,
# if a kernel lockup was suspected on those cores.
- sysctl:
+ ansible.builtin.sysctl:
name: "kernel.watchdog_cpumask"
value: "{{ sysctl.kernel.watchdog_cpumask }}"
state: "present"
@@ -114,7 +115,7 @@
# 0 - turn the process address space randomization off. this is the
# default for architectures that do not support this feature anyways,
# and kernels that are booted with the "norandmaps" parameter.
- sysctl:
+ ansible.builtin.sysctl:
name: "kernel.randomize_va_space"
value: "0"
state: "present"
@@ -124,7 +125,7 @@
- perf-conf-sysctl
- name: Conf - Cpufrequtils
- copy:
+ ansible.builtin.copy:
src: "files/cpufrequtils"
dest: "/etc/default/cpufrequtils"
owner: "root"
@@ -134,7 +135,7 @@
- perf-conf-cpufrequtils
- name: Conf - Irqbalance
- template:
+ ansible.builtin.template:
src: "files/irqbalance"
dest: "/etc/default/irqbalance"
owner: "root"
@@ -143,15 +144,8 @@
tags:
- perf-conf-irqbalance
-- name: Conf - Set Ondemand Service To Disable
- service:
- name: "ondemand"
- enabled: "no"
- tags:
- - perf-conf-ondemand
-
- name: Conf - Kernel Parameters
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/default/grub"
state: "present"
regexp: "^GRUB_CMDLINE_LINUX="
@@ -164,7 +158,7 @@
- meta: flush_handlers
- name: Conf - Load Kernel Modules By Default
- lineinfile:
+ ansible.builtin.lineinfile:
path: "/etc/modules"
state: "present"
line: "{{ item }}"
@@ -176,7 +170,7 @@
- perf-conf-load-kernel-modules
- name: Conf - Create a directory for 1G HugeTLBs hugepages
- file:
+ ansible.builtin.file:
path: "/dev/hugepages1G"
state: "directory"
mode: 0755
@@ -184,7 +178,7 @@
- perf-conf-hugepages-1g
- name: Conf - Mount 1G HugeTLBs hugepages
- mount:
+ ansible.builtin.mount:
path: "/dev/hugepages1G"
src: "hugetlbfs"
opts: "pagesize=1G"
@@ -195,7 +189,7 @@
- perf-conf-hugepages-1g
- name: Create a directory if it does not exist
- file:
+ ansible.builtin.file:
path: "/dev/hugepages2M"
state: "directory"
mode: 0755
@@ -203,7 +197,7 @@
- perf-conf-hugepages-2m
- name: Conf - Create a directory for 2M HugeTLBs hugepages
- mount:
+ ansible.builtin.mount:
path: "/dev/hugepages2M"
src: "hugetlbfs"
opts: "pagesize=2M"
diff --git a/fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml b/fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml
index 0ee6c2c224..7d8b861882 100644
--- a/fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml
+++ b/fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml
@@ -13,7 +13,6 @@ galaxy_info:
platforms:
- name: Ubuntu
versions:
- - focal
- jammy
galaxy_tags:
- prometheus
diff --git a/fdio.infra.ansible/roles/python_env/defaults/main.yaml b/fdio.infra.ansible/roles/python_env/defaults/main.yaml
index 89e85d64fc..4b572c0dd0 100644
--- a/fdio.infra.ansible/roles/python_env/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/python_env/defaults/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/common/defaults/main.yaml
+# file: defaults/main.yaml
packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
@@ -8,15 +8,6 @@ packages_base:
packages_by_distro:
ubuntu:
- focal:
- - "python3-all"
- - "python3-apt"
- - "python3-cffi"
- - "python3-cffi-backend"
- - "python3-dev"
- - "python3-pip"
- - "python3-pyelftools"
- - "python3-setuptools"
jammy:
- "python3-all"
- "python3-apt"
diff --git a/fdio.infra.ansible/roles/python_env/tasks/main.yaml b/fdio.infra.ansible/roles/python_env/tasks/main.yaml
index 4e4cfb447c..02850110a9 100644
--- a/fdio.infra.ansible/roles/python_env/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/python_env/tasks/main.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/python_env/tasks/main.yaml
+# file: tasks/main.yaml
- name: Inst - Update package cache (apt)
- apt:
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -11,75 +11,52 @@
- common-inst-prerequisites
- name: Inst - Prerequisites
- package:
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: latest
tags:
- common-inst-prerequisites
- name: Inst - CSIT PIP requirements
- pip:
+ ansible.builtin.pip:
name:
- - "ecdsa==0.13.3"
- - "paramiko==2.6.0"
+ - "ecdsa==0.18.0"
+ - "paramiko==3.3.1"
- "pycrypto==2.6.1"
- - "pypcap==1.2.3"
- - "PyYAML==5.1.1"
- - "requests==2.25.1"
- - "robotframework==3.1.2"
- - "scapy==2.4.3"
- - "scp==0.13.2"
- - "ansible==2.10.7"
- - "dill==0.3.3"
- - "numpy==1.17.3"
- - "hdrhistogram==0.6.1"
- - "plotly==4.1.1"
- - "PTable==0.9.2"
- - "Sphinx==3.5.4"
- - "sphinx-rtd-theme==0.5.2"
- - "sphinxcontrib-programoutput==0.17"
- - "sphinxcontrib-robotdoc==0.11.0"
- - "alabaster==0.7.12"
- - "Babel==2.9.0"
- - "bcrypt==3.1.7"
- - "certifi==2020.12.5"
- - "cffi==1.13.2"
- - "chardet==4.0.0"
- - "cryptography==2.8"
- - "docutils==0.16"
- - "future==0.18.2"
- - "idna==2.10"
- - "imagesize==1.2.0"
- - "Jinja2==2.11.3"
- - "MarkupSafe==1.1.1"
- - "packaging==20.9"
- - "pbr==5.5.1"
- - "ply==3.11"
- - "pycparser==2.19"
- - "Pygments==2.8.1"
- - "PyNaCl==1.3.0"
- - "pyparsing==2.4.7"
- "python-dateutil==2.8.2"
- - "pytz==2021.1"
- - "retrying==1.3.3"
- - "six==1.15.0"
- - "snowballstemmer==2.1.0"
- - "sphinxcontrib-applehelp==1.0.2"
- - "sphinxcontrib-devhelp==1.0.2"
- - "sphinxcontrib-htmlhelp==1.0.3"
- - "sphinxcontrib-jsmath==1.0.1"
- - "sphinxcontrib-qthelp==1.0.3"
- - "sphinxcontrib-serializinghtml==1.1.4"
- - "urllib3==1.25.6"
+ - "PyYAML==6.0.1"
+ - "requests==2.31.0"
+ - "robotframework==6.1.1"
+ - "scapy==2.4.5"
+ - "scp==0.14.5"
+ - "ansible==8.2.0"
+ - "ansible-core==2.15.2"
+ - "dill==0.3.7"
+ - "numpy==1.25.2"
+ - "scipy==1.11.1"
+ - "ply==3.11"
+ - "jsonschema==4.18.4"
+ - "rfc3339-validator==0.1.4"
+ - "rfc3987==1.3.8"
+ - "attrs==23.1.0"
+ - "bcrypt==4.0.1"
+ - "certifi==2023.7.22"
+ - "cffi==1.15.1"
+ - "charset-normalizer==3.2.0"
+ - "cryptography==41.0.3"
+ - "idna==3.4"
+ - "Jinja2==3.1.2"
+ - "jsonschema-specifications==2023.7.1"
+ - "MarkupSafe==2.1.3"
+ - "packaging==23.1"
+ - "pycparser==2.21"
+ - "PyNaCl==1.5.0"
+ - "referencing==0.30.0"
+ - "resolvelib==1.0.1"
+ - "rpds-py==0.9.2"
+ - "six==1.16.0"
+ - "urllib3==2.0.4"
environment:
ANSIBLE_SKIP_CONFLICT_CHECK: 1
tags:
- common-inst-pip
-
-- name: Inst - CSIT PIP requirements - Pandas and SciPy workaround
- pip:
- name:
- - "pandas==0.25.3"
- - "scipy==1.5.4"
- tags:
- - common-inst-pip
diff --git a/fdio.infra.ansible/roles/tg/files/csit-initialize-docker-tg.service b/fdio.infra.ansible/roles/tg/files/csit-initialize-docker-tg.service
deleted file mode 100644
index 11911201d5..0000000000
--- a/fdio.infra.ansible/roles/tg/files/csit-initialize-docker-tg.service
+++ /dev/null
@@ -1,12 +0,0 @@
-[Unit]
-Description=CSIT Initialize Docker TG
-After=network.target
-
-[Service]
-Type=oneshot
-RemainAfterExit=True
-ExecStart=/usr/local/bin/csit-initialize-docker-tg.sh start 2
-ExecStop=/usr/local/bin/csit-initialize-docker-tg.sh stop
-
-[Install]
-WantedBy=default.target
diff --git a/fdio.infra.ansible/roles/tg/files/csit-initialize-docker-tg.sh b/fdio.infra.ansible/roles/tg/files/csit-initialize-docker-tg.sh
deleted file mode 100755
index 0120795e9c..0000000000
--- a/fdio.infra.ansible/roles/tg/files/csit-initialize-docker-tg.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env bash
-
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# CSIT SRIOV VF initialization and isolation.
-
-set -euo pipefail
-
-case "${1:-start}" in
- "start" )
- # Run TG
- for cnt in $(seq 1 ${2:-1}); do
- docker network create --driver bridge csit-nw-tg${cnt} || true
- # If the IMAGE is not already loaded then docker run will pull the
- # IMAGE, and all image dependencies, before it starts the container.
- dcr_image="csit_sut-ubuntu2004:local"
- # Run the container in the background and print the new container
- # ID.
- dcr_stc_params="--detach=true "
- # Give extended privileges to this container. A "privileged"
- # container is given access to all devices and able to run nested
- # containers.
- dcr_stc_params+="--privileged "
- # Publish all exposed ports to random ports on the host interfaces.
- dcr_stc_params+="--publish 600${cnt}:2222 "
- # Automatically remove the container when it exits.
- dcr_stc_params+="--rm "
- # Size of /dev/shm.
- dcr_stc_params+="--shm-size 4G "
- # Mount vfio to be able to bind to see binded interfaces. We cannot
- # use --device=/dev/vfio as this does not see newly binded
- # interfaces.
- dcr_stc_params+="--volume /dev:/dev "
- # Mount /opt/boot/ where VM kernel and initrd are located.
- dcr_stc_params+="--volume /opt:/opt "
- # Mount host hugepages for VMs.
- dcr_stc_params+="--volume /dev/hugepages:/dev/hugepages "
-
- params=(${dcr_stc_params} --name csit-tg-"${cnt}" "${dcr_image}")
- docker run --network=csit-nw-tg${cnt} "${params[@]}"
- done
- ;;
- "stop" )
- docker rm --force $(docker ps --all --quiet --filter name=csit)
- docker network rm $(docker network ls --filter name=csit --quiet)
- ;;
-esac
diff --git a/fdio.infra.ansible/roles/tg/handlers/main.yaml b/fdio.infra.ansible/roles/tg/handlers/main.yaml
deleted file mode 100644
index b2c32a7078..0000000000
--- a/fdio.infra.ansible/roles/tg/handlers/main.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
----
-# file: roles/tg/handlers/main.yaml
-
-- name: Start csit-initialize-docker-tg.service
- systemd:
- enabled: true
- state: "started"
- name: "csit-initialize-docker-tg.service"
- tags:
- - docker-tg
diff --git a/fdio.infra.ansible/roles/tg/tasks/main.yaml b/fdio.infra.ansible/roles/tg/tasks/main.yaml
deleted file mode 100644
index 70ed384826..0000000000
--- a/fdio.infra.ansible/roles/tg/tasks/main.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-# file: roles/tg/tasks/main.yaml
-
-- name: Conf - csit-initialize-docker-tg.sh
- copy:
- src: "files/csit-initialize-docker-tg.sh"
- dest: "/usr/local/bin/csit-initialize-docker-tg.sh"
- owner: "root"
- group: "root"
- mode: 0744
- when:
- - docker_tg is defined
- tags:
- - tg-conf-docker
-
-- name: Conf - Start csit-initialize-docker-tg.service
- copy:
- src: "files/csit-initialize-docker-tg.service"
- dest: "/etc/systemd/system/"
- owner: "root"
- group: "root"
- mode: 0644
- notify:
- - "Start csit-initialize-docker-tg.service"
- when:
- - docker_tg is defined
- tags:
- - tg-conf-docker
-
-- meta: flush_handlers
diff --git a/fdio.infra.ansible/roles/topology/tasks/main.yaml b/fdio.infra.ansible/roles/topology/tasks/main.yaml
index 4f94060cad..1dc704331d 100644
--- a/fdio.infra.ansible/roles/topology/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/topology/tasks/main.yaml
@@ -1,22 +1,22 @@
---
-# file: roles/topology/tasks/main.yaml
+# file: tasks/main.yaml
- name: Create Topology File
- template:
+ ansible.builtin.template:
src: "templates/topology-{{ cloud_topology }}.j2"
dest: "../topologies/available/{{ cloud_topology }}-{{ testbed_name }}.yaml"
tags:
- create-topology-file
- name: Create Inventory Folder
- file:
+ ansible.builtin.file:
path: "./inventories/cloud_inventory/"
state: directory
tags:
- create-inventory-folder-cloud
- name: Create Hosts File
- template:
+ ansible.builtin.template:
src: "templates/hosts.j2"
dest: "./inventories/cloud_inventory/hosts"
tags:
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-1n-c6gn.j2 b/fdio.infra.ansible/roles/topology/templates/topology-1n-c6gn.j2
new file mode 100644
index 0000000000..647a40b1e7
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-1n-c6gn.j2
@@ -0,0 +1,30 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/1_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 1-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-100G
+ port2:
+ # tg_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link1
+ model: Amazon-Nitro-100G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-1n-c6in.j2 b/fdio.infra.ansible/roles/topology/templates/topology-1n-c6in.j2
new file mode 100644
index 0000000000..7d3f4e5318
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-1n-c6in.j2
@@ -0,0 +1,30 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/1_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 1-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # tg_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link1
+ model: Amazon-Nitro-200G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-2n-aws-c6gn.j2 b/fdio.infra.ansible/roles/topology/templates/topology-2n-c6gn.j2
index de43291cc6..e693f6c42c 100644
--- a/fdio.infra.ansible/roles/topology/templates/topology-2n-aws-c6gn.j2
+++ b/fdio.infra.ansible/roles/topology/templates/topology-2n-c6gn.j2
@@ -17,17 +17,17 @@ nodes:
password: Csit1234
interfaces:
port1:
- # tg_instance/p1 - 50GE port1 on ENA NIC.
+ # tg_instance/p1 - 100GE port1 on ENA NIC.
mac_address: {{ tg_if1_mac }}
pci_address: "0000:00:06.0"
link: link1
- model: Amazon-Nitro-50G
+ model: Amazon-Nitro-100G
port2:
- # tg_instance/p2 - 50GE port2 on ENA NIC.
+ # tg_instance/p2 - 100GE port2 on ENA NIC.
mac_address: {{ tg_if2_mac }}
pci_address: "0000:00:07.0"
link: link2
- model: Amazon-Nitro-50G
+ model: Amazon-Nitro-100G
DUT1:
type: DUT
host: "{{ dut1_public_ip }}"
@@ -38,14 +38,14 @@ nodes:
uio_driver: vfio-pci
interfaces:
port1:
- # dut1_instance/p1 - 50GE port1 on ENA NIC.
+ # dut1_instance/p1 - 100GE port1 on ENA NIC.
mac_address: {{ dut1_if1_mac }}
pci_address: "0000:00:06.0"
link: link1
- model: Amazon-Nitro-50G
+ model: Amazon-Nitro-100G
port2:
- # dut1_instance/p2 - 50GE port2 on ENA NIC.
+ # dut1_instance/p2 - 100GE port2 on ENA NIC.
mac_address: {{ dut1_if2_mac }}
pci_address: "0000:00:07.0"
link: link2
- model: Amazon-Nitro-50G
+ model: Amazon-Nitro-100G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-2n-c6in.j2 b/fdio.infra.ansible/roles/topology/templates/topology-2n-c6in.j2
new file mode 100644
index 0000000000..ef7b464967
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-2n-c6in.j2
@@ -0,0 +1,51 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/2_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 2-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # tg_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-200G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # dut1_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-200G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-3n-c6gn.j2 b/fdio.infra.ansible/roles/topology/templates/topology-3n-c6gn.j2
new file mode 100644
index 0000000000..295d457f49
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-3n-c6gn.j2
@@ -0,0 +1,73 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/3_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 3-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-100G
+ port2:
+ # tg_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-100G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-100G
+ port2:
+ # dut1_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link21
+ model: Amazon-Nitro-100G
+ DUT2:
+ type: DUT
+ host: "{{ dut2_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut2_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ dut2_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link21
+ model: Amazon-Nitro-100G
+ port2:
+ # dut2_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ dut2_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-100G
+
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-3n-c6in.j2 b/fdio.infra.ansible/roles/topology/templates/topology-3n-c6in.j2
new file mode 100644
index 0000000000..c280f4e7e1
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-3n-c6in.j2
@@ -0,0 +1,73 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/3_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 3-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # tg_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-200G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # dut1_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link21
+ model: Amazon-Nitro-200G
+ DUT2:
+ type: DUT
+ host: "{{ dut2_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut2_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ dut2_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link21
+ model: Amazon-Nitro-200G
+ port2:
+ # dut2_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ dut2_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-200G
+
diff --git a/fdio.infra.ansible/roles/trex/defaults/main.yaml b/fdio.infra.ansible/roles/trex/defaults/main.yaml
index 180d635def..18a2b56bda 100644
--- a/fdio.infra.ansible/roles/trex/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/trex/defaults/main.yaml
@@ -1,35 +1,25 @@
---
-# file: roles/trex/defaults/main.yaml
+# file: defaults/main.yaml
-packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower] + packages_by_arch[ansible_machine] }}"
packages_base:
- []
packages_by_distro:
ubuntu:
- focal:
- - "build-essential"
- - "libmnl-dev"
- - "libnuma-dev"
- - "libpcap-dev"
- - "librdmacm-dev"
- - "librdmacm1"
- - "libssl-dev"
- - "pciutils"
- - "python3-pip"
- - "zlib1g-dev"
- jammy:
- - "build-essential"
- - "libmnl-dev"
- - "libnuma-dev"
- - "libpcap-dev"
- - "librdmacm-dev"
- - "librdmacm1"
- - "libssl-dev"
- - "pciutils"
- - "python3-pip"
- - "zlib1g-dev"
+ - "build-essential"
+ - "gcc-9"
+ - "g++-9"
+ - "libmnl-dev"
+ - "libnuma-dev"
+ - "libpcap-dev"
+ - "librdmacm-dev"
+ - "librdmacm1"
+ - "libssl-dev"
+ - "pciutils"
+ - "python3-pip"
+ - "zlib1g-dev"
packages_by_arch:
aarch64:
@@ -40,6 +30,4 @@ packages_by_arch:
trex_target_dir: "/opt"
trex_url: "https://github.com/cisco-system-traffic-generator/trex-core/archive/"
trex_version:
- # master // ubuntu 20.04
- - "2.88"
- - "2.97"
+ - "3.03"
diff --git a/fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml b/fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml
index 9aeb1be023..1a747f68d5 100644
--- a/fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml
+++ b/fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/trex/tasks/deploy_block.yaml
+# file: tasks/deploy_block.yaml
- name: Get Release {{ item }}
- get_url:
+ ansible.builtin.get_url:
url: "{{ trex_url }}/v{{ item }}.tar.gz"
dest: "{{ trex_target_dir }}/trex-core-{{ item }}.tar.gz"
validate_certs: false
@@ -10,46 +10,54 @@
register: trex_downloaded
- name: Create Directory {{ item }}
- file:
+ ansible.builtin.file:
path: "{{ trex_target_dir }}/trex-core-{{ item }}"
state: "directory"
- name: Extract Release {{ item }}
- unarchive:
+ ansible.builtin.unarchive:
remote_src: true
src: "{{ trex_target_dir }}/trex-core-{{ item }}.tar.gz"
dest: "{{ trex_target_dir }}/"
creates: "{{ trex_target_dir }}/trex-core-{{ item }}/linux_dpdk/"
register: trex_extracted
-- name: Patch Azure
- patch:
- src: "files/t-rex.patch"
- basedir: "{{ trex_target_dir }}/trex-core-{{ item }}"
- strip: 1
- when:
- - azure is defined and item == "2.73"
-
- name: Compile Release {{ item }} Part I
- command: "./b configure"
+ ansible.builtin.command: "./b configure"
args:
chdir: "{{ trex_target_dir }}/trex-core-{{ item }}/linux_dpdk/"
when: trex_extracted.changed
- name: Compile Release {{ item }} Part II
- command: "./b build"
+ ansible.builtin.command: "./b build"
args:
chdir: "{{ trex_target_dir }}/trex-core-{{ item }}/linux_dpdk/"
+ async: 3000
+ poll: 0
+ register: trex_built
+ when: trex_extracted.changed
+
+- name: Check if T-Rex is Compiled
+ async_status:
+ jid: "{{ trex_built.ansible_job_id }}"
+ register: trex_built
+ until: trex_built.finished
+ delay: 10
+ retries: 300
when: trex_extracted.changed
- name: Compile Release {{ item }} Part III
- command: "make -j 16"
+ ansible.builtin.command: "make -j 16"
args:
chdir: "{{ trex_target_dir }}/trex-core-{{ item }}/scripts/ko/src"
when: trex_extracted.changed
- name: Compile Release {{ item }} Part IV
- command: "make install"
+ ansible.builtin.command: "make install"
args:
chdir: "{{ trex_target_dir }}/trex-core-{{ item }}/scripts/ko/src"
when: trex_extracted.changed
+
+- name: Link libc.a to liblibc.a
+ ansible.builtin.command: "ln -s -f /usr/lib/x86_64-linux-gnu/libc.a /usr/lib/x86_64-linux-gnu/liblibc.a"
+ when: trex_extracted.changed
diff --git a/fdio.infra.ansible/roles/trex/tasks/main.yaml b/fdio.infra.ansible/roles/trex/tasks/main.yaml
index 8f659b697c..d0509f7544 100644
--- a/fdio.infra.ansible/roles/trex/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/trex/tasks/main.yaml
@@ -1,8 +1,8 @@
---
-# file: roles/trex/tasks/main.yaml
+# file: tasks/main.yaml
-- name: Inst - Update Package Cache (APT)
- apt:
+- name: Update Package Cache (APT)
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -10,14 +10,14 @@
tags:
- trex-inst-prerequisites
-- name: Inst - Prerequisites
- package:
+- name: Prerequisites
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: latest
tags:
- trex-inst-prerequisites
-- name: Inst - Multiple T-Rex Versions
+- name: Multiple T-Rex Versions
include_tasks: deploy_block.yaml
loop: "{{ trex_version }}"
tags:
diff --git a/fdio.infra.ansible/roles/vagrant/tasks/main.yml b/fdio.infra.ansible/roles/vagrant/tasks/main.yml
index 14e919cb31..1716ebe0d5 100644
--- a/fdio.infra.ansible/roles/vagrant/tasks/main.yml
+++ b/fdio.infra.ansible/roles/vagrant/tasks/main.yml
@@ -26,13 +26,14 @@
- name: Reload groups for current session
command: "/usr/bin/newgrp docker"
-- name: Clone CSIT repository
- become_user: vagrant
- git:
- repo: "{{ csit.repository.url }}"
- dest: "{{ csit.home }}"
- accept_hostkey: true
- version: "{{ csit.repository.version }}"
+# Disabling CSIT repo cloning in the VM as the repo is synced from the host
+# - name: Clone CSIT repository
+# become_user: vagrant
+# git:
+# repo: "{{ csit.repository.url }}"
+# dest: "{{ csit.home }}"
+# accept_hostkey: true
+# version: "{{ csit.repository.version }}"
- name: Load csit docker image from local path if exists (/vagrant/csit-sut.tar)
shell: |
diff --git a/fdio.infra.ansible/roles/vault/meta/main.yaml b/fdio.infra.ansible/roles/vault/meta/main.yaml
index 882dcc3a7b..22a62dd438 100644
--- a/fdio.infra.ansible/roles/vault/meta/main.yaml
+++ b/fdio.infra.ansible/roles/vault/meta/main.yaml
@@ -13,7 +13,6 @@ galaxy_info:
platforms:
- name: Ubuntu
versions:
- - focal
- jammy
galaxy_tags:
- vault
diff --git a/fdio.infra.ansible/roles/vpp/defaults/main.yaml b/fdio.infra.ansible/roles/vpp/defaults/main.yaml
index 55051b8c8f..00c56859d0 100644
--- a/fdio.infra.ansible/roles/vpp/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/vpp/defaults/main.yaml
@@ -12,20 +12,12 @@ packages_base:
packages_by_distro:
ubuntu:
- focal:
- - "build-essential"
- - "libglib2.0-dev"
- - "libmbedcrypto3"
- - "libmbedtls12"
- - "libmbedx509-0"
- - "libnuma-dev"
- - "libpixman-1-dev"
jammy:
- "build-essential"
- "libglib2.0-dev"
- - "libmbedcrypto1"
- - "libmbedtls10"
- - "libmbedx509-0"
+ - "libmbedcrypto7"
+ - "libmbedtls14"
+ - "libmbedx509-1"
- "libnuma-dev"
- "libpixman-1-dev"
diff --git a/fdio.infra.ansible/roles/vpp/tasks/main.yaml b/fdio.infra.ansible/roles/vpp/tasks/main.yaml
index 320584c432..cea06b764d 100644
--- a/fdio.infra.ansible/roles/vpp/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/vpp/tasks/main.yaml
@@ -2,7 +2,7 @@
# file: roles/vpp/tasks/main.yaml
- name: Inst - Update Package Cache (APT)
- apt:
+ ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
when:
@@ -11,14 +11,14 @@
- vpp-inst-prerequisites
- name: Inst - Prerequisites
- package:
+ ansible.builtin.package:
name: "{{ packages | flatten(levels=1) }}"
state: latest
tags:
- vpp-inst-prerequisites
- name: Conf - sysctl
- file:
+ ansible.builtin.file:
src: "/dev/null"
dest: "/etc/sysctl.d/80-vpp.conf"
state: "link"
diff --git a/fdio.infra.ansible/roles/vpp_device/defaults/main.yaml b/fdio.infra.ansible/roles/vpp_device/defaults/main.yaml
deleted file mode 100644
index 9816d7087c..0000000000
--- a/fdio.infra.ansible/roles/vpp_device/defaults/main.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-# file: roles/vpp_device/defaults/main.yaml
-
-iavf_target_dir: "/opt"
-iavf_version: "4.1.1"
-iavf_url: "https://downloads.sourceforge.net/project/e1000/iavf%20stable/{{ iavf_version }}/iavf-{{ iavf_version }}.tar.gz?ts=gAAAAABgvfs_L6gSH8B09NfEc8fzoooS3M2wyuOxlFEK5EtTstZWFdb2dYcqJZNqiirAT87dbc3GeJJrRst9KfUgS-byAeyaLw%3D%3D&r=https%3A%2F%2Fsourceforge.net%2Fprojects%2Fe1000%2Ffiles%2Fiavf%2520stable%2F{{ iavf_version }}%2Fiavf-{{ iavf_version }}.tar.gz%2Fdownload"
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-alt.sh b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-alt.sh
new file mode 100644
index 0000000000..cd04d61251
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-alt.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 PANTHEON.tech and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add QLogic Corp. FastLinQ QL41000 Series 10/25/40/50GbE Controller to
+# blacklist.
+PCI_BLACKLIST=($(lspci -Dmmd ':8070:0200' | cut -f1 -d' '))
+# Add I350 Gigabit Network Connection 1521 to blacklist.
+PCI_BLACKLIST+=($(lspci -Dmmd ':1521:0200' | cut -f1 -d' '))
+# Add MT27800 Family [ConnectX-5] 1017 to blacklist.
+PCI_BLACKLIST+=($(lspci -Dmmd ':1017:0200' | cut -f1 -d' '))
+
+# Add Intel Corporation Ethernet Controller XL710 for 40GbE QSFP+ to whitelist.
+PCI_WHITELIST=($(lspci -Dmmd ':1583:0200' | cut -f1 -d' '))
+# Add MT2892 Family [ConnectX-6 Dx] 101d to whitelist.
+PCI_WHITELIST+=($(lspci -Dmmd ':101d:0200' | cut -f1 -d' '))
+
+# See http://pci-ids.ucw.cz/v2.2/pci.ids for more info.
+
+declare -A PF_INDICES
+# Intel NICs
+PF_INDICES["0000:01:00.0"]=0
+PF_INDICES["0000:01:00.1"]=1
+PF_INDICES["0003:02:00.0"]=0
+PF_INDICES["0003:02:00.1"]=1
+# Mellanox CX6
+PF_INDICES["0001:01:00.0"]=2
+PF_INDICES["0001:01:00.1"]=2 \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh
new file mode 100644
index 0000000000..b240649b4c
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add Intel Corporation Ethernet Controller 10G X550T to blacklist.
+PCI_BLACKLIST=($(lspci -Dmmd ':1563:0200' | cut -f1 -d' '))
+# Add Intel Corporation Ethernet Controller E810-C for 100GbE QSFP to whitelist.
+PCI_WHITELIST+=($(lspci -Dmmd ':1592:0200' | cut -f1 -d' '))
+
+# See http://pci-ids.ucw.cz/v2.2/pci.ids for more info.
+
+declare -A PF_INDICES
+# Intel NICs
+PF_INDICES["0000:2a:00.0"]=0
+PF_INDICES["0000:2c:00.0"]=1
+PF_INDICES["0000:3f:00.0"]=0
+PF_INDICES["0000:3d:00.0"]=1
diff --git a/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml b/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml
index 29342ae43c..3ac80cc16e 100644
--- a/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml
+++ b/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml
@@ -1,21 +1,21 @@
---
-# file: roles/vpp_device/handlers/main.yaml
+# file: handlers/main.yaml
-- name: Start csit-initialize-vfs.service
- systemd:
+- name: "Start csit-initialize-vfs.service"
+ ansible.builtin.systemd:
enabled: true
- state: started
- name: csit-initialize-vfs.service
+ state: "started"
+ name: "csit-initialize-vfs.service"
tags:
- start-vf-service
-- name: Update GRUB
- command: update-grub
+- name: "Update GRUB"
+ ansible.builtin.command: "update-grub"
tags:
- update-grub
-- name: Reboot server
- reboot:
+- name: "Reboot server"
+ ansible.builtin.reboot:
reboot_timeout: 3600
tags:
- reboot-server
diff --git a/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml b/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml
index 418217aaac..91916456af 100644
--- a/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml
@@ -1,20 +1,18 @@
---
-# file: roles/vpp_device/tasks/main.yaml
+# file: tasks/main.yaml
-- name: Load Kernel Modules By Default
- lineinfile:
+- name: "Load Kernel Modules On Startup (vfio-pci)"
+ ansible.builtin.lineinfile:
path: "/etc/modules"
state: "present"
line: "{{ item }}"
with_items:
- "vfio-pci"
- when:
- - cpu_microarchitecture == "thunderx2"
tags:
- load-kernel-modules
-- name: Disable IPv6 Router Advertisement
- sysctl:
+- name: "Disable IPv6 Router Advertisement"
+ ansible.builtin.sysctl:
name: "net.ipv6.conf.default.accept_ra"
value: "0"
state: "present"
@@ -23,9 +21,19 @@
tags:
- set-sysctl
-- name: Disable IPv6 Router Advertisement
- sysctl:
- name: "net.ipv6.conf.all.accept_ra"
+- name: "Disable IPv6 MLDv1 interval"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.mldv1_unsolicited_report_interval"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: "Disable IPv6 MLDv2 interval"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.mldv2_unsolicited_report_interval"
value: "0"
state: "present"
sysctl_file: "/etc/sysctl.d/90-csit.conf"
@@ -33,9 +41,9 @@
tags:
- set-sysctl
-- name: Disable IPv6 MLDv1 interval
- sysctl:
- name: "net.ipv6.conf.all.mldv1_unsolicited_report_interval"
+- name: "Disable IPv6 Autoconf"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.autoconf"
value: "0"
state: "present"
sysctl_file: "/etc/sysctl.d/90-csit.conf"
@@ -43,9 +51,9 @@
tags:
- set-sysctl
-- name: Disable IPv6 MLDv2 interval
- sysctl:
- name: "net.ipv6.conf.all.mldv2_unsolicited_report_interval"
+- name: "Disable IPv6 MC Forwarding"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.mc_forwarding"
value: "0"
state: "present"
sysctl_file: "/etc/sysctl.d/90-csit.conf"
@@ -53,9 +61,9 @@
tags:
- set-sysctl
-- name: Disable IPv6 Autoconf
- sysctl:
- name: "net.ipv6.conf.all.autoconf"
+- name: "Disable IPv4 IGMPv2 interval"
+ ansible.builtin.sysctl:
+ name: "net.ipv4.conf.default.igmpv2_unsolicited_report_interval"
value: "0"
state: "present"
sysctl_file: "/etc/sysctl.d/90-csit.conf"
@@ -63,9 +71,9 @@
tags:
- set-sysctl
-- name: Disable IPv6 MC Forwarding
- sysctl:
- name: "net.ipv6.conf.all.mc_forwarding"
+- name: "Disable IPv4 IGMPv3 interval"
+ ansible.builtin.sysctl:
+ name: "net.ipv4.conf.default.igmpv3_unsolicited_report_interval"
value: "0"
state: "present"
sysctl_file: "/etc/sysctl.d/90-csit.conf"
@@ -73,8 +81,8 @@
tags:
- set-sysctl
-- name: Copy csit-initialize-vfs.sh
- copy:
+- name: "Copy csit-initialize-vfs.sh"
+ ansible.builtin.copy:
src: "files/csit-initialize-vfs.sh"
dest: "/usr/local/bin/"
owner: "root"
@@ -83,8 +91,8 @@
tags:
- copy-vf-script
-- name: Copy csit-initialize-vfs-data.sh
- copy:
+- name: "Copy csit-initialize-vfs-data.sh"
+ ansible.builtin.copy:
src: "files/{{ vfs_data_file }}"
dest: "/usr/local/bin/csit-initialize-vfs-data.sh"
owner: "root"
@@ -94,8 +102,8 @@
when:
- vfs_data_file is defined
-- name: Copy Default csit-initialize-vfs-data.sh
- copy:
+- name: "Copy Default csit-initialize-vfs-data.sh"
+ ansible.builtin.copy:
src: "files/csit-initialize-vfs-default.sh"
dest: "/usr/local/bin/csit-initialize-vfs-data.sh"
owner: "root"
@@ -105,8 +113,8 @@
when:
- vfs_data_file is not defined
-- name: Start csit-initialize-vfs.service
- copy:
+- name: "Start csit-initialize-vfs.service"
+ ansible.builtin.copy:
src: "files/csit-initialize-vfs.service"
dest: "/etc/systemd/system/"
owner: "root"
@@ -117,10 +125,10 @@
tags:
- start-vf-service
-- meta: flush_handlers
+- ansible.builtin.meta: "flush_handlers"
-- name: Set Hugepages In GRUB
- lineinfile:
+- name: "Set Hugepages In GRUB"
+ ansible.builtin.lineinfile:
path: "/etc/default/grub"
state: "present"
regexp: "^GRUB_CMDLINE_LINUX="
diff --git a/fdio.infra.ansible/site.yaml b/fdio.infra.ansible/site.yaml
index 9350a2c140..45a090344e 100644
--- a/fdio.infra.ansible/site.yaml
+++ b/fdio.infra.ansible/site.yaml
@@ -6,12 +6,14 @@
- tg
- tg_aws
- tg_azure
+ - tg_openstack
- import_playbook: sut.yaml
tags:
- sut
- sut_aws
- sut_azure
+ - sut_openstack
- import_playbook: vpp_device.yaml
tags:
diff --git a/fdio.infra.ansible/sut.yaml b/fdio.infra.ansible/sut.yaml
index d0faad10a7..57be961ee1 100644
--- a/fdio.infra.ansible/sut.yaml
+++ b/fdio.infra.ansible/sut.yaml
@@ -32,8 +32,8 @@
tags: dpdk
- role: kernel_vm
tags: kernel_vm
- - role: csit_sut_image
- tags: csit_sut_image
+ - role: docker_images
+ tags: docker_images
- role: performance_tuning
tags: performance_tuning
- role: cleanup
@@ -60,12 +60,12 @@
tags: python_env
- role: vpp
tags: vpp
- - role: dpdk
- tags: dpdk
- role: iperf
tags: iperf
- role: docker
tags: docker
+# - role: dpdk
+# tags: dpdk
- role: aws
tags: aws
- role: cleanup
@@ -103,3 +103,32 @@
tags: cleanup
- role: calibration
tags: calibration
+
+- hosts: sut_openstack
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: docker
+ tags: docker
+ - role: vpp
+ tags: vpp
+ - role: iperf
+ tags: iperf
+ - role: dpdk
+ tags: dpdk
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration \ No newline at end of file
diff --git a/fdio.infra.ansible/tg.yaml b/fdio.infra.ansible/tg.yaml
index 86184e10a7..de8706ffd1 100644
--- a/fdio.infra.ansible/tg.yaml
+++ b/fdio.infra.ansible/tg.yaml
@@ -26,16 +26,14 @@
tags: intel
- role: docker
tags: docker
+ - role: docker_images
+ tags: docker_images
- role: iperf
tags: iperf
- role: trex
tags: trex
- role: ab
tags: ab
- - role: tg
- tags: tg
- - role: csit_sut_image
- tags: csit_sut_image
- role: performance_tuning
tags: performance_tuning
- role: cleanup
@@ -60,20 +58,18 @@
tags: common
- role: python_env
tags: python_env
- - role: dpdk
- tags: dpdk
- role: docker
tags: docker
- - role: tg
- tags: tg
- role: iperf
tags: iperf
+# - role: dpdk
+# tags: dpdk
+ - role: aws
+ tags: aws
- role: trex
tags: trex
- role: ab
tags: ab
- - role: aws
- tags: aws
- role: cleanup
tags: cleanup
- role: calibration
@@ -97,8 +93,6 @@
tags: python_env
- role: docker
tags: docker
- - role: tg
- tags: tg
- role: iperf
tags: iperf
- role: trex
@@ -111,3 +105,32 @@
tags: cleanup
- role: calibration
tags: calibration
+
+- hosts: tg_openstack
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: docker
+ tags: docker
+ - role: iperf
+ tags: iperf
+ - role: trex
+ tags: trex
+ - role: ab
+ tags: ab
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration \ No newline at end of file
diff --git a/fdio.infra.ansible/vpp_device.yaml b/fdio.infra.ansible/vpp_device.yaml
index ced882a148..21676811c7 100644
--- a/fdio.infra.ansible/vpp_device.yaml
+++ b/fdio.infra.ansible/vpp_device.yaml
@@ -18,12 +18,14 @@
tags: baremetal
- role: common
tags: common
- - role: kernel
- tags: kernel
+# - role: kernel
+# tags: kernel
- role: intel
tags: intel
- role: docker
tags: docker
+ - role: docker_images
+ tags: docker_images
- role: nomad
tags: nomad
- role: consul
@@ -36,7 +38,5 @@
tags: vpp_device
- role: kernel_vm
tags: kernel_vm
- - role: csit_sut_image
- tags: csit_sut_image
- role: cleanup
tags: cleanup