Diffstat (limited to 'fdio.infra.ansible')
-rw-r--r--  fdio.infra.ansible/.gitignore | 1
-rw-r--r--  fdio.infra.ansible/cloud_topology.yaml | 8
-rw-r--r--  fdio.infra.ansible/dev.yaml | 18
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml | 2
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml | 88
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml | 88
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml | 92
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml | 92
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml | 92
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml | 92
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml | 86
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml | 86
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml | 97
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml | 97
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.32.yaml | 38
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.33.yaml | 38
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.34.yaml | 38
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.35.yaml | 38
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml | 23
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml | 23
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml | 20
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml | 33
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml | 96
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml | 96
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml | 74
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml | 65
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml | 67
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml | 63
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.56.yaml | 36
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.57.yaml | 36
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml | 36
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml | 41
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml | 42
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml | 98
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml | 98
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml | 26
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml | 26
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml | 33
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml | 33
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.90.yaml | 38
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml | 88
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml | 88
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml | 33
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml | 33
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml | 34
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml | 30
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml | 30
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/hosts | 74
-rw-r--r--  fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml | 2
-rw-r--r--  fdio.infra.ansible/inventories/sample_inventory/host_vars/1.1.1.1.yaml | 17
-rw-r--r--  fdio.infra.ansible/inventories/sample_inventory/hosts | 9
-rw-r--r--  fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml | 2
-rw-r--r--  fdio.infra.ansible/inventories/vagrant_inventory/hosts | 4
-rw-r--r--  fdio.infra.ansible/nomad.yaml | 32
-rw-r--r--  fdio.infra.ansible/nomad_vault.yml | 796
-rw-r--r--  fdio.infra.ansible/roles/ab/defaults/main.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/ab/tasks/main.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/aws/defaults/main.yaml | 26
-rw-r--r--  fdio.infra.ansible/roles/aws/files/get-vfio-with-wc.sh | 203
-rw-r--r--  fdio.infra.ansible/roles/aws/handlers/main.yaml | 20
-rw-r--r--  fdio.infra.ansible/roles/aws/tasks/main.yaml | 124
-rw-r--r--  fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml | 35
-rw-r--r--  fdio.infra.ansible/roles/azure/defaults/main.yaml | 2
-rw-r--r--  fdio.infra.ansible/roles/azure/files/10-dtap.link | 4
-rw-r--r--  fdio.infra.ansible/roles/azure/handlers/main.yaml | 15
-rw-r--r--  fdio.infra.ansible/roles/azure/tasks/main.yaml | 38
-rw-r--r--  fdio.infra.ansible/roles/baremetal/handlers/cimc.yaml | 74
-rw-r--r--  fdio.infra.ansible/roles/baremetal/handlers/ipmi.yaml | 52
-rw-r--r--  fdio.infra.ansible/roles/baremetal/handlers/main.yaml | 30
-rw-r--r--  fdio.infra.ansible/roles/cadvisor/defaults/main.yaml | 24
-rw-r--r--  fdio.infra.ansible/roles/cadvisor/tasks/main.yaml | 39
-rw-r--r--  fdio.infra.ansible/roles/calibration/defaults/main.yaml | 37
-rw-r--r--  fdio.infra.ansible/roles/calibration/tasks/aarch64.yaml | 2
-rw-r--r--  fdio.infra.ansible/roles/calibration/tasks/main.yaml | 89
-rw-r--r--  fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml | 35
-rw-r--r--  fdio.infra.ansible/roles/cleanup/files/reset_vppdevice.sh | 113
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml | 36
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml | 42
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml | 38
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/main.yaml | 26
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml | 21
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/sut.yaml | 97
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/tg.yaml | 13
-rw-r--r--  fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml | 32
-rw-r--r--  fdio.infra.ansible/roles/common/defaults/main.yaml | 55
-rw-r--r--  fdio.infra.ansible/roles/common/handlers/main.yaml | 8
-rw-r--r--  fdio.infra.ansible/roles/common/tasks/main.yaml | 56
-rw-r--r--  fdio.infra.ansible/roles/consul/defaults/main.yaml | 87
-rw-r--r--  fdio.infra.ansible/roles/consul/handlers/main.yaml | 16
-rw-r--r--  fdio.infra.ansible/roles/consul/meta/main.yaml | 21
-rw-r--r--  fdio.infra.ansible/roles/consul/tasks/main.yaml | 145
-rw-r--r--  fdio.infra.ansible/roles/consul/templates/base.hcl.j2 | 56
-rw-r--r--  fdio.infra.ansible/roles/consul/templates/consul_systemd.service.j2 | 18
-rw-r--r--  fdio.infra.ansible/roles/consul/templates/ports.hcl.j2 | 9
-rw-r--r--  fdio.infra.ansible/roles/consul/templates/telemetry.hcl.j2 | 3
-rw-r--r--  fdio.infra.ansible/roles/consul/vars/main.yaml | 5
-rw-r--r--  fdio.infra.ansible/roles/docker/defaults/main.yaml | 35
-rw-r--r--  fdio.infra.ansible/roles/docker/handlers/main.yaml | 9
-rw-r--r--  fdio.infra.ansible/roles/docker/meta/main.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/docker/tasks/jammy.yaml | 30
-rw-r--r--  fdio.infra.ansible/roles/docker/tasks/main.yaml | 85
-rw-r--r--  fdio.infra.ansible/roles/docker/templates/daemon.json.j2 | 1
-rw-r--r--  fdio.infra.ansible/roles/docker/templates/docker.service.proxy.http | 4
-rw-r--r--  fdio.infra.ansible/roles/docker/templates/docker.service.proxy.https | 4
-rw-r--r--  fdio.infra.ansible/roles/docker_images/files/base/Dockerfile | 140
-rw-r--r--  fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-sut.service | 12
-rw-r--r--  fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-tg.service | 12
-rw-r--r--  fdio.infra.ansible/roles/docker_images/files/csit-sut/Dockerfile | 7
-rw-r--r--  fdio.infra.ansible/roles/docker_images/files/csit-sut/supervisord.conf | 24
-rw-r--r--  fdio.infra.ansible/roles/docker_images/handlers/main.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/docker_images/tasks/base.yaml | 63
-rw-r--r--  fdio.infra.ansible/roles/docker_images/tasks/main.yaml | 21
-rw-r--r--  fdio.infra.ansible/roles/docker_images/tasks/sut.yaml | 28
-rw-r--r--  fdio.infra.ansible/roles/docker_images/tasks/tg.yaml | 28
-rw-r--r--  fdio.infra.ansible/roles/docker_images/templates/docker-compose-sut.yaml.j2 | 42
-rw-r--r--  fdio.infra.ansible/roles/docker_images/templates/docker-compose-tg.yaml.j2 | 38
-rw-r--r--  fdio.infra.ansible/roles/dpdk/defaults/main.yaml | 24
-rw-r--r--  fdio.infra.ansible/roles/dpdk/files/dpdk-mlx5.patch | 19
-rw-r--r--  fdio.infra.ansible/roles/dpdk/meta/main.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/dpdk/molecule/default/converge.yml | 9
-rw-r--r--  fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml | 21
-rw-r--r--  fdio.infra.ansible/roles/dpdk/tasks/deploy_block.yaml | 33
-rw-r--r--  fdio.infra.ansible/roles/dpdk/tasks/main.yaml | 24
-rw-r--r--  fdio.infra.ansible/roles/intel/defaults/main.yaml | 111
-rw-r--r--  fdio.infra.ansible/roles/intel/tasks/dsa.yaml | 39
-rw-r--r--  fdio.infra.ansible/roles/intel/tasks/i40e.yaml | 37
-rw-r--r--  fdio.infra.ansible/roles/intel/tasks/iavf.yaml | 37
-rw-r--r--  fdio.infra.ansible/roles/intel/tasks/ice.yaml | 91
-rw-r--r--  fdio.infra.ansible/roles/intel/tasks/main.yaml | 146
-rw-r--r--  fdio.infra.ansible/roles/intel/tasks/qat1.yaml | 54
-rw-r--r--  fdio.infra.ansible/roles/intel/tasks/qat2.yaml | 57
-rw-r--r--  fdio.infra.ansible/roles/iperf/defaults/main.yaml | 23
-rw-r--r--  fdio.infra.ansible/roles/iperf/tasks/main.yaml | 62
-rw-r--r--  fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml | 28
-rw-r--r--  fdio.infra.ansible/roles/jenkins_job_health_exporter/handlers/main.yaml | 9
-rw-r--r--  fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml | 38
-rw-r--r--  fdio.infra.ansible/roles/jenkins_job_health_exporter/templates/jenkins-job-health-exporter.j2 | 16
-rw-r--r--  fdio.infra.ansible/roles/jenkins_job_health_exporter/templates/jenkins-job-health-exporter.service.j2 | 13
-rw-r--r--  fdio.infra.ansible/roles/kernel/defaults/main.yaml | 31
-rw-r--r--  fdio.infra.ansible/roles/kernel/filter_plugins/main.py | 143
-rw-r--r--  fdio.infra.ansible/roles/kernel/handlers/main.yaml | 8
-rw-r--r--  fdio.infra.ansible/roles/kernel/tasks/main.yaml | 9
-rw-r--r--  fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml | 62
-rw-r--r--  fdio.infra.ansible/roles/kernel_vm/files/initramfs_modules | 4
-rw-r--r--  fdio.infra.ansible/roles/kernel_vm/files/initramfs_resume | 1
-rw-r--r--  fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml | 92
-rw-r--r--  fdio.infra.ansible/roles/mellanox/defaults/main.yaml | 30
-rw-r--r--  fdio.infra.ansible/roles/mellanox/tasks/main.yaml | 43
-rw-r--r--  fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml | 37
-rw-r--r--  fdio.infra.ansible/roles/nomad/defaults/main.yaml | 193
-rw-r--r--  fdio.infra.ansible/roles/nomad/handlers/main.yaml | 9
-rw-r--r--  fdio.infra.ansible/roles/nomad/meta/main.yaml | 21
-rw-r--r--  fdio.infra.ansible/roles/nomad/tasks/main.yaml | 151
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/base.hcl.j2 | 26
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/cfssl.json | 8
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/client.hcl.j2 | 60
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/consul.hcl.j2 | 63
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j2 | 33
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/server.hcl.j2 | 58
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j2 | 26
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/tls.hcl.j2 | 36
-rw-r--r--  fdio.infra.ansible/roles/nomad/templates/vault.hcl.j2 | 69
-rw-r--r--  fdio.infra.ansible/roles/nomad/vars/main.yaml | 5
-rw-r--r--  fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/performance_tuning/files/cpufrequtils | 1
-rw-r--r--  fdio.infra.ansible/roles/performance_tuning/files/disable-turbo-boost.service | 10
-rw-r--r--  fdio.infra.ansible/roles/performance_tuning/files/irqbalance | 25
-rw-r--r--  fdio.infra.ansible/roles/performance_tuning/filter_plugins/main.py | 29
-rw-r--r--  fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml | 13
-rw-r--r--  fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml | 210
-rw-r--r--  fdio.infra.ansible/roles/performance_tuning/tasks/turbo_boost.yaml | 44
-rw-r--r--  fdio.infra.ansible/roles/prometheus_exporter/defaults/main.yaml | 31
-rw-r--r--  fdio.infra.ansible/roles/prometheus_exporter/files/blackbox.yml | 25
-rw-r--r--  fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/prometheus_exporter/tasks/main.yaml | 72
-rw-r--r--  fdio.infra.ansible/roles/python_env/defaults/main.yaml | 25
-rw-r--r--  fdio.infra.ansible/roles/python_env/tasks/main.yaml | 62
-rw-r--r--  fdio.infra.ansible/roles/topology/tasks/main.yaml | 23
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/hosts.j2 | 9
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-1n-aws-c5n.j2 | 30
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-1n-c6gn.j2 | 30
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-1n-c6in.j2 | 30
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-2n-aws-c5n.j2 | 51
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-2n-c6gn.j2 | 51
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-2n-c6in.j2 | 51
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-3n-aws-c5n.j2 | 73
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-3n-azure-Fsv2.j2 | 82
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-3n-c6gn.j2 | 73
-rw-r--r--  fdio.infra.ansible/roles/topology/templates/topology-3n-c6in.j2 | 73
-rw-r--r--  fdio.infra.ansible/roles/trex/defaults/main.yaml | 33
-rw-r--r--  fdio.infra.ansible/roles/trex/files/t-rex.patch | 548
-rw-r--r--  fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml | 63
-rw-r--r--  fdio.infra.ansible/roles/trex/tasks/main.yaml | 24
-rw-r--r--  fdio.infra.ansible/roles/user_add/defaults/main.yaml | 14
-rw-r--r--  fdio.infra.ansible/roles/user_add/handlers/main.yaml | 7
-rw-r--r--  fdio.infra.ansible/roles/user_add/tasks/main.yaml | 39
-rw-r--r--  fdio.infra.ansible/roles/vagrant/defaults/main.yml | 14
-rw-r--r--  fdio.infra.ansible/roles/vagrant/files/99-vppdevice.yaml | 28
-rw-r--r--  fdio.infra.ansible/roles/vagrant/tasks/main.yml | 43
-rw-r--r--  fdio.infra.ansible/roles/vault/defaults/main.yaml | 159
-rw-r--r--  fdio.infra.ansible/roles/vault/handlers/main.yaml | 9
-rw-r--r--  fdio.infra.ansible/roles/vault/meta/main.yaml | 18
-rw-r--r--  fdio.infra.ansible/roles/vault/tasks/main.yaml | 133
-rw-r--r--  fdio.infra.ansible/roles/vault/templates/vault_backend_consul.j2 | 15
-rw-r--r--  fdio.infra.ansible/roles/vault/templates/vault_main_configuration.hcl.j2 | 93
-rw-r--r--  fdio.infra.ansible/roles/vault/templates/vault_service_registration_consul.hcl.j2 | 22
-rw-r--r--  fdio.infra.ansible/roles/vault/templates/vault_systemd.service.j2 | 30
-rw-r--r--  fdio.infra.ansible/roles/vault/vars/main.yaml | 5
-rw-r--r--  fdio.infra.ansible/roles/vpp/defaults/main.yaml | 28
-rw-r--r--  fdio.infra.ansible/roles/vpp/tasks/main.yaml | 27
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-alt.sh | 39
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-default.sh | 37
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh | 38
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-tx2.sh | 34
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs.service | 12
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs.sh | 77
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/handlers/main.yaml | 21
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/tasks/main.yaml | 139
-rw-r--r--  fdio.infra.ansible/site.yaml | 32
-rw-r--r--  fdio.infra.ansible/sut.yaml | 134
-rw-r--r--  fdio.infra.ansible/tg.yaml | 136
-rw-r--r--  fdio.infra.ansible/vagrant.yaml | 24
-rw-r--r--  fdio.infra.ansible/vault.yml | 706
-rw-r--r--  fdio.infra.ansible/vault_pass | 1
-rw-r--r--  fdio.infra.ansible/vpp_device.yaml | 42
245 files changed, 12641 insertions(+), 0 deletions(-)
diff --git a/fdio.infra.ansible/.gitignore b/fdio.infra.ansible/.gitignore
new file mode 100644
index 0000000000..bed1e33b88
--- /dev/null
+++ b/fdio.infra.ansible/.gitignore
@@ -0,0 +1 @@
+site.retry
\ No newline at end of file
diff --git a/fdio.infra.ansible/cloud_topology.yaml b/fdio.infra.ansible/cloud_topology.yaml
new file mode 100644
index 0000000000..c62d3a7035
--- /dev/null
+++ b/fdio.infra.ansible/cloud_topology.yaml
@@ -0,0 +1,8 @@
+---
+# file: cloud_topology.yaml
+
+- hosts: localhost
+ gather_facts: false
+ roles:
+ - role: topology
+ tags: topology
diff --git a/fdio.infra.ansible/dev.yaml b/fdio.infra.ansible/dev.yaml
new file mode 100644
index 0000000000..895cf518ec
--- /dev/null
+++ b/fdio.infra.ansible/dev.yaml
@@ -0,0 +1,18 @@
+---
+# file: dev.yaml
+
+- hosts: dev
+ remote_user: testuser
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: docker
+ tags: docker
diff --git a/fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml b/fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml
new file mode 100644
index 0000000000..719ef32625
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/group_vars/all.yaml
@@ -0,0 +1,2 @@
+---
+# file: lf_inventory/group_vars/all.yaml
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
new file mode 100644
index 0000000000..b28cae11ff
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
@@ -0,0 +1,88 @@
+---
+# file: host_vars/10.30.51.21.yaml
+
+hostname: "s21-nomad"
+inventory_ipmi_hostname: "10.30.50.21"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: true
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.26"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.26:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.26"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
new file mode 100644
index 0000000000..8d7223495b
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
@@ -0,0 +1,88 @@
+---
+# file: host_vars/10.30.51.22.yaml
+
+hostname: "s22-nomad"
+inventory_ipmi_hostname: "10.30.50.22"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: true
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.26"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.26:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_1_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_1_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.26"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml
new file mode 100644
index 0000000000..8c3afaf74c
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml
@@ -0,0 +1,92 @@
+---
+# file: host_vars/10.30.51.23.yaml
+
+hostname: "s23-nomad"
+inventory_ipmi_hostname: "10.30.50.23"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "both"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: true
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.26"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.26:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+nomad_volumes:
+ - name: "prod-volume-data1-1"
+ path: "/data"
+ read_only: false
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: true
+consul_datacenter: "yul1"
+consul_node_role: "both"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.26"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml
new file mode 100644
index 0000000000..cb65e81c26
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml
@@ -0,0 +1,92 @@
+---
+# file: host_vars/10.30.51.24.yaml
+
+hostname: "s24-nomad"
+inventory_ipmi_hostname: "10.30.50.24"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "both"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: true
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.26"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.26:4647"
+ - "10.30.51.25:4647"
+nomad_volumes:
+ - name: "prod-volume-data1-1"
+ path: "/data"
+ read_only: false
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_1_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_1_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: true
+consul_datacenter: "yul1"
+consul_node_role: "both"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.26"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml
new file mode 100644
index 0000000000..20e45a90ea
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml
@@ -0,0 +1,92 @@
+---
+# file: host_vars/10.30.51.25.yaml
+
+hostname: "s25-nomad"
+inventory_ipmi_hostname: "10.30.50.25"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "both"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: true
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.26"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.26:4647"
+nomad_volumes:
+ - name: "prod-volume-data1-1"
+ path: "/data"
+ read_only: false
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_2_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_2_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: true
+consul_datacenter: "yul1"
+consul_node_role: "both"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.26"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml
new file mode 100644
index 0000000000..0d71009ede
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml
@@ -0,0 +1,92 @@
+---
+# file: host_vars/10.30.51.26.yaml
+
+hostname: "s26-nomad"
+inventory_ipmi_hostname: "10.30.50.26"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "both"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: true
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+nomad_volumes:
+ - name: "prod-volume-data1-1"
+ path: "/data"
+ read_only: false
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_3_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_3_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: true
+consul_datacenter: "yul1"
+consul_node_role: "both"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml
new file mode 100644
index 0000000000..745686c31e
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml
@@ -0,0 +1,86 @@
+---
+# file: host_vars/10.30.51.27.yaml
+
+hostname: "s27-nomad"
+inventory_cimc_hostname: "10.30.50.27"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_agent_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml
new file mode 100644
index 0000000000..5a3c8896fc
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml
@@ -0,0 +1,86 @@
+---
+# file: host_vars/10.30.51.28.yaml
+
+hostname: "s28-nomad"
+inventory_cimc_hostname: "10.30.50.28"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_agent_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml
new file mode 100644
index 0000000000..543f557d6e
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml
@@ -0,0 +1,97 @@
+---
+# file: host_vars/10.30.51.30.yaml
+
+hostname: "s30-t15-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu: "on"
+ vfio.enable_unsafe_noiommu_mode: 1
+inventory_ipmi_hostname: "10.30.50.30"
+vfs_data_file: "csit-initialize-vfs-spr.sh"
+cpu_microarchitecture: "sapphirerapids"
+
+intel_800_matrix: "dpdk22.03"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csit"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
+docker_daemon:
+ default-shm-size: "1073741824"
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ ansible_hostname }}"]
+ host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml
new file mode 100644
index 0000000000..1c80c5d4a6
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml
@@ -0,0 +1,97 @@
+---
+# file: host_vars/10.30.51.31.yaml
+
+hostname: "s31-t16-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu: "on"
+ vfio.enable_unsafe_noiommu_mode: 1
+inventory_ipmi_hostname: "10.30.50.31"
+vfs_data_file: "csit-initialize-vfs-spr.sh"
+cpu_microarchitecture: "sapphirerapids"
+
+intel_800_matrix: "dpdk22.03"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csit"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
+docker_daemon:
+ default-shm-size: "1073741824"
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ ansible_hostname }}"]
+ host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.32.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.32.yaml
new file mode 100644
index 0000000000..f7d9c092e5
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.32.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.32.yaml
+
+hostname: "s32-t31-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=16 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-19,21-39"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-19,21-39"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-19,21-39"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,20"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.32"
+cpu_microarchitecture: "icelake"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.33.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.33.yaml
new file mode 100644
index 0000000000..c91d5e8d35
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.33.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.33.yaml
+
+hostname: "s33-t31-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=16 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-19,21-39"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-19,21-39"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-19,21-39"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,20"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.33"
+cpu_microarchitecture: "icelake"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.34.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.34.yaml
new file mode 100644
index 0000000000..0c26db6084
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.34.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.34.yaml
+
+hostname: "s34-t32-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=16 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-19,21-39"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-19,21-39"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-19,21-39"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,20"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.34"
+cpu_microarchitecture: "icelake"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.35.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.35.yaml
new file mode 100644
index 0000000000..d60b46c52a
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.35.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.35.yaml
+
+hostname: "s35-t32-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=16 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-19,21-39"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-19,21-39"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-19,21-39"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,20"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.35"
+cpu_microarchitecture: "icelake"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml
new file mode 100644
index 0000000000..1db367bd67
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.36.yaml
@@ -0,0 +1,23 @@
+---
+# file: host_vars/10.30.51.36.yaml
+
+hostname: "s17-t33-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ iommu.passthrough: "1"
+ vfio.enable_unsafe_noiommu_mode: 1
+ isolcpus: "1-10,17-26,33-42,49-58"
+ nmi_watchdog: "0"
+ nohz_full: "1-10,17-26,33-42,49-58"
+ nosoftlockup: true
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-10,17-26,33-42,49-58"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,11-16,27-32,43-48,59-63"
+ vm:
+ nr_hugepages: 32768
+
+inventory_ipmi_hostname: "10.30.50.36"
+cpu_microarchitecture: "taishan"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml
new file mode 100644
index 0000000000..7d3188251a
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.37.yaml
@@ -0,0 +1,23 @@
+---
+# file: host_vars/10.30.51.37.yaml
+
+hostname: "s18-t33-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ iommu.passthrough: "1"
+ vfio.enable_unsafe_noiommu_mode: 1
+ isolcpus: "1-10,17-26,33-42,49-58"
+ nmi_watchdog: "0"
+ nohz_full: "1-10,17-26,33-42,49-58"
+ nosoftlockup: true
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-10,17-26,33-42,49-58"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,11-16,27-32,43-48,59-63"
+ vm:
+ nr_hugepages: 32768
+
+inventory_ipmi_hostname: "10.30.50.37"
+cpu_microarchitecture: "taishan"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml
new file mode 100644
index 0000000000..8b95603594
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.38.yaml
@@ -0,0 +1,20 @@
+---
+# file: host_vars/10.30.51.38.yaml
+
+hostname: "fdio-marvell-dev"
+inventory_ipmi_hostname: "10.30.50.38"
+cpu_microarchitecture: "thunderx"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml
new file mode 100644
index 0000000000..3b9b63dfd0
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.49.yaml
@@ -0,0 +1,33 @@
+---
+# file: host_vars/10.30.51.49.yaml
+
+hostname: "s19-t33t211-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=16384"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-27,29-55,57-83,85-111"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-27,29-55,57-83,85-111"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-27,29-55,57-83,85-111"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,28,56,84"
+ vm:
+ nr_hugepages: 16384
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.46"
+cpu_microarchitecture: "skylake"
+docker_tg: true
+
+intel_700_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml
new file mode 100644
index 0000000000..117c6d2c31
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.50.yaml
@@ -0,0 +1,96 @@
+---
+# file: host_vars/10.30.51.50.yaml
+
+hostname: "s1-t11-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu: "on"
+ vfio.enable_unsafe_noiommu_mode: 1
+inventory_ipmi_hostname: "10.30.50.47"
+cpu_microarchitecture: "skylake"
+
+intel_800_matrix: "dpdk22.03"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csit"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_agent_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
+docker_daemon:
+ default-shm-size: "1073741824"
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ ansible_hostname }}"]
+ host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml
new file mode 100644
index 0000000000..875b759675
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.51.yaml
@@ -0,0 +1,96 @@
+---
+# file: host_vars/10.30.51.51.yaml
+
+hostname: "s2-t12-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu: "on"
+ vfio.enable_unsafe_noiommu_mode: 1
+inventory_ipmi_hostname: "10.30.50.48"
+cpu_microarchitecture: "skylake"
+
+intel_800_matrix: "dpdk22.03"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csit"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_agent_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
+docker_daemon:
+ default-shm-size: "1073741824"
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ ansible_hostname }}"]
+ host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml
new file mode 100644
index 0000000000..8e8d3d39c2
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.52.yaml
@@ -0,0 +1,74 @@
+---
+# file: host_vars/10.30.51.52.yaml
+
+hostname: "s52-t21-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.52"
+cpu_microarchitecture: "sapphirerapids"
+
+mellanox_matrix: "dpdk23.11"
+intel_dsa_matrix: true
+intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72"
+
+docker_sut: true
+docker_volumes:
+ - source: "/usr/bin/ofed_info"
+ target: "/usr/bin/ofed_info"
+ - source: "/dev/hugepages"
+ target: "/dev/hugepages"
+ - source: "/dev/vfio"
+ target: "/dev/vfio"
+ - source: "/etc/sudoers"
+ target: "/etc/sudoers"
+ - source: "/dev/null"
+ target: "/etc/sysctl.d/80-vpp.conf"
+ - source: "/opt/boot/"
+ target: "/opt/boot/"
+ - source: "/usr/bin/iperf3"
+ target: "/usr/bin/iperf3"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ - source: "/var/run/docker.sock"
+ target: "/var/run/docker.sock"
+ - source: "/usr/lib/firmware/"
+ target: "/usr/lib/firmware/"
+ - source: "/usr/local/bin/adf_ctl"
+ target: "/usr/local/bin/adf_ctl"
+ - source: "/etc/4xxx_dev0.conf"
+ target: "/etc/4xxx_dev0.conf"
+ - source: "/etc/4xxx_dev1.conf"
+ target: "/etc/4xxx_dev1.conf"
+ - source: "/etc/4xxx_dev2.conf"
+ target: "/etc/4xxx_dev2.conf"
+ - source: "/etc/4xxx_dev3.conf"
+ target: "/etc/4xxx_dev3.conf" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml
new file mode 100644
index 0000000000..ce07968323
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.53.yaml
@@ -0,0 +1,65 @@
+---
+# file: host_vars/10.30.51.53.yaml
+
+hostname: "s53-t21-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.53"
+cpu_microarchitecture: "sapphirerapids"
+
+mellanox_matrix: "dpdk23.07"
+intel_dsa_matrix: true
+intel_qat_matrix: true
+
+docker_tg: true
+docker_volumes:
+ - source: "/usr/bin/ofed_info"
+ target: "/usr/bin/ofed_info"
+ - source: "/dev/hugepages"
+ target: "/dev/hugepages"
+ - source: "/dev/vfio"
+ target: "/dev/vfio"
+ - source: "/etc/sudoers"
+ target: "/etc/sudoers"
+ - source: "/opt/"
+ target: "/opt/"
+ - source: "/usr/bin/iperf3"
+ target: "/usr/bin/iperf3"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ - source: "/usr/lib/firmware/"
+ target: "/usr/lib/firmware/"
+ - source: "/usr/local/bin/adf_ctl"
+ target: "/usr/local/bin/adf_ctl"
+ - source: "/etc/4xxx_dev0.conf"
+ target: "/etc/4xxx_dev0.conf"
+ - source: "/etc/4xxx_dev1.conf"
+ target: "/etc/4xxx_dev1.conf"
+ - source: "/etc/4xxx_dev2.conf"
+ target: "/etc/4xxx_dev2.conf"
+ - source: "/etc/4xxx_dev3.conf"
+ target: "/etc/4xxx_dev3.conf" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml
new file mode 100644
index 0000000000..1fd8edd5dd
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.54.yaml
@@ -0,0 +1,67 @@
+---
+# file: host_vars/10.30.51.54.yaml
+
+hostname: "s54-t22-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.54"
+cpu_microarchitecture: "sapphirerapids"
+
+intel_800_matrix: "dpdk23.11"
+intel_dsa_matrix: true
+intel_qat_matrix: true
+
+docker_sut: true
+docker_volumes:
+ - source: "/dev/hugepages"
+ target: "/dev/hugepages"
+ - source: "/dev/vfio"
+ target: "/dev/vfio"
+ - source: "/etc/sudoers"
+ target: "/etc/sudoers"
+ - source: "/dev/null"
+ target: "/etc/sysctl.d/80-vpp.conf"
+ - source: "/opt/boot/"
+ target: "/opt/boot/"
+ - source: "/usr/bin/iperf3"
+ target: "/usr/bin/iperf3"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ - source: "/var/run/docker.sock"
+ target: "/var/run/docker.sock"
+ - source: "/usr/lib/firmware/"
+ target: "/usr/lib/firmware/"
+ - source: "/usr/local/bin/adf_ctl"
+ target: "/usr/local/bin/adf_ctl"
+ - source: "/etc/4xxx_dev0.conf"
+ target: "/etc/4xxx_dev0.conf"
+ - source: "/etc/4xxx_dev1.conf"
+ target: "/etc/4xxx_dev1.conf"
+ - source: "/etc/4xxx_dev2.conf"
+ target: "/etc/4xxx_dev2.conf"
+ - source: "/etc/4xxx_dev3.conf"
+ target: "/etc/4xxx_dev3.conf" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml
new file mode 100644
index 0000000000..1b9f9a56d3
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.55.yaml
@@ -0,0 +1,63 @@
+---
+# file: host_vars/10.30.51.55.yaml
+
+hostname: "s55-t22-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.55"
+cpu_microarchitecture: "sapphirerapids"
+
+intel_800_matrix: "dpdk22.07"
+intel_dsa_matrix: true
+intel_qat_matrix: true
+
+docker_tg: true
+docker_volumes:
+ - source: "/dev/hugepages"
+ target: "/dev/hugepages"
+ - source: "/dev/vfio"
+ target: "/dev/vfio"
+ - source: "/etc/sudoers"
+ target: "/etc/sudoers"
+ - source: "/opt/"
+ target: "/opt/"
+ - source: "/usr/bin/iperf3"
+ target: "/usr/bin/iperf3"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0"
+ - source: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ target: "/usr/lib/x86_64-linux-gnu/libiperf.so.0.0.0"
+ - source: "/usr/lib/firmware/"
+ target: "/usr/lib/firmware/"
+ - source: "/usr/local/bin/adf_ctl"
+ target: "/usr/local/bin/adf_ctl"
+ - source: "/etc/4xxx_dev0.conf"
+ target: "/etc/4xxx_dev0.conf"
+ - source: "/etc/4xxx_dev1.conf"
+ target: "/etc/4xxx_dev1.conf"
+ - source: "/etc/4xxx_dev2.conf"
+ target: "/etc/4xxx_dev2.conf"
+ - source: "/etc/4xxx_dev3.conf"
+ target: "/etc/4xxx_dev3.conf" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.56.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.56.yaml
new file mode 100644
index 0000000000..e8f5c55393
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.56.yaml
@@ -0,0 +1,36 @@
+---
+# file: host_vars/10.30.51.56.yaml
+
+hostname: "s56-t23-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.56"
+cpu_microarchitecture: "sapphirerapids"
+docker_sut: true
+
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
+intel_dsa_matrix: true
+intel_qat_matrix: true
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.57.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.57.yaml
new file mode 100644
index 0000000000..03817cdef9
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.57.yaml
@@ -0,0 +1,36 @@
+---
+# file: host_vars/10.30.51.57.yaml
+
+hostname: "s57-t23-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.57"
+cpu_microarchitecture: "sapphirerapids"
+docker_tg: true
+
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
+intel_dsa_matrix: true
+intel_qat_matrix: true
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml
new file mode 100644
index 0000000000..ecfced1823
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.58.yaml
@@ -0,0 +1,36 @@
+---
+# file: host_vars/10.30.51.58.yaml
+
+hostname: "s58-t24-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.58"
+cpu_microarchitecture: "sapphirerapids"
+docker_sut: true
+
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
+intel_dsa_matrix: true
+#intel_qat_matrix: true
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml
new file mode 100644
index 0000000000..5b9cd9b98c
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.59.yaml
@@ -0,0 +1,41 @@
+---
+# file: host_vars/10.30.51.59.yaml
+
+hostname: "s59-t24-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.59"
+cpu_microarchitecture: "sapphirerapids"
+docker_tg: true
+
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
+intel_dsa_matrix: true
+#intel_qat_matrix: true
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-72" \ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml
new file mode 100644
index 0000000000..3b5bb0be8a
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml
@@ -0,0 +1,42 @@
+---
+# file: host_vars/10.30.51.69.yaml
+
+hostname: "s27-t211-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ iommu.passthrough: "1"
+ isolcpus: "1-10,29-38"
+ nmi_watchdog: "0"
+ nohz_full: "1-10,29-38"
+ nosoftlockup: true
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-10,29-38"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,11-28,39-55"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.69"
+cpu_microarchitecture: "thunderx2"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+ - username: testuser
+ groups: [adm, sudo]
+ password: "$6$zpBUdQ4q$P2zKclumvCndWujgP/qQ8eMk3YZk7ESAom04Fqp26hJH2jWkMXEX..jqxzMdDLJKiDaDHIaSkQMVjHzd3cRLs1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+
+intel_700_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml
new file mode 100644
index 0000000000..2337277144
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml
@@ -0,0 +1,98 @@
+---
+# file: host_vars/10.30.51.70.yaml
+
+hostname: "s70-t13-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu.passthrough: "1"
+inventory_ipmi_hostname: "10.30.50.70"
+vfs_data_file: "csit-initialize-vfs-alt.sh"
+cpu_microarchitecture: "altra"
+
+intel_700_matrix: "dpdk22.07"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDJIAgAKD47mDur3n5jeHXzId2uUFHKa5kBiF649YQsdBKeQyfMVysvN9immKSTvwo4BUlwqeKQq5aIWWpjKMJU2/WXe4WU1YVyKLYCAlbkYJ3WuIBKQ/fm2wb8M4oXtgkYb+wEr5RkP48WqtIo3Cm/L+1j6k5jiu5E1hKBmdaY1er5OG9nCpOHfN3e+VkWwIjqdHFphB9NIMu2X+1iKDwOq4+sIX6POweVvcGFZJ8djB4RRtnkuH5W89x7k8IM4e2w0SK/5yKfxNfN3CzWSQ1dsqpQFPbry7z8Oy+56mlRs15bv5TU9IJ78aDpp/FbSZPfVfmTfwFLUBIHMtEjLUGBrGPQN8p32ap+6a9st5Qfh7rVhIGyB/4npLmar9Nw0lJNX9nmKiD119bkwyuWZjk4s2ELvCAw9RBJCHP8AxXnLgieqkBebn00zoGL/gdQTxXKDJGe3SEbOk56AkkIynB6I7prERvnbIhGI/ObwrNKtfKliiIKq3iWTdBP6BfCgAOqgD6320G2VdZyXyh3oXyM2AlFXzuA8zc8wpZraUCX9J/iMoxhELcL0gpDFO4HUKxTt+uU45uNNK0DkXw3GDF/lr+oYvzJ45jX0qMExF6EHaKfplZxW0Nt9rPT8pKi9BC8dzdSHXuunA1PshvEfc7mLMtz0QdOXOvomtM2Jv84lw== jieqiang.wang@arm.com"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDPsm7Ny+8QYyJ5JEECF0sntRbsF3jRqdSItPTgcbBEFwfNBVd0ulmmkwPUVrcJRMfGuzp3vA3Ss/BgutfvNo3WD5G+WECnOWXiTzroM34oZQ6awoZujxlQsNGBRsiGTPNay6oFoS2hIaW5OB/QHZwZH8HVYcc53oyM0uC72ItnCg5cvSS5v1XaoQby0pUsu2v5uSOm35XV/N2ishcF3sxfCjTMZEODCwYdcb1xOflzIWlIk7ZSDNzOlpmG/jZNDfc7V2GHvGz7WnBFkjkcVH86SEVcQmsc7yyQD1UUG/EZ5AA75vbH4vFye4cISTWpBZik5CbkElxvX9XrfFxtYEM/ tianyu.li@arm.com"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csitarm"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
+docker_daemon:
+ default-shm-size: "1073741824"
+# dns: ["172.17.0.1"]
+# dns-opts: []
+# dns-search: ["{{ansible_hostname}}"]
+# host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml
new file mode 100644
index 0000000000..1d414b32c7
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml
@@ -0,0 +1,98 @@
+---
+# file: host_vars/10.30.51.71.yaml
+
+hostname: "s71-t14-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu.passthrough: "1"
+inventory_ipmi_hostname: "10.30.50.71"
+vfs_data_file: "csit-initialize-vfs-alt.sh"
+cpu_microarchitecture: "altra"
+
+intel_700_matrix: "dpdk22.07"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDJIAgAKD47mDur3n5jeHXzId2uUFHKa5kBiF649YQsdBKeQyfMVysvN9immKSTvwo4BUlwqeKQq5aIWWpjKMJU2/WXe4WU1YVyKLYCAlbkYJ3WuIBKQ/fm2wb8M4oXtgkYb+wEr5RkP48WqtIo3Cm/L+1j6k5jiu5E1hKBmdaY1er5OG9nCpOHfN3e+VkWwIjqdHFphB9NIMu2X+1iKDwOq4+sIX6POweVvcGFZJ8djB4RRtnkuH5W89x7k8IM4e2w0SK/5yKfxNfN3CzWSQ1dsqpQFPbry7z8Oy+56mlRs15bv5TU9IJ78aDpp/FbSZPfVfmTfwFLUBIHMtEjLUGBrGPQN8p32ap+6a9st5Qfh7rVhIGyB/4npLmar9Nw0lJNX9nmKiD119bkwyuWZjk4s2ELvCAw9RBJCHP8AxXnLgieqkBebn00zoGL/gdQTxXKDJGe3SEbOk56AkkIynB6I7prERvnbIhGI/ObwrNKtfKliiIKq3iWTdBP6BfCgAOqgD6320G2VdZyXyh3oXyM2AlFXzuA8zc8wpZraUCX9J/iMoxhELcL0gpDFO4HUKxTt+uU45uNNK0DkXw3GDF/lr+oYvzJ45jX0qMExF6EHaKfplZxW0Nt9rPT8pKi9BC8dzdSHXuunA1PshvEfc7mLMtz0QdOXOvomtM2Jv84lw== jieqiang.wang@arm.com"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDPsm7Ny+8QYyJ5JEECF0sntRbsF3jRqdSItPTgcbBEFwfNBVd0ulmmkwPUVrcJRMfGuzp3vA3Ss/BgutfvNo3WD5G+WECnOWXiTzroM34oZQ6awoZujxlQsNGBRsiGTPNay6oFoS2hIaW5OB/QHZwZH8HVYcc53oyM0uC72ItnCg5cvSS5v1XaoQby0pUsu2v5uSOm35XV/N2ishcF3sxfCjTMZEODCwYdcb1xOflzIWlIk7ZSDNzOlpmG/jZNDfc7V2GHvGz7WnBFkjkcVH86SEVcQmsc7yyQD1UUG/EZ5AA75vbH4vFye4cISTWpBZik5CbkElxvX9XrfFxtYEM/ tianyu.li@arm.com"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csitarm"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
+docker_daemon:
+ default-shm-size: "1073741824"
+# dns: ["172.17.0.1"]
+# dns-opts: []
+# dns-search: ["{{ansible_hostname}}"]
+# host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml
new file mode 100644
index 0000000000..b7c8c26aae
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.72.yaml
@@ -0,0 +1,26 @@
+---
+# file: host_vars/10.30.51.72.yaml
+
+hostname: "s62-t34-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ iommu.passthrough: "1"
+ isolcpus: "1-40,81-120"
+ nmi_watchdog: "0"
+ nohz_full: "1-40,81-120"
+ nosoftlockup: true
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-40,81-120"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,41-80,121-159"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.72"
+cpu_microarchitecture: "altra"
+
+intel_700_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml
new file mode 100644
index 0000000000..0811b038b7
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.73.yaml
@@ -0,0 +1,26 @@
+---
+# file: host_vars/10.30.51.73.yaml
+
+hostname: "s63-t34-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ iommu.passthrough: "1"
+ isolcpus: "1-40,81-120"
+ nmi_watchdog: "0"
+ nohz_full: "1-40,81-120"
+ nosoftlockup: true
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-40,81-120"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,41-80,121-159"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.73"
+cpu_microarchitecture: "altra"
+
+intel_700_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml
new file mode 100644
index 0000000000..473e4a9a5e
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.74.yaml
@@ -0,0 +1,33 @@
+---
+# file: host_vars/10.30.51.74.yaml
+
+hostname: "s64-t34-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.74"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml
new file mode 100644
index 0000000000..a96f087643
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.75.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.75.yaml
+
+hostname: "s65-t37-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.75"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml
new file mode 100644
index 0000000000..c1ddcf5a58
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.76.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.76.yaml
+
+hostname: "s66-t37-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.76"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml
new file mode 100644
index 0000000000..e447ed2c81
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.77.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.77.yaml
+
+hostname: "s67-t37-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.77"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml
new file mode 100644
index 0000000000..88f36b3880
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.78.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.78.yaml
+
+hostname: "s78-t38-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.78"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml
new file mode 100644
index 0000000000..37d6a18b2c
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.79.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.79.yaml
+
+hostname: "s79-t38-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.79"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml
new file mode 100644
index 0000000000..dcb87d1a7c
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.80.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.80.yaml
+
+hostname: "s80-t38-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.80"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml
new file mode 100644
index 0000000000..e984947235
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.81.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.81.yaml
+
+hostname: "s71-t212-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.81"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml
new file mode 100644
index 0000000000..5e6160e3ec
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.82.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.82.yaml
+
+hostname: "s72-t212-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.82"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml
new file mode 100644
index 0000000000..ac936cd89d
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.83.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.83.yaml
+
+hostname: "s83-t213-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.83"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml
new file mode 100644
index 0000000000..05877b59e9
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.84.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.84.yaml
+
+hostname: "s84-t213-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.84"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml
new file mode 100644
index 0000000000..0d61c87e4f
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.85.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.85.yaml
+
+hostname: "s85-t214-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.85"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml
new file mode 100644
index 0000000000..f40b86bd6b
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.86.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.86.yaml
+
+hostname: "s86-t214-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.86"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml
new file mode 100644
index 0000000000..2c767d6795
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.87.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.87.yaml
+
+hostname: "s87-t215-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.87"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml
new file mode 100644
index 0000000000..313c316752
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.88.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.30.51.88.yaml
+
+hostname: "s88-t215-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.88"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk23.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml
new file mode 100644
index 0000000000..7ab0d8ab68
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.89.yaml
@@ -0,0 +1,33 @@
+---
+# file: host_vars/10.30.51.89.yaml
+
+hostname: "s89-t39t310-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.89"
+cpu_microarchitecture: "icelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.90.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.90.yaml
new file mode 100644
index 0000000000..dfc36904f8
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.90.yaml
@@ -0,0 +1,38 @@
+---
+# file: host_vars/10.30.51.90.yaml
+
+hostname: "s90-t31t32-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on,sm_on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-31,33-63,65-95,97-127"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-31,33-63,65-95,97-127"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-31,33-63,65-95,97-127"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,32,64,96"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.90"
+cpu_microarchitecture: "icelake"
+docker_tg: true
+
+intel_800_matrix: "dpdk23.07"
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+      - "5.15.0-72"
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml
new file mode 100644
index 0000000000..53239492ef
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.91.yaml
@@ -0,0 +1,88 @@
+---
+# file: host_vars/10.30.51.91.yaml
+
+hostname: "s91-nomad"
+inventory_ipmi_hostname: "10.30.50.91"
+cpu_microarchitecture: "neoversen1"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_agent_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+#consul_package_version: "1.5.2+dfsg2-14"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml
new file mode 100644
index 0000000000..19ec70ce83
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.92.yaml
@@ -0,0 +1,88 @@
+---
+# file: host_vars/10.30.51.92.yaml
+
+hostname: "s92-nomad"
+inventory_ipmi_hostname: "10.30.50.92"
+cpu_microarchitecture: "neoversen1"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWWDIudZUaHp6pPkYmBsBeEYcbUoGGZH73eB374g8e9V3dfT7BuJOMR/Z3LxCGw5GcDDgzJHLQr8AykN7lUscaux68CXSf1CAKhfBeWBOCQD4G5tpmD6610jsiBig6bFISKpGW4nLt80yt3bKEqv6ezt35QuMeh3E45uK/pCRfDcukOThbwYG44xr7TeYAyNacWuNV9HDy5DzjBelElmolTOgjtZEbedmIcUbV/gb8yekUxxBTS3d3I5qVLtNRMHPqp9xbJqteRXIphizDlHbaTHZeVy9mrwi1RuTjDSg7p1OPJ9eVIzMYzk87gbwNxZw8481Xb1WsgGmyH/mXJjD1 najoy@NAJOY-M-505G.CISCO.COM"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_agent_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+#consul_package_version: "1.5.2+dfsg2-14"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml
new file mode 100644
index 0000000000..cf4816a5f8
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.93.yaml
@@ -0,0 +1,33 @@
+---
+# file: host_vars/10.30.51.93.yaml
+
+hostname: "s93-t39-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=2 hugepagesz=2M hugepages=4096"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-23"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-23"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-23"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0"
+ vm:
+ nr_hugepages: 4096
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.93"
+cpu_microarchitecture: "snowridge"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml
new file mode 100644
index 0000000000..d663cda18b
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.94.yaml
@@ -0,0 +1,33 @@
+---
+# file: host_vars/10.30.51.94.yaml
+
+hostname: "s94-t39-sut2"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=2 hugepagesz=2M hugepages=4096"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-23"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-23"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-23"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0"
+ vm:
+ nr_hugepages: 4096
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.94"
+cpu_microarchitecture: "snowridge"
+
+intel_800_matrix: "dpdk23.11"
+intel_qat_matrix: true
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml
new file mode 100644
index 0000000000..4423a36f56
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.18.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.32.8.18.yaml
+
+hostname: "s33-t27-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-23,25-47,49-71,73-95"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-23,25-47,49-71,73-95"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-23,25-47,49-71,73-95"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,24,48,72"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.55.18"
+cpu_microarchitecture: "cascadelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml
new file mode 100644
index 0000000000..22210ae4bd
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.19.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.32.8.19.yaml
+
+hostname: "s34-t27-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-27,29-55,57-83,85-111"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-27,29-55,57-83,85-111"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-27,29-55,57-83,85-111"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,28,56,84"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.55.19"
+cpu_microarchitecture: "cascadelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml
new file mode 100644
index 0000000000..36ba5c15f8
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.20.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.32.8.20.yaml
+
+hostname: "s35-t28-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-23,25-47,49-71,73-95"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-23,25-47,49-71,73-95"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-23,25-47,49-71,73-95"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,24,48,72"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.55.20"
+cpu_microarchitecture: "cascadelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml
new file mode 100644
index 0000000000..61c6f51b06
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.21.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.32.8.21.yaml
+
+hostname: "s36-t28-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-27,29-55,57-83,85-111"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-27,29-55,57-83,85-111"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-27,29-55,57-83,85-111"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,28,56,84"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.55.21"
+cpu_microarchitecture: "cascadelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml
new file mode 100644
index 0000000000..ae2947f54a
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.22.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.32.8.22.yaml
+
+hostname: "s37-t29-sut1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-23,25-47,49-71,73-95"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-23,25-47,49-71,73-95"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-23,25-47,49-71,73-95"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,24,48,72"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.55.22"
+cpu_microarchitecture: "cascadelake"
+
+intel_700_matrix: "dpdk23.11"
+intel_800_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml
new file mode 100644
index 0000000000..a3f19b4249
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.23.yaml
@@ -0,0 +1,34 @@
+---
+# file: host_vars/10.32.8.23.yaml
+
+hostname: "s38-t29-tg1"
+grub:
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ intel_idle.max_cstate: "1"
+ intel_iommu: "on"
+ intel_pstate: "disable"
+ iommu: "pt"
+ isolcpus: "1-27,29-55,57-83,85-111"
+ mce: "off"
+ nmi_watchdog: "0"
+ nohz_full: "1-27,29-55,57-83,85-111"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "1"
+ rcu_nocbs: "1-27,29-55,57-83,85-111"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,28,56,84"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.55.23"
+cpu_microarchitecture: "cascadelake"
+
+intel_700_matrix: "dpdk22.07"
+intel_800_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml
new file mode 100644
index 0000000000..accb8c5dc9
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.24.yaml
@@ -0,0 +1,30 @@
+---
+# file: host_vars/10.32.8.24.yaml
+
+hostname: "s60-t210-sut1"
+grub:
+ amd_iommu: "on"
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=32768"
+ hpet: "disable"
+ iommu: "pt"
+ isolcpus: "1-15,17-31,33-47,49-63"
+ nmi_watchdog: "0"
+ nohz_full: "off"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "0"
+ rcu_nocbs: "1-15,17-31,33-47,49-63"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,16,32,48"
+ vm:
+ nr_hugepages: 32768
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.55.24"
+cpu_microarchitecture: "epyc"
+
+intel_700_matrix: "dpdk23.11"
+mellanox_matrix: "dpdk23.11"
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml
new file mode 100644
index 0000000000..629538fa34
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.32.8.25.yaml
@@ -0,0 +1,30 @@
+---
+# file: host_vars/10.32.8.25.yaml
+
+hostname: "s61-t210-tg1"
+grub:
+ amd_iommu: "on"
+ audit: "0"
+ default_hugepagesz: "2M hugepagesz=1G hugepages=32 hugepagesz=2M hugepages=8192"
+ hpet: "disable"
+ iommu: "pt"
+ isolcpus: "1-15,17-31,33-47,49-63"
+ nmi_watchdog: "0"
+ nohz_full: "off"
+ nosoftlockup: true
+ numa_balancing: "disable"
+ processor.max_cstate: "0"
+ rcu_nocbs: "1-15,17-31,33-47,49-63"
+ tsc: "reliable"
+sysctl:
+ kernel:
+ watchdog_cpumask: "0,16,32,48"
+ vm:
+ nr_hugepages: 8192
+ max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.55.25"
+cpu_microarchitecture: "epyc"
+
+intel_700_matrix: "dpdk22.07"
+mellanox_matrix: "dpdk22.07"
diff --git a/fdio.infra.ansible/inventories/lf_inventory/hosts b/fdio.infra.ansible/inventories/lf_inventory/hosts
new file mode 100644
index 0000000000..b7109a1261
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/hosts
@@ -0,0 +1,74 @@
+all:
+ children:
+ tg:
+ hosts:
+ 10.30.51.49: #s19-t33t211-tg1 - skylake
+ 10.30.51.53: #s53-t21-tg1 - sapphirerapids
+ 10.30.51.55: #s55-t22-tg1 - sapphirerapids
+ 10.30.51.57: #s57-t23-tg1 - sapphirerapids
+ 10.30.51.59: #s59-t24-tg1 - sapphirerapids
+ 10.30.51.74: #s64-t34-tg1 - icelake
+ 10.30.51.77: #s67-t37-tg1 - icelake
+ 10.30.51.80: #s80-t38-tg1 - icelake
+ 10.30.51.82: #s72-t212-tg1 - icelake
+ 10.30.51.84: #s84-t213-tg1 - icelake
+ 10.30.51.86: #s86-t214-tg1 - icelake
+ 10.30.51.88: #s88-t215-tg1 - icelake
+ 10.30.51.89: #s89-t39t310-tg1 - icelake
+ 10.30.51.90: #s90-t31t32-tg1 - icelake
+ 10.32.8.19: #s34-t27-tg1 - cascadelake
+ 10.32.8.21: #s36-t28-tg1 - cascadelake
+ 10.32.8.23: #s38-t29-tg1 - cascadelake
+ 10.32.8.25: #s61-t210-tg1 - epyc
+ sut:
+ hosts:
+ 10.30.51.32: #s32-t31-sut1 - icelaked
+ 10.30.51.33: #s33-t31-sut2 - icelaked
+ 10.30.51.34: #s34-t32-sut1 - icelaked
+ 10.30.51.35: #s35-t32-sut2 - icelaked
+ 10.30.51.36: #s17-t33-sut1 - taishan
+ 10.30.51.37: #s18-t33-sut2 - taishan
+ 10.30.51.52: #s52-t21-sut1 - sapphirerapids
+ 10.30.51.54: #s54-t22-sut1 - sapphirerapids
+ 10.30.51.56: #s56-t23-sut1 - sapphirerapids
+ 10.30.51.58: #s58-t24-sut1 - sapphirerapids
+ 10.30.51.72: #s62-t34-sut1 - altra
+ 10.30.51.73: #s63-t34-sut2 - altra
+ 10.30.51.75: #s65-t37-sut1 - icelake
+ 10.30.51.76: #s66-t37-sut2 - icelake
+ 10.30.51.78: #s78-t38-sut1 - icelake
+ 10.30.51.79: #s79-t38-sut2 - icelake
+ 10.30.51.81: #s81-t212-sut1 - icelake
+ 10.30.51.83: #s83-t213-sut1 - icelake
+ 10.30.51.85: #s85-t214-sut1 - icelake
+ 10.30.51.87: #s87-t215-sut1 - icelake
+ 10.30.51.93: #s93-t39-sut1 - snowridge
+ 10.30.51.94: #s94-t39-sut2 - snowridge
+ 10.32.8.18: #s33-t27-sut1 - cascadelake
+ 10.32.8.20: #s35-t28-sut1 - cascadelake
+ 10.32.8.22: #s37-t29-sut1 - cascadelake
+ 10.32.8.24: #s60-t210-sut1 - epyc
+ 10.30.51.69: #s27-t211-sut1 - thunderx2 9975
+ vpp_device:
+ hosts:
+ 10.30.51.30: #s30-t15-sut1 - sapphirerapids
+ 10.30.51.31: #s31-t16-sut1 - sapphirerapids
+ 10.30.51.50: #s1-t11-sut1 - skylake
+ 10.30.51.51: #s2-t12-sut1 - skylake
+ 10.30.51.70: #s55-t13-sut1 - thunderx2 9980
+ 10.30.51.71: #s56-t14-sut1 - thunderx2 9980
+ nomad:
+ hosts:
+ 10.30.51.21: #s21-nomad - sapphirerapids
+ 10.30.51.22: #s22-nomad - sapphirerapids
+ 10.30.51.23: #s23-nomad - skylake
+ 10.30.51.24: #s24-nomad - skylake
+ 10.30.51.25: #s25-nomad - skylake
+ 10.30.51.26: #s26-nomad - skylake
+ 10.30.51.27: #s27-nomad - skylake
+ 10.30.51.28: #s28-nomad - skylake
+ 10.30.51.91: #s58-nomad - neoverse n1
+ 10.30.51.92: #s59-nomad - neoverse n1
+ dev:
+ hosts:
+      10.30.51.38: #fdio-marvell-dev - thunderx 88xx
\ No newline at end of file
diff --git a/fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml b/fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml
new file mode 100644
index 0000000000..3a5ab66d8b
--- /dev/null
+++ b/fdio.infra.ansible/inventories/sample_inventory/group_vars/all.yaml
@@ -0,0 +1,2 @@
+---
+# file: sample_inventory/group_vars/all.yaml
diff --git a/fdio.infra.ansible/inventories/sample_inventory/host_vars/1.1.1.1.yaml b/fdio.infra.ansible/inventories/sample_inventory/host_vars/1.1.1.1.yaml
new file mode 100644
index 0000000000..1c3183ffd1
--- /dev/null
+++ b/fdio.infra.ansible/inventories/sample_inventory/host_vars/1.1.1.1.yaml
@@ -0,0 +1,17 @@
+---
+# file: host_vars/x.x.x.x.yaml
+
+hostname: "t1-tg1"
+grub:
+ - isolcpus: "1-27,29-55,57-83,85-111"
+ - nohz_full: "1-27,29-55,57-83,85-111"
+ - rcu_nocbs: "1-27,29-55,57-83,85-111"
+sysctl:
+ - kernel:
+ - watchdog_cpumask: "0,28,56,84"
+ - vm:
+ - nr_hugepages: 4096
+ - max_map_count: 20000
+
+inventory_ipmi_hostname: "x.x.x.x"
+cpu_microarchitecture: "skylake"
diff --git a/fdio.infra.ansible/inventories/sample_inventory/hosts b/fdio.infra.ansible/inventories/sample_inventory/hosts
new file mode 100644
index 0000000000..e8e1439db4
--- /dev/null
+++ b/fdio.infra.ansible/inventories/sample_inventory/hosts
@@ -0,0 +1,9 @@
+all:
+ children:
+ tg:
+ hosts:
+ 1.1.1.1: #t1-tg
+ sut:
+ hosts:
+ 2.2.2.2: #t1-sut1
+ 3.3.3.3: #t1-sut2
diff --git a/fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml b/fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml
new file mode 100644
index 0000000000..af44a3354d
--- /dev/null
+++ b/fdio.infra.ansible/inventories/vagrant_inventory/group_vars/vagrant.yml
@@ -0,0 +1,2 @@
+---
+# file: vagrant_inventory/group_vars/vagrant.yml
diff --git a/fdio.infra.ansible/inventories/vagrant_inventory/hosts b/fdio.infra.ansible/inventories/vagrant_inventory/hosts
new file mode 100644
index 0000000000..451a01621c
--- /dev/null
+++ b/fdio.infra.ansible/inventories/vagrant_inventory/hosts
@@ -0,0 +1,4 @@
+# Inventory file for VPP Device vagrant environment
+
+[vagrant]
+localhost ansible_connection=local
diff --git a/fdio.infra.ansible/nomad.yaml b/fdio.infra.ansible/nomad.yaml
new file mode 100644
index 0000000000..d7aa467007
--- /dev/null
+++ b/fdio.infra.ansible/nomad.yaml
@@ -0,0 +1,32 @@
+---
+# file: nomad.yaml
+
+- hosts: nomad
+ remote_user: localadmin
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: baremetal
+ tags: baremetal
+ - role: docker
+ tags: docker
+ - role: nomad
+ tags: nomad
+ - role: consul
+ tags: consul
+ - role: vault
+ tags: vault
+ - role: prometheus_exporter
+ tags: prometheus_exporter
+ - role: jenkins_job_health_exporter
+ tags: jenkins_job_health_exporter
+ - role: cleanup
+ tags: cleanup
diff --git a/fdio.infra.ansible/nomad_vault.yml b/fdio.infra.ansible/nomad_vault.yml
new file mode 100644
index 0000000000..a9e8fde783
--- /dev/null
+++ b/fdio.infra.ansible/nomad_vault.yml
@@ -0,0 +1,796 @@
+$ANSIBLE_VAULT;1.1;AES256
+33306665326534376332386439383961343830333466353035393561313661636538663936316465
+6666313364613566346361303235313433623235646562660a613861353636623637656331616531
+37303538383839383731616663656230383930363962663033613163666332333032376131306264
+6439363062376361350a643335363334396535326138353635316535366261353761633635333835
+61393763336533363236636265363764323863356365376332393737383636653961323063323061
+34653133656535383366363532363736613765323163313731623465323330666635623535316434
+30643238643238643038353737373736386162623830366335383431333139656539336430656435
+61393365646433633833393166323266306236623761623664303761336463326634373938623235
+31663132643835313430633162643361306135646133333238383834333431643138373435613132
+38613862623766323838613035316564303934396162353032356639343039346462313935663634
+65653535643834363030306638336136323363383935393436656637343438633839326165656430
+38343866623862393364353334653566343338656238646135616266356336363339326662373464
+30656139663864353439316432623235653533303032363930366134313933653738623731336331
+38663566653334303462356138643863373937396666666339303337383533336565383732626461
+35613531643763666563626330636639353566373063303232643936353530373162646162626133
+61666532636461333535373334363764653936323164663639613932613961313930366330383735
+33633430343361326635316632633065333536303031653539366361663033623336656463363436
+30316433633733333735346137373735303030333565336530656261306338646237663163393363
+33323133663663356263633565323161313065303735666566383564613439663336306330343533
+32623931623439323161656430666430313962316334656266336332313239643261316433373463
+34656630313065333866613336663761366530616333386431343864633232336463356134643233
+63373235343438363465396363363032313366393936396334613565303936343166656336376330
+64633561353934653364613739306132623266303563313434346261636462663166383466343437
+61313766613263373230363333636237633861626531666161393435376135343466663638323465
+31633832316262316661653734343965356134346132336563346634343935393735393965383962
+61666262656431376436633336393631613335363835663930396139653262343066613936363761
+64626134366131666133643338333862313366366562323664336361633339366235646639346365
+62343639666363373131343838353836636263313862393165363336656264306361393661373735
+33303435343035616531663434663935346630313731323338656237616638613530316131313131
+62303162326162636230346134323036383134653863343961323166646163316436346131386335
+36653166613231643632653435653733626631303732323337303562396436336639383636643633
+39313365653739343265383533626666623131366164353937363933613866363966616535386135
+38616561613336613037633331303463383830626132343664636433393935623932336464636435
+63633165363031386336633537333235343962373565643964373130373564373830326135633138
+63333366316132373761313561623466323563646364353939633639396138633038316130343638
+37316164353830333333666337383063383834653565366362613465636235623335396438623564
+35613639303333343033623037663337373932313063626364316535663632613761613236656533
+39303937643030373366326664663862313038336566636533326264323562386236373135306463
+63303135363338306365633735323731646533633432353266613535336432313132666631623063
+37616437656132386130383265666330616234653137306564663166666231303762653036373636
+63373961663936353236653334316530623638373133383061383730366633613339616439373738
+30343265326563366231316164396637363331303165663735326139613936643636336232663361
+32343261373730353533383237376661613966633932383930636661656438393934646661663135
+64343838663065363731663332653739643035363032623836323462343532326431656437303237
+37353362656665303337633538306263303163663463613531336165333462346533633038613465
+31353833616238623332373033636632386665306237373833303731653034376334663864633037
+31663035346333623135616139366336666363393638353262666466333164333931643630386531
+33323736396665333631313966326237326235343338313332643863613561393034326161343430
+35643230363237373261616336383535636232656238626232663435636162383165616530346165
+30633033313664313461643639613065303636613861643233613263643939613064356163353339
+62646564626531323333373562633337633038663265613065373566333661643435313536323130
+63646364353630383132346339343262396635636662376339353162346636366663633632653637
+30356532363162613738633662373035636335313331343864356335346363303239326138316662
+30643166313032626332383765386436346131353032613232393034636431333566653830633865
+31336666353735623638386432663431646334663237383362623061333364313533326161306563
+64626465373939353066373233366332646438303064643236373465666363633730643131346336
+62343265643532333138653238326535303039333466363666313564333036313934326662356432
+63356261353564316462356131623464343466393833353232623034623133396134303936613263
+65393434636162353537326433356564616334333635646362643963666230353030626438663364
+30303436663562633635313063663863343734393632333665343635613830383061396636636232
+37336262393363303765663736613039636333336166653433333130386230653135386530616330
+31623639383334303338306236333831653139666565373262383633306437646636333437646132
+63323135336630313864626637623736633537623661363763353630333038353862393863666532
+36626630313835663665633663623533386638326564366336366135333834383032343763383061
+36313936623337396537373434346166313762396262633331303265343831383738363937363863
+34363937343966363134666639616364353032363932333534643463346431343430653634633734
+36393932356634393037643765353064363834373131393265336235366265326264653635383737
+65353832346162656634333264353932343836323961313936356463626232336336383034393166
+66333732356137313166356235353936316233313266336231386130373437303833393163333338
+66333633363136623165363136323931633564656337366462653933373834303132616336306361
+33316633353034303833636665613537643764343862653334366135633037626461636464646437
+36303063643434663061323263313565646265653235613238303335326264623066663934336165
+32386562363263623036653461313132393365323232613663396436396566346234626130393639
+31623039653062386130373439656539616238333531313739356135383632343738643465303333
+38353832323236363835643835303463366538636334393865373038393862616630363238383665
+61613236643135376336323366326339396432326666323531363030623433363330353530613832
+38363431663638633061633862636666666636353638643436653862613263623865643133656638
+38656661316165353230343738613130633765356432376237643832613264373333616461633764
+38653363646235326433333739313263323832353262633835396361396663666335663366633665
+39383666313465643530353230616664356334623465623362646632363935633866613566623465
+65393834633831363031626433313136613162353966366265343436323362323566336564343635
+36353930656130386134313864393964393166666365626531336137363133396238373465663737
+31393332656364353439636538326562316231663262323234333063353832316663336362646332
+38376364313730303662363738646265653930306366363065303263323936643366326463363365
+32376365383461643534336233666364376633663834323664393339356134356431616165633538
+39386362626630333764653735316662663434303963623462396165373963363538366433653932
+61316562663532313034626336656265393663663237656365643830613136383833616364303538
+38346637613466643765386132613461326261363461643932636436666335353630633261653763
+31376132383465386361323730373764313664386531303162343532373937306538386231393961
+33653061383966313161396564396331663362343262633936633262613165663334663562353739
+32633233623366663363336464386232646336613632633866646437326230653866333363356663
+64646338626164643737306166623331323266343266386162643466343432323833313033363837
+32393566303537333733333836636663666230663132643638393137646639633161316331323039
+64306539393131316335376261653364323538633638303330303962313863353434616361373961
+65363065613038333830663533373539333636663766643434343266623437633364653366643038
+39303763353536653232363033613934386630623636656361383430363664363262616539343830
+32353735663630336162646334313234393837343936353830306235653433373863393265386461
+37633630623339663664623661633637346564666532663432346634633639663032373261373937
+32363364396430626138313464356635313933633637643462373032633636376333376563623836
+64336362393565353431653934343863346631346630613734366265636164323966643339366131
+63653865616230356339303863653864386636306365626236356462663364626433313462663466
+31623035373835366131613534646539663963616263343935393937326639646534636136326437
+32333937643736343233333039643232316462663936373664313338343832356438326239353033
+36363133386533306665643833383636343533623537366333373431613865333938353139313963
+31626163643362343735333534373061333539393839396630646437666532363338383162323235
+39666638323761616433623034663063376666383365306565666366663230313231366461333262
+63356166363334626538616438376463626465626163336266633333383338333130366632633934
+34343361383637356335386361616366333636343065306639616462333837646561613532333133
+34333032303634336634353231646533613831663564616636303061613663393963616662303336
+61616238646462613962616138333639336665353961386262346536613664386662346139336266
+38643535666266336661383335383430383564323435636438306132643538613261333737323132
+38396539366365313834393733636339363438396164326635393736633934366534383861376631
+64393832633664396436393666396662323065663932393237343164373863393834643536303833
+36633237363164376635653330653266626165656662656665666461306439383937343266303439
+35666536323961336564393638643130643062333433303431333331336137626661373232626262
+30353666396332323762376532353165663564613836393534663330363864646232643836353233
+30326338306662666365653863393265653431396465306332663463613561386263633536323235
+63633934643532343730616666616661356237616161616462643534383339313230396431653763
+31653337346563666631333436626564653737393266353839636162313330333063623433366566
+35646432356639613936353939643762656435666666373962623336666233363534626161323836
+32643864353639363537346232353762333232643763373331613864363932623530653538353062
+31336639343037643037623938346166323630393232333266323965636437303166653863653730
+64373837346432386139616131383931623563373064316461613461383864626266663566316262
+37366436656536343339643730613732306364326462636232373637613736633436636434653563
+31373563646334366435386339316430646431343763623739366163353063653338656166303534
+65356464343766653565613265633663623539373238633934623266636139383438643261623662
+30333639353431326336343833393766623136303662646162633930353364663662303731343332
+35326432353131336436613564666539646431363833373062343237353235613963356137643430
+64613962623932386462336439653732653733653533663762613561623038656365666237356130
+63383065633462303337323566316364356631653230633864366530373262306330393037633665
+34396163623034366430303030373435323164373461626132366633306539656334303237303864
+35636136383736613232656638313535386161663034636532616631626530363234303130356530
+62656164313464333335663433666437636431363233646564663135343539326263643935633662
+62636336666139633762316439313937396539346436316235363135346138383630303034313035
+63623631306565323264626661626262653662313734336232643438303431656565303963313437
+63363365323532653963623065366635366133646635636130616539393739643266666336623432
+37666562653532363964613763353462326631316566363633643566653663663535656130363135
+62386531386533366632333564393431303466303465333537656334373562623462356237383363
+34373838333466303536643639306635636638396365383537323162343236393533623436333537
+66613731363534663235373631353162333235393836653463643131643633356633343363636234
+38633238643339633636643834333036643237313238633138343039643534353163626235323630
+37376364373366343164656566646531383336313834333136633364303438623739366634346138
+61643735626236623664353236323239363362643737363561306661386235353937353763306232
+64666532393436306134366336303363313935383831313037353738313863353431393263383830
+39653233663866396437303635303735656434656634653937663465303638363861316332333161
+63343439353563653864353339333034633465343030326134316235373565353536323366303834
+36316535623961383338663530646434386332366132383035356462643565666332626265383130
+39353464366663363437336631613366366633623831313631383132323462316264383264366566
+32356137303233363564383038363838646338646637663135343334353162343461323538646539
+32336137666238643830313661333534333835633762393139323561373066376261316363383861
+30643635303561643834323864643339643363643932356233303965626433613638396536393465
+39333535646634363239386134303535613532383165343436666164626662373263333635383537
+64316638396333656265343830616633363039626466653932643334663132663039653865323166
+30656633356532663561643863653266633766333163336264333836623831383266323632356431
+36333261326631663835633763323561626330663838353065333434336565313865353836383162
+66643639346465613135336335376661303831303865613836313736643433303334333836336637
+66643136336336663065363530306532316564376633376338363332333439356338306365643466
+39386334333563313239376165616166323735393639363262653533383835623634626230363830
+37326666313663373439383065656662366665643539326663393663316531356132333235626130
+65613962396665323039323561666138363331626562346363623431626531383931626265346238
+39623266373135363735646635656530653463343862386538633066626134653730633337316330
+66353131646638613866316436353536373633646232653331313138356437646136383330663430
+66363330343861386139333435386132326531313835383062653635306463386433353932373062
+34366335373432336638383261316132373437333030623730633034353965383336646465376238
+32613338396166343365656464313562313934303238316466656431366538316533353331633664
+39653965333961386462393530653036653566643634643262623435623538343562356635313935
+38656130363161623039303438393137323466373031623739636364646633616162393962336466
+36323430386234356236653563393362306466353565313733326330613966313564663564656466
+32323935313533643731666135633035633336623966373338623734633361323463616464643366
+33633735383331623331373462663862383430376566363936356639316131313765313430343733
+63623639316634343862643766643238616330316638663834633731333430613863313564643463
+34343466393732363861363434366464623461323434653637336465323163313734666339353833
+61653538393137333638643633366464663833356138393939653030333537356361613238396564
+33653833666639326562343032643561616463623131666361643933373538636665346666316639
+34343230626365623466643462613232373038633239646364653432623430313735383637626630
+66343030373033313731356234636630383266383565396336303037393030363163623034383639
+32316635353838636238343737343564353238333661336436336266393538353138663861323963
+64326664656436363464383665376334373862396666373837656237353364353034653262613539
+36333031626265623537656537646239323764383937373939643661373963323332393835663735
+31373264616664346239363437326139636138643730666465313962306432643263353362646362
+62323937663334623037326632373966353665393166636563643130393437313263643739303131
+62663939323831373431633263666337356662646535336666326665343062383930346635656562
+65326436356430646339383864663565366264653362313862623361373138356634386562313834
+33323865373431316630623533643336636331656562326539616334633435313835323537303933
+30643435333565666330396339386364316434326439626132656638666531383561653832643962
+66353731303934366463613335343164353337616434633132666435653363356664616336363438
+30373964323835396438666133333965663939653731373564396431366232393762386639333330
+36303632613564363266333265363432366365623264393637653937383231613865613336646630
+32396430643666353066626131373863623733633038613765313361386334386133326638303536
+30373266653536383062616231326330313261313137616631313736653839646435346137343139
+32393435636538313161383231666236303334316233626130303661373464303235336562396234
+37333261366239366138613533643535613630663034306363623336633238616235636239366239
+38373336306266656465616331646233316337386263646563633366666330353437386536336163
+61373432623530353736363430396535636532373235653036306465653436656330666463623963
+63333633346462626463613465643765613363363434313933333139633462343031313635363834
+38386663326237346639653830633031626162663662663961613266613838613535363234356431
+32623832656136663733333165643437663733313536353761346361653638333265633933306263
+35393663643337396432643933396337623839313832376534613162393732316234343434363961
+38363362383365623035646364313031633161636534326262323865323139363937656565383562
+33333832316334323636616538663263373038356538613765316666623232626566346464333065
+34663266633732393961636338386538326266666463616636623865633836353532383162643330
+39323831306237366435313261393839653335326430373835356333626639383930316263356334
+35653730646363636234663532616239303563383932353964653566313035343736656436623764
+37396334633366663365616438333761393362666234303234626163663939393431373561326264
+62356163383262393437373835303830393530356163313930643233313066366338633036333936
+35366339646162396234376232336235383232373937393336313831336464366230383566356438
+33313338646464333135346636656137656537376536666233643161393235386462316365653037
+35616666303531353031663261633962613836653236366561366537646235393165346238303663
+31666434653339626462306334643339336139353135333435346263333038363934313063623763
+63366434346332383466633962353331623761373834643437396438353066383763643334373432
+32356534323862376335623136346465636438633635653539633965623930613238356130363261
+36666231376164646431313736376665346533346662666333626236376332626438323438633161
+64343566343432643236626436376230353465393165323636356561343432656132303364396138
+64323636666238353663613565623232646234646136633935636538306261613337643539643232
+34656661306330376331643332393261626439373361356361356666393034663064323538316432
+35393562326234633436623362373639303363383462353832373231376666373633323334363165
+66386566666131343562303435383663303732646566643239663166633162333964613137333731
+35633335646530666666643134636366343363323931336563326436663433363732373662363232
+36633437373634303337343537646130323232373833663734386136666563353465666230663664
+61373063343932616566653862363339393665646430663733343862666663303562643536646536
+62373462646531336636313135353932303636326463346631376130386638616130626162346238
+37653666303062623630363165343935366634636561383330623734306232333634623435376238
+65303662646231373633636432316262323636343763333361333365653765323035333734303035
+39653235386666356335626161656331383161646464356232326337343832363665653831633133
+35393566613237303932643765643461303365343633386266363535633261346338346531663462
+62326635633866353035346131653239383236383065663130656636653936383561393235343761
+38363739653638313839356238373839663033333437353162613430616537333264303161613863
+37643062376361326364356430306361333230316361396634393466323130613564316331363739
+31363637333432336635383266656266653234633762376664326564623136376533356631393833
+34313334313863306335383233373331616330386630333362366332633862386664313065643563
+33316462616332313661613565313736633163376263646138396266373161393336306334653536
+38663731626636653361626166303561353336363736336132616631386539373637353237626330
+32656236623031663464313235313164336465663062313637613061316531626661653433313837
+37363533643466323733383730313361663633353638363039383264623365623830336638343666
+37363561396664653830656633623839643235376331323836653036636362383632656436383530
+61346536363030303062343064303330326136343962633762616663396337356139306565353230
+39393661623639363462343534616637643335366432346638653730356266396235613364333131
+37313832303136653036616535326165343064646163303634666164656330353866396166343930
+62653666363637303961326132366632323034616465303464636137656430393938306633386131
+33666161623234396230383830313630393737366430633766316436313131353730356432393164
+31666238353066366431633631633132313732626233393130326334366433663562346435646666
+32346630376139366464323039303730313731663330396233396564303335363561626566643665
+66616238636534663735333631353363396663626632656532303239343836663231633231313730
+66313534366435383866626539653134306165306436363038623961343664316233353931396464
+61646461376632313534396636316462373862313537306231636666653065363835333331353335
+65333362623336396134613139633839623233653032393436656132373133346562363162666138
+66393531316130666636353334306332386161363839363934336431656430616130393336303238
+37346435366339643732346461366462346530373664623363643439376130633236663965663232
+37356535653038356161626630633331613265643131646434306130373364343037373862346433
+30663533306334386362633665616534396431376664656432343235323734343463633831336563
+61316131636137316366613939643031326236303431616135353638643164356538353463653731
+30626434303633393732316135313565613834643365373236366434616538336265636661303765
+35666636646166353134383332373836306633663538366236333832626535386634613862346533
+62336538356263386437616234303264303539356364653163363439333164306331616361643564
+35636137356532373462333537333831356231363165353362626365333536613937343534353133
+39623664303461366637356138376534653337626161356661636437343166373961643066623163
+63343134363733633537353230663864336164313364656638383331393338346161373961303663
+61616362323630316331333233336165333633663166646432643263313232393438303232613462
+61653933326336333238363836303638366461313132653437643266386233316332386432313132
+37363665663136636564626665366438633961376162653532376466303136663738336166633538
+38343462656534393331313138633733386536316131333566323330383934663735373830373136
+34333830386633633438633835303766383835396231353064343963343939313662633962383339
+64613732633064353338326239653130633264643838356462343164356138386631613132646263
+34353937656261383530373335663739386161363731386365643162613866313137386266633036
+66383735613535303731643132633939373739383561323633333263393338653861663937333162
+38323530313938356531356130383931663265313864303439633130643130333032353363353531
+30646339623035643535626265666265386665323139623835303764643038363830616439373137
+63613464363039303632626630323531316237333632363063353861666535373962363532623435
+35376236306130393866303564353761386233613865636231626336343736343962636435373536
+31663832306564313131346136313435613538396662373939373030653465663666306236303939
+36626661393539356334333532323765343432343035363330323463656433633233393638323963
+31643532653831383166313266633530346437646131323538613339336633643664616537633731
+39316363383861353231333339653833663361383336343364323332646337383931646262333734
+31393837303866306631306165323362633434623566646561303938363033666536643435316637
+31363134313964636565666339373938313433663737653461653530616434363337303965316464
+30343162323738623864383131303366666234653565343262303638373766353931363430316166
+65343166633264613366633434656636656334626363393362303763646439326338316234653539
+61363961353565663737363430663932653666383835613937623465346336393835393037623266
+39373330366362383938616234386134326638666434366130336539636133653138366433303637
+34386465383661643963636630343435323363666266353165336665393363316561643939396630
+38626465383563353734653266393864323466616631663539616633656362306465353633336466
+38373166643133616466643661626539653539343234616139613566666632313634366662323062
+63373336353631313563646133353462653839353337643338643535613765353861393034623562
+32333239633938333133346632626137386631343738646264353932313037333838613737653063
+62383263623061393662373439653830326431653262386239336133316666326435396638383531
+65306537346539363766353762316161656165643566366630336464393831356239633035623963
+65343232376162316639393263363135343130653337386261663631656534353931383862633538
+36663165623261623835303635306538646636656636613365616532633731663636316466396635
+65623362376438336231646163313734623866393965326134363236396432646534633163626534
+61313739646561643062303934636638353535383765396266643234643331386131613037363131
+39353839666538336335323937616362346166666538663664313030316664353234363930666436
+35323763336133626662333939613266343933346465373030646237373537636133626239313663
+65303866346462326637306230643835333063623265616230636430373138346335663034353530
+34306365326231316262613163346632376462373637623661653734353534386434333030366435
+37323639313636313066663436323931366263643961613637663264373466303634333762383739
+38653763356532643339356232623334623235653738373161333362666262326438663232646331
+39383737623439346139643861643830306331386131653539333136653466353831376465666366
+31646337376639323931666562376661393633616461373835303134303463326339626631386364
+39343337623063356662393064386338353732306262373132623465396535313039393332313766
+34306362636433623261353866313766373638656534366337346439316334373962663732313633
+34616430613331666132393665376436303237343366373165373934386236653962623132393435
+36353165376532383232393235333337336665336338383032343062336366356135613636633938
+36383665356137313565663661383731363065326566323131663636633261323563653138326433
+63626639363933643238336434633663363336373032343432336562613464623135323930336461
+32306461363761333266623131623462376239653732616133376234336261313333376431643362
+37353638323235356663333266626135633437303837323032656433333761376366633634373163
+62613034636265623834626435666638613561303064633634376361623130343230353565303061
+34613264386233666632373730626138316437343338393030333364373930343239623734393666
+36363463336538646538373763363534623339646261663835313733373635303866303931356462
+62643438396332336334383839396465653538333035313863396237636534626633326231653062
+39353664386631656366623137386137383764336262343865356536363863666161373535393039
+61623039323163636435383039376265623232386331646439666634646537376231363337623565
+33636635356134333736626364303664333663366539343737396337363832373131303261356230
+36313232633062333736326664346363633632356435643764386133373637393833333738653634
+38616238326562633930393462643732393161353232363363303261623034313166613132643861
+38303836633335613036643966333433366436373164626332336532663339623263396163376233
+66646638656435626163373862346361303537333030653338383830363639613433363634346233
+62626663623833383530656263626336616434613033303833633234313637663436656332303039
+32336638663530663234663164623334386332633032346636373032633438636230653538343537
+64663236653164353962346331313834343264393534373366656162343064313339653537376131
+39623733323263343233623962373335336335326164646434623833356139303436353233393064
+35376665343761646632633234336239363735366333333834336235393663646435666530613138
+65633337383538343734343265626339613066306236336563646133363766656438353662613838
+62643466616134363139646131656565663739613961376537346366333564616266353866633137
+64643338633332306362303735313432653933323962343264323237336165336236653335306565
+37613166356164613566343739383862616437336638333163623061303430373431633439633036
+35313864643530623963666666386238656661343631353436313164646464363465383333373633
+35356632383639633933393439353866643663396432346461313561616430633138643263616433
+66346634393439643031613063666637663430326464353364333134663431653461343933376135
+33373462323934623765626536333332353337353566313464616638326463346536316131376564
+64643830623034633334366330356535346436613363396531386632376264306131373337333233
+65323162373337656565363234346435356466373264386336613534303033316135323763343634
+65623039383436373531343634316138643461313831373232626162306137353437376165316237
+62313239346233306434316537623761346161366365303235646361303736623139386336323334
+64383537656265643030353165613465303431323232316462636263663133643234373830383463
+31633635356532306237316236666264666532333733303933616639313236303838313532656130
+38396334363331333731396532646164386136386138633436623164623737636564343330396566
+38316235316436383961316661633139666634316662326136633231396134613634626563393334
+31613837633134666163306437346137373738313736323537623363323264383066346531336533
+30613233323235343933353435636532396364393963323966306538663836666438336137353432
+36333235353061363738323364646266666238323735613365663732373038393632386465626637
+37666362393661613336653365383064393130346339306234613562656130636232363063623164
+32373030623837616365383732626631396536613566383039373331393435373234323236633639
+36636531666666663466633066663439323637333739383833336136333633393339653937666638
+62653730386437653363336337306164376134363335386562613038393431363836636330383261
+32633630646435346331626339316334343830643463646138316363326335363835663561366531
+64636639336239356233333232643731616235306335323864616565363030343065393737373739
+63353038333132383039366639346538356234323536336434353439633761653233623861663430
+31393862656133633038386333633166383862613862336139363363313966343033643834393331
+64663532313138343639393839323431636165653431643061616364393961396237396437346131
+63343938393461323836393139373265656361363839366464626563343464343231323963373736
+38376566303666656332346136303630613637373863636131366130613062633162633063326362
+63343766363633623965666236653665353833613566633135343339643533376265373934353132
+32383736356431663930373537616637636132613265343437623337663163336463366162356436
+64343466373639316263373064306336376636616634316562393039663430396335633036613965
+31653639626433303236373365303561646364363337353264333166336235633265323266386464
+36343165653839623134623361303336666636656637363637616365343532363838633364373535
+64666532643136323630313137623330636238353932363763643937306561366635366337323130
+36383133386233323835353239663436653163616565346265316235653463373866363234353461
+38366438633865373030363466633165633539666334333732613461356339613332313163616233
+35313466616536313837306666326638653738626165663761306435353432666366383733366666
+36306436383964353236663736636263366430636566356132306439636161316331646162333433
+34393736323765613837306235666636666632636437383931333264376366643434653664613530
+63336362363064313864636364373164306432366437316261613362363164646134626635623032
+37316533643939396631396362646535633634343635353961356364303839376665373132613866
+62386232663663643230623930326666613731363863633765303634633865356164663538363262
+32633263313838396662306138333561646133383163646132643036393034383264306538653537
+61313034636537626236353164633732386264306366343439623736343765376636303336623861
+65393635356262386465633763366436353738343535376630666162353431333461643739323334
+64613937333665643161636133306336346466383765333039333733356266643530303761633162
+62656232333638626431333539306635313732373038653634383834666432356236333435336636
+66616433353130303437363734616634343630653831613635333365373433356638333438643466
+35306237333163383936636463366635613733323233356165396138303638363634623161623430
+30663964636634663038316338353265633763666266646330653166316335393138303535336638
+39623062643632386661373262653432316435346330643536386265383433663561343433323335
+38383834383962346134363138643932396530313235613338656238333137313230326362303730
+34303762636463386132363336303661313732623665613065343466356232313039656563636531
+32373936333161316338656339346334366165643134316635656330343362313864633836323537
+30383933356261643635653862343164313939326266343464626132393838333435303635323437
+31363738346465616564616639323535366332633466343462666230326531343961386265316666
+39333432666633353038656636383036356133306338346139303764366232613562396163623433
+66613966623862353065336366643035383363613131376461363565623565393237663138666365
+32643332393533363532333335636166636365643037636430313934363566616231313034333164
+35356237373637343562653133666531356130386539353736386431656566386463383463356464
+32633536336433343239383633643535316462333265303037633161363331373036393663613930
+38303732663864356139313235343536663261623466646435346630646430306532363230343534
+61323231656165666637633463363836363630396563373136323665343062346533613232363933
+63633364373430383232313366363166616635366665663738333566626338383030383330646466
+63653939326663616632353165316266313933313434643564306562656339363163636434623730
+30653938353730623036656439613265616133323238646139656236656639336433366464646137
+62366136333764346434633661393131386562623236396333326366343832643031393335373835
+62356639333137633934316632306235326630313030303133633665663464633334386366333333
+66656337313435373765626666646661386132646537653235613262373236396532386632386165
+39633130623739393066363834646264646539626464356534616335343139613736653830383831
+37646433326662643934626237306563376237643765336639363332376238623835333462623638
+32373735343066643731393931316661633864333061363463616139376430353064613035346235
+61643663313231316231616463626661643630616463363362326139326362346639346564313439
+62313335633164656261653335646439373031333230333635666136336333383930363836333336
+64313832393339316365396564396236666466656631316365343735356464353737333537313233
+33346130383138376166616631393064633930353464343235656637653039636639643139306131
+62396166636564353134303261316535666432333231366164663061343562643933633263393834
+34316430343635386139353065653262343165636163666361353866633939663436626636356566
+62323231666232333636323064303131656431636634666364623462326334386131356133303734
+65653464353130636165383333616566323261376438393065343038333838613866623762396332
+38626438653765663536656435633634366263616630353363363062356232343937613065343765
+38336634643064626536346262666331616635323266383034393566666233323736313362313631
+65343238363730633366313930353962343665333033393133656339376330653335323634626139
+30613036306636613565636332626264636532373039663336323537343237343737376437303763
+38643764616233316139303362393337363135323766356162636162373464323935633831396363
+66636666623430653237393562356238626465633937373465383064343765626364666666333037
+37383561656163316639663637613531363932383965313962396536376235653965336434326635
+64306261316530356439386633666439393037303365393036356437653165626431346133623939
+65323938326430626162383539303135633535393566333034616333363865653433666530356562
+34386264393030653438663038643939373536376466343739303364303634373038363439656663
+38386538323764656337376331373932306134323030376366323338613035376563333163616630
+31633866633437353364666531616537396535386638363935313566386564313964363038336635
+35663763393966333432663435303839656532343332383132343339633664333732363930373133
+34303634663163336466386436633639663434383864363339323730343062356534356439626661
+64386435353933316361393737646436656236353862653036636331366665623334393633376164
+64393538393931303934396361363266623535363066333065333839636132616638623131323935
+37386161396335363334363664623066613234613739363562663939616633353537623434646466
+31653635316232623234353435626363336565653034353564616235633739383435366138363563
+62333833333833343235656165333764353333333436346361613166346233356636663732376333
+62353066396331386532663538313732646265663931383739643231346131366339363432353261
+63393130316639666534633237326562343539623531663130373561643266383335623132373839
+39666562323264613635636664613763333838336636636462396237303266656530663332353234
+33343830623834636338656436386433383934316235313433346637306163666462656631626664
+62386130613834356435626262343363376365393465373765363764333364343664643636373062
+63376665346431653665626164383639383462613834663135363036643835303337333139393936
+32303062346637383465393861623163376665643865636362343164356333316231323839316533
+36643533376263613333626333396461636630346530316335376361383239333331363164356539
+33316435343665396532313030623939306364663465303732346333643261353965323538313661
+63653065323835326536626337653065653364623661633139343463303837366235303631653132
+38663237643464393735636166343738373435303764643163303932396161323661333264343836
+37623738343635646163653334306463343331626338373839646364393236306232303738623766
+31366539653734323265653163393833646132663362633763633562383432356238326366336239
+31303439626161613935353538356438363530373436303864356631343031653964653464373061
+33366665623661616131333936316362333034333631623464623165613062326235653737626361
+63376634376534633439386361393035346436343130323832363631306166636166386437306434
+30313163353030333838363232313266306430363663643765343338323266326661653032346535
+32326362646361643966376231393931336464663039666339366563366633306239346336616130
+32333361623239616466353633326465623634633962663833653935363865396561653831653464
+63393730393265633262303939353262623339373038323833303232633531663164313233616330
+61643438303935333539343766633133623664646563643332616430353963626430373461356135
+36333639663231316663336333386261653533636465336236346630306665613162363737623134
+63386132356131313063396533323961343333396465303866666437373833633761303338616235
+61356365653638363264346130363831303538313234373430333936353966343233663934323033
+65343132316338313266336435323466323863333831633165643365663934626162643762616265
+65373866303239393766353738626536333732653838303663353534396233643234386139303762
+63616162313764363530396534643237363361323132363635633863653261646466343636656531
+32633361383138343434343733626539386437373737623863343061363764663432643337393561
+61396665396534313132323761376661656263656539656364376263333734626266633166393436
+32653938633432376132346335353939313366383939626661383236393034313465616234323335
+33306230396632623738333732346530353965383035363561313235616431353331643334333338
+65383264393930373134306437393037353631313832383537386262633738326532623832623635
+66313434313730613835336335306134356564393366316230366165653335376334316331313961
+37343933386632653232343435393333336433323639613263666437626234303436313136346236
+39636434356165323232633132666434336432393831656238616334303838373831643936323333
+39613038663765396436303538383565306638376365666538373134366232626236383065643161
+30393463646661353666366630613465323864363262643366346163346239616462653839636531
+36396235633938366131336361636332383134343935383035663934386336386364326136373532
+32363161323830346364306334653263636264663034343437626637666238386661383464653638
+35646331636165373462303936393965633365323463313836306262333261633636656366323538
+37383564373639356264313033313664333037336638313439323662363134623666656632383132
+39646139336335373538646634633432646665653364653130643937323939336231356139666636
+66363464663164666633333831343335313931613934383131333762396239333265373432666536
+39303936373338333338653231376630353333343762366332616363666138336435383835333330
+37343662336561336136643562336234396566333762633132663631376363656333373435623762
+31633031393635326163373130623833393263323864343739636630313165313034303831386461
+31383539343961343131653431363739356332373765306234363039376330653934626665326662
+62393136353339663336663837623935666535333061653438326631306438663034346231383362
+32616639363335393137386436363834363938663639393237383630323764656265656463613365
+35323361643562663737613762323737326164373466386238333938646632663537386337323762
+36336565366165376337613738353263373637386433373462353730623065306339616239633562
+31373939336234333835343563663238363635343565356634633662393635663037666364373132
+32333064613834326535376264333063323764353766383063323633643663343738363636333362
+38633738333735646236333061386634316634613636393633376462393561333839666431343863
+34646238343139336563386537653563366230626638303961373635306137633366643332616330
+36363565333030636434383430623361396434373332326234316162393762393937626464396132
+31623966633330366435313335313966666564346366656230376665303665333735666331363661
+61663937376339393765313030626138383331626561376665626364323464366135626564666362
+33396561626639656534646263333430333838303866653635303939376435653764616538653462
+63663932353930626138633237326465653635383931363331666537663338613939326565626263
+30653061376137326235363930393064343336313936643133623836383032613963343336613961
+38663163323164303638656566633135623863316330363463336462326435663732643031336163
+36616264386431303235623932396138343635623463303635313865363535643039613230393161
+32386463376537313733313666636462336439326530303561373561316237653164353531346635
+35636633653638316635663133346463383462666531643734663538613539326362336161343139
+31616230393630313139376138386330323464323263633331326631333535323762646661383835
+35623938336661313234656633393766663134653232333962373066323165656530376435343935
+37646662316234376139313232313363373333346262626566396330613839663735376362666131
+64346330656461613536343933316138363833346234626338316633393630656634383830363336
+39633636323235373633363366386339323737633230383834646232643361323238643736373239
+35316639353139396138356436303766363232666639346465333231363733373064333636653163
+32303639656333653230623761613566633236633034336465336638303163366364356130346264
+32353366376532383261353865323065623933343039666462626631353766666466346661356436
+34616261343635353665323865666538363236393635353066356438623262616238373638653230
+64316633653433363130323463656439646465333464373065653434343362346632666338363438
+63663339663762643936303235333264633933396565323536633839326363343838393061393264
+39613863333862376536353061353333366239643662623363316232383034626635346438643834
+65396139316432653765316135343132666463343939313061656233336135666361636166366665
+38306332633465323039326130646638323561666365306337613562616232326261363530306532
+37393933383463313331303036373565373365363936636363383530373130316666326238346266
+66353066393563636534666530396236373332616165323831376663383438633162613536613937
+63343636343031376539616534333830356335316636616361393137383065303166363838636339
+30363930396334666665633631623338396339303363343061356630313335656361613839623135
+64343831343333363538363131323031313238303839653534646436623738373337316265643039
+64396632666231323663623237356339643566633833326366373265376139393666373932373662
+30646239306561636365653230366230376662613439303030303362616232383163346463303161
+37613334363134363535613764393336666666316439666434663737653635643137363262313037
+33363230396165356235346635386461353538326230626139643834313032326438393031633434
+61336362623530366266393238376135316565623665623033343230313538626232353631363234
+37353033616238396238383464643437363531313364353562386132646239366465306330646333
+63383139653232303033666237663530376161363562386334376366333037373634333139343863
+65373637643565316336303437396566363332386363623466663031643064393064663533343064
+61376233343163376439336565663136356663336361313065393233613038656265343630373263
+66363639356462663335646339623563356661396334663039646637636366343231366461306438
+34326635373438626662343332393433313137313064623830353334363531373638316233646331
+34653838326566333637336636636562313964346536653463353632336532353163303361303632
+33663663393366383938623764313963393536306362303035613162383535326238303562323434
+64613831376662653235313666663430303761313031656534616132633462636265393338313832
+35316331613961373332366131646533306231666339616133366437623435316364303263383666
+31663934653362626234393437396432313230363862353437663730313865306138386537663735
+30323566336362306237656465666564323261633734393063316638323935346138363364303266
+34303339393035343737666264326165636635363932343738373233666439363562356630313564
+36616534356539356263616264373139336666363761636565623066613662326361666163663830
+37623131346336353837633936643135386262613137303932373263356237653963326465393765
+32383039643863303364616166396165336435396235623964313363393836383566326362643330
+61323837376565643963393533363861373735396135653935383964643866623531386564356535
+65633639656465306266343061613463623464636333613334353863663232356137373831386565
+38313732353162383062383765323938323031363037333466313739623635303634323931303566
+62396263396638623066346661376532623931316662326466386164323261313465666139616336
+30303537663130646535386531623266326533383662376535633332373662323031366434383730
+39393131663939353137616430386264376136316537373933663162663532616633616230393765
+66386264316461353638643837393838346232646465363031623934656233616531346132373836
+61313562616332633633333066656463653763333136613033323966313132303665613764336230
+35353265323166653135353031363033343561343331333630363166386663643635336561653661
+34643864646566636139353635323166643962626233613139663231646665626563376339653566
+38366261346338393938323664663034373531626236643734653162656137653639666538656631
+30366231366632383836356566616133373935623430663431666364386536626436623238613331
+35303939373132336235633265666366653262313430636434336434343864383230656264316336
+34393730663162313433303563333936333430303261343338656235376236306133323836393337
+61643534386334646265663131383863313962623438626634636633313936636638343639303337
+65633438363332363939326463363664343232633963323865356530616461663337323366366534
+64333861353139323934393263323236623831326164373935326662623039366534626263346561
+32383833656138656438616634653237323263313763326563316238633763313461373432336639
+33626665353536616333356639376138323231613130303030366266663037353464656230616436
+61393238343538623636303661633236303466633464646465396464663938356138646631366636
+63366231323461393539366131313761353363623664363265393665323632646237383432613863
+64626335393362613864356162326335383635393166336136373066303338646638656138343663
+65623863646561346233333562353639333264636261383836613536336365356563393938303936
+30643465663663323262323836316331383936333962633334316666623735306261353437623739
+34636338616537376630633637613461306233636666373931656637633662373133616335626463
+64393864333336303334306131373131343830353661633937303834336561636461346538643231
+61383437383734366662356534623363383030663761343532373263383331386264626664323932
+32656265643034383436633831343335303035313537323961643239363638623636633161336164
+39303266383761366161613961366237323430353433316166306437396635663331613632363838
+65633266303465333562316637336233333938353036303263383964636138353737666536363638
+66363361363330303961323935353732616665633333633133333638363339373338326339323964
+66336363626138356461373633333039373638626637363866316231613664356165666337663238
+66386632663730366264313839326462623138393866623635646264353630383234346635383961
+35616630353931353239333863613139356164643365643635396365613432613937646465383863
+31323666343535343035323838363962636333653537316339326361323962353631353935636633
+32353838323035646362656664616434303738616533376364653434376130393730383137633263
+63663231386433366130363465343435383331313164336437343361343532323131653162646331
+61373234346631373534376537643530376337376130393638623537363463303465666638623338
+65393632316434383835613431363539303739623936363763653562376635346637376530613539
+62366436373361356361303862363937643466366362636339643230376536656462663133383564
+65386561393736383930636561336330313832316562636439396366653430663135393632643463
+64356132333036376631376432663331663165353166336665306464653435316632356266613763
+34663737356237303138373265626338393736626230343162636438393339353838356637346464
+66633966626662363632306430306430623535626139333465366530383730396136323363333766
+62613664326236336263336132613435666166666365653061396536626635643564353633393936
+35626131366562633464356332353164613032633438373933323566663766626362626136363561
+39303531666538343362353564666130306232663464303262383135393031633430653264393064
+63386663363938393738623230353664356635373732653832326433666630353061613364383766
+30353434356131313864393337343566333830626433336235346132346638376237386634373766
+62363631326131336137646462656134623933663137613131663366356437376362363536666438
+32636138393035393563356137316462393831663834363362353939646231336230646430306665
+34303063633636323030666564653633623862343632333364633965336331623566346532303266
+66386561643932333662653861373537376363623836363761376461383864626334613838633332
+31626563643361633362306331363734646330343837393234636430333064393564613433646261
+65623536353464383261303433366138666232616136613635326437383032386431373437613934
+32323937356533666263366465623265656635663062643935666630656331326537616166363962
+64376364636630363036323330366138313937393937626533313664323734333666656433313635
+36393964633934613663343339303838623163336664663931636562626435393463653132313234
+39383666613433303536653632326464303366633731633064363561623463306539663037626361
+61373039636163616332356266306563646334376161336630633737386237613764336262366439
+66623937306338373636636438633065613231626332363166333235356665323432346434396361
+64313562376231616136353730353364326239346166393035363536663539326638333137643631
+31386430313136316131303938316534393565336131366436353964323138383334613863626364
+33653933373933653939376236376635306263643038626537636539336631366163373336346563
+32313135313631396435613536376234663033643738393638653865663235336435316335393834
+33623966613662393965623530306330363664346330643833663030366364646533333737393938
+33313033643566663064316233393464623233633531343235346534356233323333333534383563
+66643862653939383838376336386138353863636233396363313532346131633335346634353264
+36316562623132376337636161343430643365363864643139643235353234396333396339346331
+36303035663466333031386362306664653039386566653762663633653731656365666531613132
+39356664316532623466346564316636663335666439383265393063656436306666303465306462
+31373438633838306235303561343031356333373164356662613062666536653565356239616233
+62326234643233366261373337376533643234376432313634633630313635306466613130386235
+31303266356431323636363165663131383065613531313165313930303031393236336661303433
+66656138386661396130306230376262633862343461313766633632636362613464633766313564
+32636337303538616536323462323236653265303232633138393531396536636563653665613131
+63653138643835306237353939633066323135336233306432633632653365336439636663303065
+35336530363735626666356135626537313431303139663263383265383831316439636335653864
+31613839346235363239613039363935613338393333306235303161623135326332303833373039
+65653133653063613865363637646239663832643239636239346462653531346562343237623963
+64373133383964333963633466386166643733653534326635633161326637336230646231323330
+36376634616237333063633166373631613036316536393934353736396662333563636664633336
+64643966663465316166323833393231353535346265613138363339656332306161663462353062
+39653731316334313465333637393430333634633264383930656165303638396634303133613338
+32633166656564346538313935633037633964363161346133653063323037313064303237366266
+37643564643265383262616633373563386264646463326530353265393837363861343932346433
+38393331393731396330633961653532393663393038336332383065346631646361656262393839
+35363065366361346364633261306164366266653365356435333236366533316137386233393762
+65313238363639306639306138303934623234623532636563336239353363656631346334316436
+35643665636266376634656235343866393634333030363733336237633234666662616465666164
+62393461626236313932393439623135613236653534646662633332363736306235396262363738
+62333830303632386335356665343330316461663462316332373333626466383863393862623335
+65656230316365396637383431323166353031666264326232353733366463616636623138663236
+35363563616265616535376437646563366666346262393339313032383361626438613665383935
+65626233663162653062386364373139353337643563656236616465613063376265653435613062
+62323835353432333665313337643930616238386136303834646136366364663238663436666337
+31613538653464383534616230333962343532353264623063656561643164373762653265383761
+35323139306532653165666262663531333933623265326266356536346232626534633066343762
+38373236393563313731303733323236643634623261313365663330356464373933343735323730
+36323234323033313937643662613764306264626162383031393638396162383532396535396664
+62343236636563613038356438613830613163343531663430663338386632663036623438366630
+31326263376432643761623132396433393637666531386261646630326439386236326363313933
+30303864663833363962326263323532663836663636313632613637336637636533643433643637
+38666336306137386535303630313230386238666632306530623439363238306661346533643663
+37613263613766353937326637313231336534343737316430333937383831616264393435646262
+66386235356536616136376136663137303163313063353031326532363039343937376533316134
+66333065363233346639633830643961623633346436623366393438383438323566366466623763
+32633036353935653933636566343138386463633265396131373330656433336562323036343363
+33353362363563613561303561356638616462396164383839623664396537316534393237393965
+64393464326266323332653439663062353832363862663165643830306365303336613161653233
+34653731666530646638646336353838353836306231613533646561653561643861313138376133
+65656430323662346663333534393631653162613135653862613833626365333936373661366232
+66616634646139616438313832646437653532366436643962343234376466643763323434646238
+38663531353835373238313337313238396661303736653631653661626564653731333737653864
+65626461633761656337396537386138626231333731353432333632323034396365646630613135
+61396339393734353866653265646164343934313235383736343939616132313336346338616665
+32656461353233613863366463346637373562663164363030376465353736353838323562626238
+65393632313763386361363330383837383633316130366331316337396566393963306630336431
+32666334386135363538636433613065646337613033656662383336373935313963343861663839
+65303961643131323533303937613666333133656162343438383134653831346139343362636564
+32396136353338333932633561353439626561383563643830316637383739666662663536356430
+37386139346539613066303363306134333062323235363961303137663265633430663064396437
+63373035656262353030336261376366653064353832306633306132376663303761343063376166
+35323237313837303135636237393130386234326464636466383936373235663466326635306636
+38663931646164393736646535306162613963366131623738343138323438363839323132343862
+36323937643033656535393135656461616531653331356565396337346664343865393761383161
+62353337396638633635373461373161653636343637643337373339646635613161643537366166
+34383537626361306438386262613035323761666161313938643230306666656135646235623337
+31663764346365616233353434336565613461646531356265326163363165666433663763323332
+39353538346239376162613866333361656338313330623865613939393533386138613263613832
+61663934306236336230363932666533373230316232316339616433393933666639353938376266
+66653434323934386361646531383762343134366462383935373631656136373863343364393136
+66653161373064616337363738613737376664616232653631303563303734383064623336613130
+66616539386236666331323835376430613736306131366137636462653362663461626632303535
+36356563333931386133653664613566356336653830663533613031633235343865616636316165
+64313034383133646563346633343034626635373838623162363630336464626162383338633339
+31363163643038393739663130663030636130396636643263633864383835313132323034623565
+31303337616461626161356231303238323966323465383535663465656536336361353234633737
+32363433333533353539376362656138383431623638653633363238646438306363343434653738
+38323866626166656633306235346136643834633339313037373564333564636634353863353266
+33353966303064653737343564626238396635663034666537623039363835363736373039303134
+63343632306635633935613961383261393765326462363137343634353839323130626333623730
+65643630353833653336393430643335373462313232376463663731323837313466643431373338
+61333865343232616162646462646262366634663036303730333262376133653862613832386231
+63383761666363313032646132383030313537383265663132303036343463333065363036386330
+62396661623939616463393839356663383361393534653239616538663331363938666135343162
+33636231383966393565313931656434323465386561336231343266663035663763366435386439
+65646633623432363036356434333637393134616361623133376638373731613033313333663133
+32663432633564623937353665396562383132663261353436663665333334393336623464376639
+39663662616161663135346133383535353861306439363365316538333331636432363736343633
+36396633653762643635656636616535323731353039626164633434653464366466656631306139
+35376462613662353164393536643339646430303634613937363864323536666339346137363137
+62386363303365643266373563373738356631643861383938343764643465663033383763343930
+36373737316164396530643538353065616538353132653531623039613036623365646332643338
+65393234643037383436616530323133656439363837376662613434333333313937633435393639
+30396630316338663966656663343066346631356638333136363632616264323636393232616530
+38323164323164656566376462313336373362383864643863313265616334373464366139663163
+62383166373562363139353938653335656566633639366631643834623632353163663266313661
+36666162646234386331656432363633623837663165356431396536313233303932306535316261
+64383562633036366533656263636233623333303630616432383537396262366131623266656335
+38343463343262653835313263356435616661633966353636303865383961336232333166373632
+66386339636463636135666533613963383437363033343634313335303130323139613434633338
+31356661313436326231353762343731613434316334623666393033363865323035643334383463
+66393465636536643966363034323264623137383032366631643931366363636636396234343865
+65633866353263616465356436393863373531353062356138633133313934626138383762393133
+62373366383966313532396265666431663566366332383737636536633961333130616665323532
+63356461393163343637666330616464633337376332333637336566663232333739336335666531
+66666132663962333837643239613063356262343134303637653936623566663236376261383661
+37353961356335373561373839663631396330376161653933623363343965623262313766323237
+38316131376563356463313864386165653032363932656162323131333937643131313761633038
+39623330666238656261343138353065373136346436616562653463316235393935643837613661
+35323538383262316332616534353531343664346639306134663336323562636331366330616335
+33343466303438623663346166333366616538626531663539363834616366306462356632363836
+62356566336263666238633832376363323062653163323135646661326234353565623534633232
+61656135336563326166663664343064386566316166313363663732636665313630386436663538
+38336666393836383432643062383133303066633739343936333031666230353866313230636565
+32383835393739623261626238626230633738666639646663633031366333333630393630643137
+64646537366162366265353165373561343934653238623462356230633230356333633962323161
+35376436663166383534663230393839363939633466613666363436623331343633356365313136
+63346262633132326534623138323239386662333834653630643238373930643365303666613463
+37653333663239636363383163353863666236363565303266356163316636373730353963663736
+61323035383130343666636135666235333131373163333564373031643938636365646631333762
+35303230363339356139346663366231653330333534376466633062643533356436333235613363
+39303833313862333963663338666435653438396565303565386561346237333839353931306562
+64653162346133326138326639646636313061356532663237343163333432326632326632313162
+65613932373366353039346136306531356236343733633165376434613161386664623462623638
+30636135666535643630333031316666393835336162373436303866346338393634656237666466
+30626566373939353130303230336638613762373537643235666361346535383066636462653434
+33636266653937363763623861393834633231396663393930393161383034636339643938306162
+63643432353661646236626361323834323564373337383333623333616666383934663034396562
+33323833343765363639383035613034336362633666666231653864613835616339306134363637
+65323531386139613437643030623134363238643162663031336538306461396537623039646434
+62326565663066633134663866376535326261376431353337663539363933623665383562346166
+66616561666338326164623936623864636366343533333762366163303332383337356635616561
+62396363383162616664303865356131353962383431373835363061353566383430343932343331
+32386334303665616234353965646263623830623065373736623239313266316166653932653232
+35663030343363303535666165343733353631306334356162613664613539336566663130323336
+66353239653764613830383465646236306137303932333739383734343334343132333361363937
+66356561333439303531303034626535323762326339626431616262373266623837333634626132
+38346236313563366631396263363034663861343631373035663932306538643037653735346566
+64363262386136396663633064323061343032303430633261363333633862333834343132303534
+35376366663931376266313634643638383665633737366261353932386338303139653363363365
+30636435616532636130646364643336343235383462386638626665373533353431393836306139
+32373566393665393632303064343032663635653430343635336138643666356430393239303731
+39643864343732636465626466613037373333366461363166363265643162363337653565616264
+39346336313535323134376332373762393432363330343562316337623034306264383935323639
+32333338396532623431636463626464393662316638376638643635393065316463636564363664
+65623836356666343066663135376634383233353231626262643431356263616633313663633930
+37643830393538363565656632666133653336356462646666323366633133326466666137313732
+38373933363837636236313463613631616331383536366335393836303632373431306536386539
+63656662323562373534373132623732303164386236353238313464353764613263616634613335
+36383637383733636630353337373965343565343337316262623061343562373833363162353363
+35306333623737646266316638303366346364346234303331356639656131353437316232623165
+31643433396364346638303464613833373566316565303663643436343861616565333637336137
+65653831323637326634393736363834343363373130333938376264366637393662653166383963
+31383862613131636161393730336266616132623162326462626264343066636563623730373734
+38346366383137623463646630346130656462386234383436393235326137373734343863656264
+61316237383937656130646261626334666639653439373935353836346162383762336338353635
+63396437336537656561353136616463326166373732356230373832623631616239333936333431
+32383936653164383161326334376230353664653165643231333536623165373461663330653765
+64303932386230613532636334663533366639356166363863316538633238656232656332663461
+66393635346138303539343337393236643533336565323734616639373437326465636439663931
+38333863366663303331313133666134393666333239623538383333346639373932363362663636
+34396632336638343034356632633463643862636533343762633837346532663866376631623438
+62613266633061366664613339656137336535386230393562653562373263653338313263663838
+37376330326239316561306534386133313036663566363561333431623038656239383262386462
+66613563633134303937613734346236623037343436656338326166656431663331663064623835
+65346634656438353734623865353430323634303139336531626638626234333666356235383436
+36333361616631383932313331646565613161336366636164663938613231373233393132313738
+65636561663537376230653131356237646431646132396662383230343837363963366564303432
+65613164333332356163656463646338363235636530383933353432633934346162376339356539
+33376238333433656234656665303937356433363761666565633761646164633731633235353839
+61303330356331616165616463666539383832373032623466343732366561333365326663313763
+66363961313732613533376463323561653631613264663566633936616464313830333235396462
+62353536356239316236373562346562663964383730383362336464366234663530303832323734
+37343731616266323330393431373631636434336666616538356164316638646461336461633730
+38393162373533623962386562336563306631663062396531376161363438633264383862303062
+32613663363263303330306433326137346136613435373230346636396536376363643538363135
+35623831326434363232383163623931396339353363343332626162363234633430386634316564
+35653939653631376236653139626538396666313937626235643835316263313238653134343934
+65386161656339346536666565383166373937346131383662383433346134343237363833323935
+35343534343235653336626431346232333063373064616135326535613331313663353361623736
+34316238653463663638343338343163313936333232386339356135643165316137643039633461
+63313062323637663034636562633238313639333634663566623164326339373633343566393539
+35626237363266343433313064333962343431346564333635343035396231666564343362386131
+61313933356637313735366166326335656339323439386334323639366561386463656633313561
+30646135373134333536393135363865643932616235653838646236376630383264303038333664
+62656164633135663039613633323437366264376437333338363363383564356338636430623431
+61306638643833373863353465336132653730306331623730383664306464343735303738346436
+31373538383938666335396336613761636639353733663232333639356435366135616331303137
+64643934343464623766363562306434313431613630373338346433663436373437613864616634
+30653339376163306563396137616239663834633233623330313034646431613630353634653366
+38663563323531343530333630386230613134656338376536346535376265303362636236626439
+37333239656465623935663766616139323931353532363664313232623639636239353331373335
+61626665323065333163656661323065336336306366383865353866303766616366353137383962
+64613934366365383839663262333738396332343862343838623038633763383934336234353463
+31393561386433313262646239393832346666393835656361623064383339633662303466623965
+32613734383630633261636337313533353163363764336265623136656636303536306337363165
+38626663333831306332346231356435343039393739393135646531313331303239613331383835
+61653432393437303063323963393732653435383130646662376564333839343033643239376138
+66643833383066353439636638653462363930646566633635386338313733356562336331326166
+38646261353161326162303338356565343836633133336465666434666635323262383439383562
+31613766666466653835353430636531313033316664653664303038623035646366633639303634
+62316262313566613864613737383163373631346366366666373831643833373966353537346530
+61623865366262383934646137623532316161643534303832376631656530626334386233363337
+36306432643634653961653931376437323437313634623638323836623238353135633933303731
+666562613235653164366237333861333330
diff --git a/fdio.infra.ansible/roles/ab/defaults/main.yaml b/fdio.infra.ansible/roles/ab/defaults/main.yaml
new file mode 100644
index 0000000000..adabf1464c
--- /dev/null
+++ b/fdio.infra.ansible/roles/ab/defaults/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: roles/ab/defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ jammy:
+ - "apache2-utils"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
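Note: the "packages" expression above concatenates three lists keyed by Ansible facts, and the install task later flattens the result. A minimal sketch of how it would resolve on an Ubuntu 22.04 (jammy) x86_64 host, assuming default fact values (the resolved lists below are illustrative, not part of the role):

# Hypothetical resolution of "packages" for ansible_distribution=Ubuntu,
# ansible_distribution_release=jammy, ansible_machine=x86_64.
packages_resolved:
  - []                 # packages_base
  - "apache2-utils"    # packages_by_distro["ubuntu"]["jammy"]
  - []                 # packages_by_arch["x86_64"]
# After "packages | flatten(levels=1)" in tasks/main.yaml this becomes:
packages_flattened:
  - "apache2-utils"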
diff --git a/fdio.infra.ansible/roles/ab/tasks/main.yaml b/fdio.infra.ansible/roles/ab/tasks/main.yaml
new file mode 100644
index 0000000000..2a70fd1d1d
--- /dev/null
+++ b/fdio.infra.ansible/roles/ab/tasks/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: roles/ab/tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - ab-inst-prerequisites
+
+- name: Inst - Apache ab tools
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: present
+ tags:
+ - ab-inst
diff --git a/fdio.infra.ansible/roles/aws/defaults/main.yaml b/fdio.infra.ansible/roles/aws/defaults/main.yaml
new file mode 100644
index 0000000000..5b6978da51
--- /dev/null
+++ b/fdio.infra.ansible/roles/aws/defaults/main.yaml
@@ -0,0 +1,26 @@
+---
+# file: defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower] + packages_by_arch[ansible_machine] }}"
+
+packages_repo:
+ ubuntu:
+ aarch64:
+ "http://ports.ubuntu.com/"
+ x86_64:
+ "http://archive.ubuntu.com/ubuntu"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ - "linux-image-5.4.0-1009-aws"
+ - "linux-headers-5.4.0-1009-aws"
+ - "linux-tools-5.4.0-1009-aws"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - [] \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/aws/files/get-vfio-with-wc.sh b/fdio.infra.ansible/roles/aws/files/get-vfio-with-wc.sh
new file mode 100644
index 0000000000..02a3139b66
--- /dev/null
+++ b/fdio.infra.ansible/roles/aws/files/get-vfio-with-wc.sh
@@ -0,0 +1,203 @@
+#!/usr/bin/env bash
+# Enable WC in VFIO-PCI driver
+# Tested on:
+# * Amazon Linux 2 AMI (HVM), SSD Volume Type - ami-0bb3fad3c0286ebd5
+# * Amazon Linux AMI 2018.03.0 (HVM), SSD Volume Type - ami-015232c01a82b847b
+# * Red Hat Enterprise Linux 8 (HVM), SSD Volume Type - ami-08f4717d06813bf00
+# * Ubuntu Server 20.04 LTS (HVM), SSD Volume Type - ami-06fd8a495a537da8b
+# * Ubuntu Server 18.04 LTS (HVM), SSD Volume Type - ami-0823c236601fef765
+
+set -e
+
+TMP_DIR="tmp"
+
+# Kernel modules location:
+P1="/usr/lib/modules/`uname -r`/kernel/drivers/vfio"
+P2="/lib/modules/`uname -r`/kernel/drivers/vfio"
+
+# This may return an error if executed from inside the script
+set +e
+RED="$(tput setaf 1)"
+GREEN="$(tput setaf 2)"
+
+BOLD="$(tput bold)"
+NORMAL="$(tput sgr0)"
+set -e
+
+function bold {
+ echo -e "${BOLD}${@}${NORMAL}"
+}
+
+function err {
+ bold "${RED}ERROR: ${@}"
+}
+
+function green {
+ bold "${GREEN}${@}"
+}
+
+function get_kernel_version {
+ local ver=$(uname -r | cut -f 1 -d '-')
+ local ver_major=$(echo $ver | cut -f1 -d '.')
+ local ver_minor=$(echo $ver | cut -f2 -d '.')
+ local ver_subminor=$(echo $ver | cut -f3 -d '.')
+
+ printf "%d%02d%04d" "${ver_major}" "${ver_minor}" "${ver_subminor}"
+}
+
+function download_kernel_src_yum {
+ echo "Use yum to get the kernel sources"
+
+ bold "\nInstall required applications and kernel headers"
+ yum install -y gcc "kernel-$(uname -r)" "kernel-devel-$(uname -r)" \
+ git make elfutils-libelf-devel patch yum-utils
+ green Done
+
+ # Download kernel source
+ bold "\nDownload kernel source with vfio"
+ yumdownloader --source "kernel-devel-$(uname -r)"
+ rpm2cpio kernel*.src.rpm | cpio -idmv
+ green Done
+
+ rm -f *patches.tar
+ tar xf linux-*.tar*
+ rm -f linux-*.tar* linux-*.patch
+}
+
+function download_kernel_src_apt {
+ echo "Use apt-get to get the kernel sources"
+ apt-get -q -y update
+ green Done
+
+ bold "\nInstall required applications"
+ apt-get -q -y install dpkg-dev build-essential git
+ green Done
+
+ bold "\nDownload Linux kernel source with vfio"
+ if ! apt-get -q -y source -t focal linux-image-$(uname -r); then
+        err "Cannot download Linux kernel source.\nPlease uncomment the appropriate 'deb-src' line in the /etc/apt/sources.list file"
+ exit 1
+ fi
+ green Done
+
+ rm -f linux-*.dsc linux-*.gz
+}
+
+function download_kernel_src {
+ bold "[1] Downloading prerequisites..."
+ rm -rf "${TMP_DIR}"
+ mkdir -p "${TMP_DIR}"
+ cd "${TMP_DIR}"
+
+ if apt-get -v >/dev/null 2>/dev/null; then
+ download_kernel_src_apt
+ else
+ download_kernel_src_yum
+ fi
+ cd linux-*
+}
+
+function apply_wc_patch {
+ echo "Using patch for kernel version 4.10"
+ local wc_patch="${BASE_PATH}/patches/linux-4.10-vfio-wc.patch"
+
+ if ! patch --ignore-whitespace -p1 < "${wc_patch}"; then
+ err "Cannot apply patch: ${wc_patch}!"
+ exit 1
+ fi
+}
+
+function compile_vfio_driver {
+ bold "\n[2] Patch and build the vfio driver"
+ # Adjust VFIO-PCI driver
+
+ bold "Apply patch for the write combining to the vfio-pci"
+ apply_wc_patch
+ green Done
+
+ cd drivers/vfio
+ # Configure Makefile - build VFIO with support for NOIOMMU mode
+ bold "\nConfigure Makefile for standalone vfio build and noiommu mode support"
+ echo "ccflags-y := -DCONFIG_VFIO_NOIOMMU=1" >> Makefile
+ echo 'all:' >> Makefile
+ echo ' make -C /lib/modules/$(shell uname -r)/build M=$(PWD) modules' >> Makefile
+ green Done
+
+ bold "\nBuild the driver"
+ if ! make; then
+ err "Compilation error."
+ exit 1
+ fi
+ green Done
+}
+
+function get_module_location {
+ for p in ${P1} ${P2}; do
+ if find "${p}" -name "vfio.*" >/dev/null 2>/dev/null; then
+ MOD_PATH="${p}"
+ break
+ fi
+ done
+
+ if [ -z "${MOD_PATH}" ]; then
+ err "Cannot find kernel modules location..."
+        exit 1
+ fi
+}
+
+function get_module_compression {
+ if ls "${MOD_PATH}/vfio.ko.xz" >/dev/null 2>/dev/null; then
+ XZ=".xz"
+ else
+ XZ=""
+ fi
+}
+
+function replace_module {
+ local installed=0
+
+ bold "\n[3] Install module"
+ get_module_location
+ get_module_compression
+
+ for name in "pci/vfio-pci.ko" "pci/vfio-pci-core.ko" "vfio.ko"; do
+ if test -e "${MOD_PATH}/${name}${XZ}"; then
+ if [ -n "${XZ}" ]; then
+ xz "${name}" -c > "${name}${XZ}"
+ fi
+ mv "${MOD_PATH}/${name}${XZ}" "${MOD_PATH}/${name}${XZ}_no_wc"
+ cp "${name}${XZ}" "${MOD_PATH}/${name}${XZ}"
+ bold "Installing: ${MOD_PATH}/${name}${XZ}"
+ installed=1
+ fi
+ done
+ if [ "${installed}" -eq 1 ]; then
+ green "Module installed at: ${MOD_PATH}"
+ else
+        err "Failure during vfio-pci module installation. Perhaps it's not provided as a kernel module!"
+ exit 1
+ fi
+}
+
+###############################################
+# Main script code
+###############################################
+
+if [ "$(id -u)" -ne 0 ]; then
+    err 'Please execute the script as root'
+ exit 1
+fi
+
+cd $(dirname ${0})
+BASE_PATH=$(pwd)
+
+KERNEL_VERSION=$(get_kernel_version)
+
+if [ "${KERNEL_VERSION}" -lt 4100000 ]; then
+ err "Kernel version: $(uname -r) is not supported by the script. Please upgrade kernel to at least v4.10."
+ exit 1
+fi
+
+download_kernel_src
+compile_vfio_driver
+replace_module
diff --git a/fdio.infra.ansible/roles/aws/handlers/main.yaml b/fdio.infra.ansible/roles/aws/handlers/main.yaml
new file mode 100644
index 0000000000..d55db1c22f
--- /dev/null
+++ b/fdio.infra.ansible/roles/aws/handlers/main.yaml
@@ -0,0 +1,20 @@
+---
+# file: roles/aws/handlers/main.yaml
+
+- name: Reload systemd-modules
+ systemd:
+ name: "systemd-modules-load"
+ state: "restarted"
+ tags:
+ - reload-systemd-modules
+
+- name: Update GRUB
+ ansible.builtin.command: update-grub
+ tags:
+ - update-grub
+
+- name: Reboot Server
+ ansible.builtin.reboot:
+ reboot_timeout: 3600
+ tags:
+ - reboot-server
diff --git a/fdio.infra.ansible/roles/aws/tasks/main.yaml b/fdio.infra.ansible/roles/aws/tasks/main.yaml
new file mode 100644
index 0000000000..b5132c1909
--- /dev/null
+++ b/fdio.infra.ansible/roles/aws/tasks/main.yaml
@@ -0,0 +1,124 @@
+---
+# file: tasks/main.yaml
+
+- name: Edit repositories
+ include_tasks: "{{ ansible_distribution|lower }}_{{ ansible_distribution_release }}.yaml"
+ tags:
+ - aws-edit-repo
+
+- name: Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: "latest"
+ tags:
+ - aws-inst-prerequisites
+
+- name: Switch Kernel At Boot
+ ansible.builtin.lineinfile:
+ path: "/etc/default/grub"
+ state: "present"
+ line: "GRUB_DEFAULT=\"1>2\""
+ notify:
+ - "Update GRUB"
+ tags:
+ - perf-conf-grub
+
+- meta: flush_handlers
+
+- name: Load Kernel Modules By Default
+ ansible.builtin.lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "igb_uio"
+ - "vfio-pci"
+ tags:
+ - aws-load-kernel-modules
+
+- name: Add Kernel Modules Options (igb_uio)
+ ansible.builtin.lineinfile:
+ path: "/etc/modprobe.d/igb_uio.conf"
+ state: "present"
+ line: "{{ item }}"
+ create: "yes"
+ with_items:
+ - "options igb_uio wc_activate=1"
+ tags:
+ - aws-load-kernel-modules
+
+- name: Add Kernel Modules Options (vfio-pci)
+ ansible.builtin.lineinfile:
+ path: "/etc/modprobe.d/vfio-noiommu.conf"
+ state: "present"
+ line: "{{ item }}"
+ create: "yes"
+ with_items:
+ - "options vfio enable_unsafe_noiommu_mode=1"
+ tags:
+ - aws-load-kernel-modules
+
+#- name: Get vfio-pci With WC Patcher
+# ansible.builtin.get_url:
+# url: "https://github.com/amzn/amzn-drivers/raw/master/userspace/dpdk/enav2-vfio-patch/get-vfio-with-wc.sh"
+# dest: "/opt/get-vfio-with-wc.sh"
+# mode: 0744
+# tags:
+# - aws-vfio-patch
+
+- name: Create vfio-pci Patch Directory
+ ansible.builtin.file:
+ path: "/opt/patches/"
+ state: "directory"
+ tags:
+ - aws-vfio-patch
+
+- name: Get vfio-pci WC Patch
+ ansible.builtin.get_url:
+ url: "https://github.com/amzn/amzn-drivers/raw/master/userspace/dpdk/enav2-vfio-patch/patches/{{ item }}"
+ dest: "/opt/patches/{{ item }}"
+ mode: 0744
+ with_items:
+ - "linux-4.10-vfio-wc.patch"
+ - "linux-5.8-vfio-wc.patch"
+ - "linux-5.15-vfio-wc.patch"
+ tags:
+ - aws-vfio-patch
+
+- name: Copy vfio-pci WC Patch
+ ansible.builtin.copy:
+ src: "files/get-vfio-with-wc.sh"
+ dest: "/opt"
+ mode: 0744
+ tags:
+ - aws-vfio-patch
+
+- name: Compile vfio-pci With WC Patch
+ ansible.builtin.shell: "/bin/bash /opt/get-vfio-with-wc.sh"
+ environment:
+ DEBIAN_FRONTEND: "noninteractive"
+ TERM: "vt100"
+ tags:
+ - aws-vfio-patch
+
+- name: Reload systemd-modules
+ ansible.builtin.systemd:
+ name: "systemd-modules-load"
+ state: "restarted"
+ tags:
+ - aws-reload-systemd-modules
+
+- name: Adjust nr_hugepages
+ ansible.builtin.sysctl:
+ name: "vm.nr_hugepages"
+ value: "8192"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: true
+ tags:
+ - aws-set-hugepages
+
+- name: Shutdown host with delay
+ ansible.builtin.command: "/sbin/shutdown -P +720"
+ tags:
+ - aws-set-self-terminate
diff --git a/fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml b/fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml
new file mode 100644
index 0000000000..c589239f61
--- /dev/null
+++ b/fdio.infra.ansible/roles/aws/tasks/ubuntu_jammy.yaml
@@ -0,0 +1,35 @@
+---
+# file: tasks/ubuntu_jammy.yaml
+
+- name: Enable deb-src APT Repository
+ ansible.builtin.apt_repository:
+ repo: "deb-src {{ packages_repo[ansible_distribution|lower][ansible_machine] }} jammy main"
+ state: "present"
+ update_cache: true
+ tags:
+ - aws-enable-src-repo
+
+- name: Enable deb APT Repository Focal
+ ansible.builtin.apt_repository:
+ repo: "deb {{ packages_repo[ansible_distribution|lower][ansible_machine] }} focal main"
+ state: "present"
+ update_cache: true
+ tags:
+ - aws-enable-src-repo
+
+- name: Enable deb-src APT Repository Focal Src
+ ansible.builtin.apt_repository:
+ repo: "deb-src {{ packages_repo[ansible_distribution|lower][ansible_machine] }} focal main"
+ state: "present"
+ update_cache: true
+ tags:
+ - aws-enable-src-repo
+
+- name: Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution == 'Ubuntu'
+ tags:
+ - aws-enable-src-repo \ No newline at end of file
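Note: the "repo" strings above are built from packages_repo defined in roles/aws/defaults/main.yaml. A minimal sketch of the rendered values on an x86_64 host (illustrative only; an aarch64 host would substitute http://ports.ubuntu.com/):

# Hypothetical rendering of the apt_repository "repo" parameters with
# packages_repo["ubuntu"]["x86_64"] = "http://archive.ubuntu.com/ubuntu".
rendered_repos:
  - "deb-src http://archive.ubuntu.com/ubuntu jammy main"
  - "deb http://archive.ubuntu.com/ubuntu focal main"
  - "deb-src http://archive.ubuntu.com/ubuntu focal main"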
diff --git a/fdio.infra.ansible/roles/azure/defaults/main.yaml b/fdio.infra.ansible/roles/azure/defaults/main.yaml
new file mode 100644
index 0000000000..68f6148413
--- /dev/null
+++ b/fdio.infra.ansible/roles/azure/defaults/main.yaml
@@ -0,0 +1,2 @@
+---
+# file: roles/azure/defaults/main.yaml
diff --git a/fdio.infra.ansible/roles/azure/files/10-dtap.link b/fdio.infra.ansible/roles/azure/files/10-dtap.link
new file mode 100644
index 0000000000..a8e0aa10f3
--- /dev/null
+++ b/fdio.infra.ansible/roles/azure/files/10-dtap.link
@@ -0,0 +1,4 @@
+[Match]
+OriginalName=dtap*
+[Link]
+NamePolicy=kernel
diff --git a/fdio.infra.ansible/roles/azure/handlers/main.yaml b/fdio.infra.ansible/roles/azure/handlers/main.yaml
new file mode 100644
index 0000000000..f0d46062d9
--- /dev/null
+++ b/fdio.infra.ansible/roles/azure/handlers/main.yaml
@@ -0,0 +1,15 @@
+---
+# file: roles/azure/handlers/main.yaml
+
+- name: Reboot server
+ reboot:
+ reboot_timeout: 3600
+ tags:
+ - reboot-server
+
+- name: Azure - Reload systemd-modules
+ systemd:
+ name: "systemd-modules-load"
+ state: "restarted"
+ tags:
+ - reload-systemd-modules
diff --git a/fdio.infra.ansible/roles/azure/tasks/main.yaml b/fdio.infra.ansible/roles/azure/tasks/main.yaml
new file mode 100644
index 0000000000..c8d72475d8
--- /dev/null
+++ b/fdio.infra.ansible/roles/azure/tasks/main.yaml
@@ -0,0 +1,38 @@
+---
+# file: roles/azure/tasks/main.yaml
+
+- name: Azure - Load Kernel Modules By Default
+ lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "vfio-pci"
+ - "ib_uverbs"
+ - "mlx4_ib"
+ - "mlx5_ib"
+ notify: "Azure - Reload systemd-modules"
+ tags:
+ - load-kernel-modules
+
+- name: Azure - Performance Tuning - Adjust nr_hugepages
+ sysctl:
+ name: "vm.nr_hugepages"
+ value: "8192"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: Azure - prevent interface renaming
+ copy:
+ src: "files/10-dtap.link"
+ dest: "/etc/systemd/network/"
+ owner: "root"
+ group: "root"
+ mode: "0644"
+ notify:
+ - "Reboot server"
+ tags:
+ - prevent-interface-renaming
diff --git a/fdio.infra.ansible/roles/baremetal/handlers/cimc.yaml b/fdio.infra.ansible/roles/baremetal/handlers/cimc.yaml
new file mode 100644
index 0000000000..3d244d8d19
--- /dev/null
+++ b/fdio.infra.ansible/roles/baremetal/handlers/cimc.yaml
@@ -0,0 +1,74 @@
+---
+# file: roles/baremetal/handlers/cimc.yaml
+
+- name: Boot from network
+ imc_rest:
+ hostname: "{{ inventory_cimc_hostname }}"
+ username: "{{ inventory_cimc_username }}"
+ password: "{{ inventory_cimc_password }}"
+ validate_certs: false
+ content: |
+ <!-- Configure PXE boot -->
+ <configConfMo><inConfig>
+ <lsbootLan dn="sys/rack-unit-1/boot-policy/lan-read-only" access="read-only" order="1" prot="pxe" type="lan"/>
+ </inConfig></configConfMo>
+ delegate_to: localhost
+ tags:
+ - boot-network
+
+- name: Boot from storage
+ imc_rest:
+ hostname: "{{ inventory_cimc_hostname }}"
+ username: "{{ inventory_cimc_username }}"
+ password: "{{ inventory_cimc_password }}"
+ validate_certs: false
+ content: |
+ <configConfMo><inConfig>
+ <lsbootStorage dn="sys/rack-unit-1/boot-policy/storage-read-write" access="read-write" order="1" type="storage"/>
+ </inConfig></configConfMo>
+ delegate_to: localhost
+ tags:
+ - boot-storage
+
+- name: Power up server
+ imc_rest:
+ hostname: "{{ inventory_cimc_hostname }}"
+ username: "{{ inventory_cimc_username }}"
+ password: "{{ inventory_cimc_password }}"
+ validate_certs: false
+ content: |
+ <configConfMo><inConfig>
+ <computeRackUnit dn="sys/rack-unit-1" adminPower="up"/>
+ </inConfig></configConfMo>
+ delegate_to: localhost
+ tags:
+ - power-up
+
+- name: Power down server
+ imc_rest:
+ hostname: "{{ inventory_cimc_hostname }}"
+ username: "{{ inventory_cimc_username }}"
+ password: "{{ inventory_cimc_password }}"
+ validate_certs: false
+ content: |
+ <configConfMo><inConfig>
+ <computeRackUnit dn="sys/rack-unit-1" adminPower="down"/>
+ </inConfig></configConfMo>
+ delegate_to: localhost
+ tags:
+ - power-down
+
+- name: Power cycle server
+ imc_rest:
+ hostname: "{{ inventory_cimc_hostname }}"
+ username: "{{ inventory_cimc_username }}"
+ password: "{{ inventory_cimc_password }}"
+ validate_certs: false
+ content: |
+ <!-- Power cycle server -->
+ <configConfMo><inConfig>
+ <computeRackUnit dn="sys/rack-unit-1" adminPower="cycle-immediate"/>
+ </inConfig></configConfMo>
+ delegate_to: localhost
+ tags:
+ - power-cycle
diff --git a/fdio.infra.ansible/roles/baremetal/handlers/ipmi.yaml b/fdio.infra.ansible/roles/baremetal/handlers/ipmi.yaml
new file mode 100644
index 0000000000..b3cc3d0a82
--- /dev/null
+++ b/fdio.infra.ansible/roles/baremetal/handlers/ipmi.yaml
@@ -0,0 +1,52 @@
+---
+# file: roles/baremetal/handlers/ipmi.yaml
+
+- name: Boot from network
+ ipmi_boot:
+ name: "{{ inventory_ipmi_hostname }}"
+ user: "{{ inventory_ipmi_username }}"
+ password: "{{ inventory_ipmi_password }}"
+ bootdev: network
+ delegate_to: localhost
+ tags:
+ - boot-network
+
+- name: Boot from storage
+ ipmi_boot:
+ name: "{{ inventory_ipmi_hostname }}"
+ user: "{{ inventory_ipmi_username }}"
+ password: "{{ inventory_ipmi_password }}"
+ bootdev: hd
+ delegate_to: localhost
+ tags:
+ - boot-storage
+
+- name: Power up server
+ ipmi_power:
+ name: "{{ inventory_ipmi_hostname }}"
+ user: "{{ inventory_ipmi_username }}"
+ password: "{{ inventory_ipmi_password }}"
+ state: true
+ delegate_to: localhost
+ tags:
+ - power-up
+
+- name: Power down server
+ ipmi_power:
+ name: "{{ inventory_ipmi_hostname }}"
+ user: "{{ inventory_ipmi_username }}"
+ password: "{{ inventory_ipmi_password }}"
+ state: false
+ delegate_to: localhost
+ tags:
+ - power-down
+
+- name: Power cycle server
+ ipmi_power:
+ name: "{{ inventory_ipmi_hostname }}"
+ user: "{{ inventory_ipmi_username }}"
+ password: "{{ inventory_ipmi_password }}"
+ state: boot
+ delegate_to: localhost
+ tags:
+ - power-cycle
diff --git a/fdio.infra.ansible/roles/baremetal/handlers/main.yaml b/fdio.infra.ansible/roles/baremetal/handlers/main.yaml
new file mode 100644
index 0000000000..6e8734eaa9
--- /dev/null
+++ b/fdio.infra.ansible/roles/baremetal/handlers/main.yaml
@@ -0,0 +1,30 @@
+---
+# file: roles/baremetal/handlers/main.yaml
+
+- name: IPMI specific
+ import_tasks: ipmi.yaml
+ when: inventory_ipmi_hostname is defined
+ tags:
+ - ipmi-handlers
+
+- name: CIMC specific
+ import_tasks: cimc.yaml
+ when: inventory_cimc_hostname is defined
+ tags:
+ - cimc-handlers
+
+- name: Reboot server
+ ansible.builtin.reboot:
+ reboot_timeout: 3600
+ tags:
+ - reboot-server
+
+- name: Wait for server to restart
+ ansible.builtin.wait_for:
+ host: "{{ inventory_hostname }}"
+ search_regex: OpenSSH
+ port: 22
+ delay: 60
+ timeout: 3600
+ tags:
+ - reboot-server
diff --git a/fdio.infra.ansible/roles/cadvisor/defaults/main.yaml b/fdio.infra.ansible/roles/cadvisor/defaults/main.yaml
new file mode 100644
index 0000000000..5dba8c9112
--- /dev/null
+++ b/fdio.infra.ansible/roles/cadvisor/defaults/main.yaml
@@ -0,0 +1,24 @@
+---
+# file: roles/cadvisor/defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ - "python3-docker"
+ - "python3-dockerpty"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+image: "{{ image_by_arch[ansible_machine] }}"
+
+image_by_arch:
+ aarch64: "zcube/cadvisor:v0.37.0"
+ x86_64: "gcr.io/cadvisor/cadvisor:v0.38.7"
diff --git a/fdio.infra.ansible/roles/cadvisor/tasks/main.yaml b/fdio.infra.ansible/roles/cadvisor/tasks/main.yaml
new file mode 100644
index 0000000000..a8c3f70124
--- /dev/null
+++ b/fdio.infra.ansible/roles/cadvisor/tasks/main.yaml
@@ -0,0 +1,39 @@
+---
+# file: roles/cadvisor/tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - cadvisor-inst-prerequisites
+
+- name: Inst - Prerequisites
+ package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - cadvisor-inst-prerequisites
+
+- name: Inst - Start a container
+ docker_container:
+ name: "cAdvisor"
+ image: "{{ image }}"
+ state: "started"
+ restart_policy: "unless-stopped"
+ detach: true
+ devices:
+ - "/dev/kmsg"
+ ports:
+ - "8080:8080"
+ privileged: true
+ volumes:
+ - "/:/rootfs:ro"
+ - "/var/run:/var/run:ro"
+ - "/sys:/sys:ro"
+ - "/var/lib/docker/:/var/lib/docker:ro"
+ - "/dev/disk/:/dev/disk:ro"
+ tags:
+ - cadvisor-run-container
diff --git a/fdio.infra.ansible/roles/calibration/defaults/main.yaml b/fdio.infra.ansible/roles/calibration/defaults/main.yaml
new file mode 100644
index 0000000000..5dc3330e08
--- /dev/null
+++ b/fdio.infra.ansible/roles/calibration/defaults/main.yaml
@@ -0,0 +1,37 @@
+---
+# file: roles/calibration/defaults/main.yaml
+
+# Packages to install.
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ jammy:
+ - "build-essential"
+ - "dmidecode"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+# Kernel version to check.
+kernel_version: "{{ kernel_version_by_distro_by_arch[ansible_distribution | lower][ansible_distribution_release][ansible_machine] }}"
+
+kernel_version_by_distro_by_arch:
+ ubuntu:
+ jammy:
+ x86_64:
+ - "5.15.0-46-generic" # Placeholder
+ - "5.15.0-1000-aws" # Placeholder
+ - "5.4.0-1009-aws" # Placeholder
+ aarch64:
+ - "5.15.0-46-generic" # Placeholder
+
+pma_directory: "/tmp/pma_tools"
+jitter_core: 7
+jitter_iterations: 20
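Note: "kernel_version" resolves to the list of acceptable kernels for the current distribution, release and architecture. A minimal sketch of the resolved value on an Ubuntu jammy x86_64 host, using the placeholder entries above (illustrative only):

# Hypothetical resolution of "kernel_version" for ubuntu/jammy/x86_64.
kernel_version_resolved:
  - "5.15.0-46-generic"
  - "5.15.0-1000-aws"
  - "5.4.0-1009-aws"
# The kernel version check in tasks/main.yaml can then assert that
# ansible_kernel is one of these entries.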
diff --git a/fdio.infra.ansible/roles/calibration/tasks/aarch64.yaml b/fdio.infra.ansible/roles/calibration/tasks/aarch64.yaml
new file mode 100644
index 0000000000..ca4e75d268
--- /dev/null
+++ b/fdio.infra.ansible/roles/calibration/tasks/aarch64.yaml
@@ -0,0 +1,2 @@
+---
+# file: roles/calibration/tasks/aarch64.yaml
diff --git a/fdio.infra.ansible/roles/calibration/tasks/main.yaml b/fdio.infra.ansible/roles/calibration/tasks/main.yaml
new file mode 100644
index 0000000000..5807d7e2a4
--- /dev/null
+++ b/fdio.infra.ansible/roles/calibration/tasks/main.yaml
@@ -0,0 +1,89 @@
+---
+# file: roles/calibration/tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - calibration-inst-prerequisites
+
+- name: Inst - Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - calibration-inst-prerequisites
+
+- name: Check CPU Power States
+ ansible.builtin.shell: "lscpu"
+ register: current_lscpu
+ changed_when: false
+ tags:
+ - check-cpu-frequency
+
+- name: Check CPU Power States
+ ansible.builtin.assert:
+ that:
+ - "'CPU min MHz' not in current_lscpu.stdout or 'Intel(R) Xeon(R)' not in ansible_processor"
+ fail_msg: "CPU configuration!"
+ success_msg: "CPU configuration match."
+ tags:
+ - check-cpu-frequency
+
+- name: Check Kernel Parameters
+ ansible.builtin.assert:
+ that:
+ - item in ansible_cmdline
+ fail_msg: "Kernel parameters!"
+ success_msg: "Kernel parameters match."
+ loop: "{{ grub.keys()|sort }}"
+ when:
+ - grub is defined
+ tags:
+ - check-kernel-params
+
+- name: Check Kernel Version
+ ansible.builtin.assert:
+ that:
+      - ansible_kernel in kernel_version
+ fail_msg: "Kernel version!"
+ success_msg: "Kernel version match."
+ tags:
+ - check-kernel-version
+
+- name: Spectre Meltdown Checker Status
+ ansible.builtin.stat:
+ path: "/opt/spectre-meltdown-checker.sh"
+ register: spectre_meltdown_status
+ tags:
+ - check-spectre-meltdown
+
+- name: Get Spectre Meltdown Checker
+ ansible.builtin.get_url:
+ url: "https://meltdown.ovh"
+ dest: "/opt/spectre-meltdown-checker.sh"
+ mode: "744"
+ when:
+ - not spectre_meltdown_status.stat.exists
+ tags:
+ - check-spectre-meltdown
+
+- name: Run Spectre Meltdown Checker
+ ansible.builtin.shell: "/opt/spectre-meltdown-checker.sh --no-color --sysfs-only || true"
+ ignore_errors: true
+ register: spectre_meltdown_sync
+ tags:
+ - check-spectre-meltdown
+
+- ansible.builtin.debug: var=spectre_meltdown_sync.stdout_lines
+ tags:
+ - check-spectre-meltdown
+
+- name: "{{ ansible_machine }} Specific"
+ include_tasks: "{{ ansible_machine }}.yaml"
+ tags:
+ - check-machine-specific
+ - check-jitter-tool
diff --git a/fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml b/fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml
new file mode 100644
index 0000000000..2d28f92ae3
--- /dev/null
+++ b/fdio.infra.ansible/roles/calibration/tasks/x86_64.yaml
@@ -0,0 +1,35 @@
+---
+# file: roles/calibration/tasks/x86_64.yaml
+
+- name: Calibration - Clone PMA Tool
+ ansible.builtin.git:
+ repo: "https://gerrit.fd.io/r/pma_tools"
+ dest: "{{ pma_directory }}"
+ tags:
+ - check-jitter-tool
+
+- name: Calibration - Compile PMA Tool
+ ansible.builtin.raw: "cd {{ pma_directory }}/jitter && make"
+ tags:
+ - check-jitter-tool
+
+- name: Calibration - Run Jitter Tool
+ ansible.builtin.shell: "{{ pma_directory }}/jitter/jitter -c {{ jitter_core }} -i {{ jitter_iterations }} -f"
+ become: true
+ async: 60
+ poll: 0
+ ignore_errors: true
+ register: jitter_async
+ tags:
+ - check-jitter-tool
+
+- name: Check sync status
+ ansible.builtin.async_status:
+ jid: "{{ jitter_async.ansible_job_id }}"
+ register: "jitter_poll_results"
+ until: jitter_poll_results.finished
+ retries: 30
+
+- ansible.builtin.debug: var=jitter_poll_results.stdout_lines
+ tags:
+ - check-jitter-tool
diff --git a/fdio.infra.ansible/roles/cleanup/files/reset_vppdevice.sh b/fdio.infra.ansible/roles/cleanup/files/reset_vppdevice.sh
new file mode 100644
index 0000000000..ede2db1273
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/files/reset_vppdevice.sh
@@ -0,0 +1,113 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+function die () {
+    # Print the message to standard error and exit with the error code specified
+ # by the second argument.
+ #
+ # Hardcoded values:
+ # - The default error message.
+ # Arguments:
+ # - ${1} - The whole error message, be sure to quote. Optional
+ # - ${2} - the code to exit with, default: 1.
+
+ set +eu
+ warn "${1:-Unspecified run-time error occurred!}"
+ exit "${2:-1}"
+}
+
+
+function set_eligibility_off {
+ # Set Nomad eligibility to ineligible for scheduling. Fail otherwise.
+
+ set -euo pipefail
+
+ node_id="$(nomad node status | grep $(hostname) | cut -d ' ' -f 1)" || die
+ node_status="$(nomad node status | grep $(hostname))" || die
+
+ if [[ "${node_status}" != *"ineligible"* ]]; then
+ nomad node eligibility -disable "${node_id}" || die
+ node_status="$(nomad node status | grep $(hostname))" || die
+ if [[ "${node_status}" != *"ineligible"* ]]; then
+ die "Set eligibility off failed!"
+ fi
+ fi
+}
+
+
+function set_eligibility_on {
+ # Set Nomad eligibility to eligible for scheduling. Fail otherwise.
+
+ set -euo pipefail
+
+ node_id="$(nomad node status | grep $(hostname) | cut -d ' ' -f 1)" || die
+ node_status="$(nomad node status | grep $(hostname))" || die
+
+ if [[ "${node_status}" == *"ineligible"* ]]; then
+ nomad node eligibility -enable "${node_id}" || die
+ node_status="$(nomad node status | grep $(hostname))" || die
+ if [[ "${node_status}" == *"ineligible"* ]]; then
+ die "Set eligibility on failed!"
+ fi
+ fi
+}
+
+
+function restart_vfs_service {
+    # Stop and start the VF service. This will reinitialize VFs and driver mappings.
+
+ set -euo pipefail
+
+    warn "Restarting VFs service (this may take a few minutes)..."
+ sudo service csit-initialize-vfs stop || die "Failed to stop VFs service!"
+ sudo service csit-initialize-vfs start || die "Failed to start VFs service!"
+}
+
+
+function wait_for_pending_containers {
+    # Wait in a loop for a defined amount of time for pending containers to
+    # quit gracefully. If the force parameter is specified, force kill them.
+
+ # Arguments:
+ # - ${@} - Script parameters.
+
+ set -euo pipefail
+
+ retries=60
+ wait_time=60
+    containers=(docker ps --quiet --filter "name=csit*")
+
+ for i in $(seq 1 ${retries}); do
+ mapfile -t pending_containers < <( ${containers[@]} ) || die
+ warn "Waiting for pending containers [${pending_containers[@]}] ..."
+ if [ ${#pending_containers[@]} -eq 0 ]; then
+ break
+ fi
+ sleep "${wait_time}" || die
+ done
+ if [ ${#pending_containers[@]} -ne 0 ]; then
+ if [[ "${1-}" == "force" ]]; then
+ warn "Force killing [${pending_containers[@]}] ..."
+ docker rm --force ${pending_containers[@]} || die
+ else
+            die "Still a few containers running!"
+ fi
+ fi
+}
+
+
+function warn () {
+ # Print the message to standard error.
+ #
+ # Arguments:
+ # - ${@} - The text of the message.
+
+ echo "$@" >&2
+}
+
+
+set_eligibility_off || die
+wait_for_pending_containers "${@}" || die
+restart_vfs_service || die
+set_eligibility_on || die
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml b/fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml
new file mode 100644
index 0000000000..76704ab50d
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/clean_images.yaml
@@ -0,0 +1,36 @@
+---
+# file: tasks/clean_images.yaml
+
+- name: Clean Docker Images
+ block:
+ - name: Clean Images - Prefetch Docker Images
+ ansible.builtin.cron:
+ name: "Prefetch docker image {{ item }}"
+ minute: "10"
+ hour: "7"
+ job: "/usr/bin/docker pull {{ item }}"
+ loop:
+ "{{ images_to_prefetch_by_arch[ansible_machine] }}"
+ tags:
+ - prefetch-docker-images
+
+ - name: Clean Images - Remove Dangling Docker Images
+ ansible.builtin.cron:
+ name: "Remove dangling docker images"
+ minute: "10"
+ hour: "5"
+ weekday: "7"
+ job: "/usr/bin/docker rmi $(/usr/bin/docker images --filter 'dangling=true' -q)"
+ tags:
+ - remove-docker-images-dangling
+
+# TODO: Disabled until all images will be in registry
+# - name: Clean Images - Prune Docker Images
+# cron:
+# name: "Prune docker images"
+# minute: "10"
+# hour: "6"
+# weekday: 7
+# job: "/usr/bin/docker image prune --all --force"
+# tags:
+# - prune-docker-images
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml b/fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml
new file mode 100644
index 0000000000..dc739eb954
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/kill_containers.yaml
@@ -0,0 +1,42 @@
+---
+# file: tasks/kill_containers.yaml
+
+- name: Kill Docker Containers
+ block:
+ - name: Get Running Docker Containers
+ ansible.builtin.shell: "docker ps -a --filter name=DUT -q"
+ register: running_containers
+ changed_when: false
+ tags:
+ - kill-containers
+
+ - name: Remove All Docker Containers
+ ansible.builtin.shell: "docker rm --force {{ item }}"
+ with_items: "{{ running_containers.stdout_lines }}"
+ tags:
+ - kill-containers
+
+ rescue:
+ - name: Restart Docker Daemon
+ ansible.builtin.systemd:
+ name: "docker"
+ state: "restarted"
+
+- name: Kill LXC Containers
+ block:
+ - name: Get Running LXC Containers
+ ansible.builtin.shell: "lxc-ls"
+ register: running_containers
+ changed_when: false
+ tags:
+ - kill-containers
+
+ - name: Remove All LXC Containers
+ ansible.builtin.shell: "lxc-destroy --force -n {{ item }}"
+ with_items: "{{ running_containers.stdout_lines }}"
+ tags:
+ - kill-containers
+
+ rescue:
+ - fail:
+ msg: "Kill LXC containers failed!"
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml b/fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml
new file mode 100644
index 0000000000..9ab98a8e57
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/kill_process.yaml
@@ -0,0 +1,38 @@
+---
+# file: tasks/kill_process.yaml
+
+- name: Kill Process - {{ process }}
+ block:
+ - name: Get PID Of {{ process }}
+ ansible.builtin.shell: "ps -ef | grep -v grep | grep -w {{ process }} | awk '{print $2}'"
+ when:
+ - process is defined and process != ""
+ register: running_processes
+ tags:
+ - kill-process
+
+ - name: Safe Kill {{ process }}
+ ansible.builtin.shell: "kill {{ item }}"
+ with_items: "{{ running_processes.stdout_lines }}"
+ ignore_errors: true
+ tags:
+ - kill-process
+
+ - wait_for:
+ path: "/proc/{{ item }}/status"
+ state: "absent"
+ with_items: "{{ running_processes.stdout_lines }}"
+ ignore_errors: true
+ register: killed_processes
+ tags:
+ - kill-process
+
+ - name: Kill Process - Force Kill {{ process }}
+ ansible.builtin.shell: "kill -9 {{ item }}"
+ with_items: "{{ killed_processes.results | select('failed') | map(attribute='item') | list }}"
+ tags:
+ - kill-process
+
+ rescue:
+ - fail:
+ msg: "Kill process {{ process }} failed!"
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/main.yaml b/fdio.infra.ansible/roles/cleanup/tasks/main.yaml
new file mode 100644
index 0000000000..c97b9c5d7e
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/main.yaml
@@ -0,0 +1,26 @@
+---
+# file: tasks/main.yaml
+
+- name: tg specific
+ include_tasks: tg.yaml
+ when: "'tg' in group_names"
+ tags:
+ - cleanup
+
+- name: sut specific
+ include_tasks: sut.yaml
+ when: "'sut' in group_names"
+ tags:
+ - cleanup
+
+- name: vpp_device specific
+ include_tasks: vpp_device.yaml
+ when: "'vpp_device' in group_names"
+ tags:
+ - cleanup
+
+- name: nomad specific
+ include_tasks: nomad.yaml
+ when: "'nomad' in group_names"
+ tags:
+ - cleanup
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml b/fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml
new file mode 100644
index 0000000000..086a4eff7d
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/nomad.yaml
@@ -0,0 +1,18 @@
+---
+# file: tasks/nomad.yaml
+
+- name: Host Cleanup
+ block:
+ - name: Clean Images
+ import_tasks: clean_images.yaml
+ vars:
+ images_to_prefetch_by_arch:
+ aarch64:
+ - "fdiotools/builder-ubuntu2204:prod-aarch64"
+ - "fdiotools/builder-ubuntu2004:prod-aarch64"
+ x86_64:
+ - "fdiotools/builder-ubuntu2204:prod-x86_64"
+ - "fdiotools/builder-ubuntu2004:prod-x86_64"
+ - "fdiotools/builder-debian11:prod-x86_64"
+ tags:
+ - clean-images
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml b/fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml
new file mode 100644
index 0000000000..652729bc30
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/remove_package.yaml
@@ -0,0 +1,21 @@
+---
+# file: tasks/remove_package.yaml
+
+- name: Fix Corrupted APT
+ ansible.builtin.shell: "dpkg --configure -a"
+ when:
+ - ansible_distribution == 'Ubuntu'
+ tags:
+ - remove-package
+
+- name: Remove Package - {{ package }}
+ ansible.builtin.apt:
+ name: "{{ package }}"
+ force: true
+ purge: true
+ state: "absent"
+ failed_when: false
+ when:
+ - ansible_distribution == 'Ubuntu'
+ tags:
+ - remove-package \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/sut.yaml b/fdio.infra.ansible/roles/cleanup/tasks/sut.yaml
new file mode 100644
index 0000000000..22bf596369
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/sut.yaml
@@ -0,0 +1,97 @@
+---
+# file: tasks/sut.yaml
+
+- name: Host Cleanup
+ block:
+ - name: Kill Processes - Qemu
+ import_tasks: kill_process.yaml
+ vars:
+ process: "qemu"
+ tags:
+ - kill-process
+
+ - name: Kill Processes - L3fwd
+ import_tasks: kill_process.yaml
+ vars:
+ process: "l3fwd"
+ tags:
+ - kill-process
+
+ - name: Kill Processes - Testpmd
+ import_tasks: kill_process.yaml
+ vars:
+ process: "testpmd"
+ tags:
+ - kill-process
+
+ - name: Kill Processes - iPerf3
+ import_tasks: kill_process.yaml
+ vars:
+ process: "iperf3"
+ tags:
+ - kill-process
+
+ - name: Kill Processes - nohup
+ import_tasks: kill_process.yaml
+ vars:
+ process: "nohup"
+ tags:
+ - kill-process
+
+ - name: Kill Processes - vpp
+ import_tasks: kill_process.yaml
+ vars:
+ process: "vpp"
+ tags:
+ - kill-process
+
+ - name: Kill Processes - vpp_echo
+ import_tasks: kill_process.yaml
+ vars:
+ process: "vpp_echo"
+ tags:
+ - kill-process
+
+ - name: Find File Or Dir - Core Zip File
+ ansible.builtin.find:
+ paths: "/tmp/"
+ patterns: "*tar.lzo.lrz.xz*"
+ register: files_to_delete
+ tags:
+ - remove-file-dir
+
+ - name: Remove File Or Dir - Core Zip File
+ ansible.builtin.file:
+ path: "{{ item.path }}"
+ state: absent
+ with_items: "{{ files_to_delete.files }}"
+ tags:
+ - remove-file-dir
+
+ - name: Find File Or Dir - Core Dump File
+ ansible.builtin.find:
+ paths: "/tmp/"
+ patterns: "*core*"
+ register: files_to_delete
+ tags:
+ - remove-file-dir
+
+ - name: Remove File Or Dir - Core Dump File
+ ansible.builtin.file:
+ path: "{{ item.path }}"
+ state: absent
+ with_items: "{{ files_to_delete.files }}"
+ tags:
+ - remove-file-dir
+
+ - name: Kill Containers - Remove All Containers
+ import_tasks: kill_containers.yaml
+ tags:
+ - kill-containers
+
+ - name: Remove Packages - Remove VPP
+ import_tasks: remove_package.yaml
+ vars:
+ package: "*vpp*"
+ tags:
+ - remove-package
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/tg.yaml b/fdio.infra.ansible/roles/cleanup/tasks/tg.yaml
new file mode 100644
index 0000000000..8c0162df2c
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/tg.yaml
@@ -0,0 +1,13 @@
+---
+# file: tasks/tg.yaml
+
+- name: Host Cleanup
+ block:
+ - name: Kill Processes - TRex
+ import_tasks: kill_process.yaml
+ vars:
+ process: "_t-rex"
+ when:
+ - docker_tg is undefined
+ tags:
+ - kill-process
diff --git a/fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml b/fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml
new file mode 100644
index 0000000000..c97fa0cde5
--- /dev/null
+++ b/fdio.infra.ansible/roles/cleanup/tasks/vpp_device.yaml
@@ -0,0 +1,32 @@
+---
+# file: tasks/vpp_device.yaml
+
+- name: Host Cleanup
+ block:
+ - name: Reset vpp_device Binary
+ ansible.builtin.copy:
+ src: "files/reset_vppdevice.sh"
+ dest: "/usr/local/bin"
+ owner: "root"
+ group: "root"
+ mode: "744"
+ tags:
+ - reset-sriov
+
+ - name: Clean Images
+ import_tasks: clean_images.yaml
+ vars:
+ images_to_prefetch_by_arch:
+ aarch64:
+ - "fdiotools/builder-ubuntu2004:prod-aarch64"
+ - "fdiotools/builder-ubuntu1804:prod-aarch64"
+ - "fdiotools/builder-centos8:prod-aarch64"
+ x86_64:
+ - "fdiotools/builder-ubuntu2004:prod-x86_64"
+ - "fdiotools/builder-ubuntu1804:prod-x86_64"
+ - "fdiotools/builder-debian10:prod-x86_64"
+ - "fdiotools/builder-debian9:prod-x86_64"
+ - "fdiotools/builder-centos8:prod-x86_64"
+ - "fdiotools/builder-centos7:prod-x86_64"
+ tags:
+ - clean-images
diff --git a/fdio.infra.ansible/roles/common/defaults/main.yaml b/fdio.infra.ansible/roles/common/defaults/main.yaml
new file mode 100644
index 0000000000..9ded8fcba9
--- /dev/null
+++ b/fdio.infra.ansible/roles/common/defaults/main.yaml
@@ -0,0 +1,55 @@
+---
+# file: roles/common/defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - "autoconf"
+ - "ca-certificates"
+ - "cgroup-tools"
+ - "dkms"
+ - "iperf3"
+ - "linux-tools-common"
+ - "ninja-build"
+ - "numactl"
+ - "qemu-system"
+ - "socat"
+ - "unzip"
+ - "virtualenv"
+
+packages_by_distro:
+ ubuntu:
+ jammy:
+ - "build-essential"
+ - "libpcap-dev"
+ - "net-tools"
+ - "python3-all"
+ - "python3-apt"
+ - "python3-cffi"
+ - "python3-cffi-backend"
+ - "python3-dev"
+ - "python3-pip"
+ - "python3-pyelftools"
+ - "python3-setuptools"
+
+packages_by_arch:
+ aarch64:
+ - "gfortran"
+ - "libblas-dev"
+ - "libffi-dev"
+ - "liblapack-dev"
+ - "libssl-dev"
+ x86_64:
+ - []
+
+# Proxy settings: Uncomment and fill the proper values. These variables will be
+# set globally by writing into /etc/environment file on target machine.
+# proxy_env:
+# http_proxy: http://proxy.com:80
+# HTTP_PROXY: http://proxy.com:80
+# https_proxy: http://proxy.com:80
+# HTTPS_PROXY: http://proxy.com:80
+# ftp_proxy: http://proxy.com:80
+# FTP_PROXY: http://proxy.com:80
+# no_proxy: localhost,127.0.0.1,{{ ansible_default_ipv4.address }}
+# NO_PROXY: localhost,127.0.0.1,{{ ansible_default_ipv4.address }}
diff --git a/fdio.infra.ansible/roles/common/handlers/main.yaml b/fdio.infra.ansible/roles/common/handlers/main.yaml
new file mode 100644
index 0000000000..0a4944b4ca
--- /dev/null
+++ b/fdio.infra.ansible/roles/common/handlers/main.yaml
@@ -0,0 +1,8 @@
+---
+# file: roles/common/handlers/main.yaml
+
+- name: Reboot Server
+ ansible.builtin.reboot:
+ reboot_timeout: 3600
+ tags:
+ - reboot-server
diff --git a/fdio.infra.ansible/roles/common/tasks/main.yaml b/fdio.infra.ansible/roles/common/tasks/main.yaml
new file mode 100644
index 0000000000..e47a1fc7a8
--- /dev/null
+++ b/fdio.infra.ansible/roles/common/tasks/main.yaml
@@ -0,0 +1,56 @@
+---
+# file: roles/common/tasks/main.yaml
+
+- name: Conf - Add permanent proxy settings
+ ansible.builtin.lineinfile:
+ path: "/etc/environment"
+ state: "present"
+ line: "{{ item.key }}={{ item.value }}"
+ with_dict: "{{ proxy_env }}"
+ when: proxy_env is defined
+ tags:
+ - common-conf-proxy
+
+- name: Inst - Update package cache (apt)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - common-inst-prerequisites
+
+- name: Inst - Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: "latest"
+ tags:
+ - common-inst-prerequisites
+
+- name: Inst - Meson (DPDK)
+ ansible.builtin.pip:
+ name:
+ - "meson==0.64.1"
+ state: "forcereinstall"
+ tags:
+ - common-inst-meson
+
+- name: Conf - sudoers admin
+ ansible.builtin.lineinfile:
+ path: "/etc/sudoers"
+ state: "present"
+ regexp: "^%admin ALL="
+ line: "%admin ALL=(ALL) ALL"
+ validate: "/usr/sbin/visudo -cf %s"
+ tags:
+ - common-conf-sudoers
+
+- name: Conf - sudoers nopasswd
+ ansible.builtin.lineinfile:
+ path: "/etc/sudoers"
+ state: "present"
+ regexp: "^%sudo"
+ line: "%sudo ALL=(ALL:ALL) NOPASSWD: ALL"
+ validate: "/usr/sbin/visudo -cf %s"
+ tags:
+ - common-conf-sudoers
diff --git a/fdio.infra.ansible/roles/consul/defaults/main.yaml b/fdio.infra.ansible/roles/consul/defaults/main.yaml
new file mode 100644
index 0000000000..9ea38efb56
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/defaults/main.yaml
@@ -0,0 +1,87 @@
+---
+# file: defaults/main.yaml
+
+# Inst - Prerequisites.
+packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
+packages_base:
+ - "curl"
+ - "unzip"
+packages_by_distro:
+ ubuntu:
+ - []
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+# Inst - Consul Map.
+consul_architecture_map:
+ amd64: "amd64"
+ x86_64: "amd64"
+ armv7l: "arm"
+ aarch64: "arm64"
+ 32-bit: "386"
+ 64-bit: "amd64"
+consul_architecture: "{{ consul_architecture_map[ansible_architecture] }}"
+consul_version: "1.16.1"
+consul_pkg: "consul_{{ consul_version }}_linux_{{ consul_architecture }}.zip"
+consul_zip_url: "https://releases.hashicorp.com/consul/{{ consul_version }}/{{ consul_pkg }}"
+consul_force_update: false
+
+# Inst - System paths.
+consul_bin_dir: "/usr/local/bin"
+consul_config_dir: "/etc/consul.d"
+consul_data_dir: "/var/consul"
+consul_inst_dir: "/opt"
+consul_lockfile: "/var/lock/subsys/consul"
+consul_run_dir: "/var/run/consul"
+consul_ssl_dir: "/etc/consul.d/ssl"
+
+# Conf - Service.
+consul_node_role: "both"
+consul_restart_handler_state: "restarted"
+nomad_restart_handler_state: "restarted"
+systemd_resolved_state: "stopped"
+consul_service_mgr: ""
+
+# Conf - User and group.
+consul_group: "consul"
+consul_user: "consul"
+
+# Conf - base.hcl
+consul_allow_tls: true
+consul_bind_addr: "{{ ansible_default_ipv4.address }}"
+consul_bootstrap_expect: 1
+consul_client_addr: "0.0.0.0"
+consul_datacenter: "dc1"
+consul_disable_update_check: true
+consul_enable_debug: false
+consul_enable_syslog: true
+consul_encrypt: ""
+consul_log_level: "INFO"
+consul_node_name: "{{ inventory_hostname }}"
+consul_recursors:
+ - 1.1.1.1
+ - 8.8.8.8
+consul_retry_join: false
+consul_ui_config:
+ enabled: true
+consul_verify_incoming: true
+consul_verify_outgoing: true
+consul_vefify_server_hostname: false
+consul_ca_file: "{{ consul_ssl_dir }}/ca.pem"
+consul_cert_file: "{{ consul_ssl_dir }}/consul.pem"
+consul_key_file: "{{ consul_ssl_dir }}/consul-key.pem"
+
+# Conf - ports.hcl
+consul_port_dns: 53
+consul_port_http: 8500
+consul_port_https: 8501
+consul_port_grpc: 8502
+consul_port_serf_lan: 8301
+consul_port_serf_wan: 8302
+consul_port_server: 8300
+
+# Conf - services.json
+consul_services: false
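Note: the Consul download URL is composed from consul_architecture_map, consul_version and the host architecture. A minimal sketch of the derived values on an x86_64 host with the defaults above (illustrative only):

# Hypothetical derived values for ansible_architecture=x86_64.
consul_architecture: "amd64"
consul_pkg: "consul_1.16.1_linux_amd64.zip"
consul_zip_url: "https://releases.hashicorp.com/consul/1.16.1/consul_1.16.1_linux_amd64.zip"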
diff --git a/fdio.infra.ansible/roles/consul/handlers/main.yaml b/fdio.infra.ansible/roles/consul/handlers/main.yaml
new file mode 100644
index 0000000000..a9de4d1439
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/handlers/main.yaml
@@ -0,0 +1,16 @@
+---
+# file handlers/main.yaml
+
+- name: Restart Nomad
+ ansible.builtin.systemd:
+ daemon_reload: true
+ enabled: true
+ name: "nomad"
+ state: "{{ nomad_restart_handler_state }}"
+
+- name: Restart Consul
+ ansible.builtin.systemd:
+ daemon_reload: true
+ enabled: true
+ name: "consul"
+ state: "{{ consul_restart_handler_state }}"
diff --git a/fdio.infra.ansible/roles/consul/meta/main.yaml b/fdio.infra.ansible/roles/consul/meta/main.yaml
new file mode 100644
index 0000000000..673c3b738d
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/meta/main.yaml
@@ -0,0 +1,21 @@
+---
+# file: meta/main.yaml
+
+dependencies: []
+
+galaxy_info:
+ role_name: "consul"
+ author: "pmikus"
+ description: "Hashicorp Consul."
+ company: "none"
+ license: "license (Apache)"
+ min_ansible_version: "2.9"
+ platforms:
+ - name: "Ubuntu"
+ versions:
+ - "focal"
+ - "jammy"
+ - "kinetic"
+ galaxy_tags:
+ - "consul"
+ - "hashicorp"
diff --git a/fdio.infra.ansible/roles/consul/tasks/main.yaml b/fdio.infra.ansible/roles/consul/tasks/main.yaml
new file mode 100644
index 0000000000..6dd430754b
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/tasks/main.yaml
@@ -0,0 +1,145 @@
+---
+# file: tasks/main.yaml
+
+- name: Update Repositories Cache
+ ansible.builtin.apt:
+ update_cache: true
+ when:
+ - ansible_os_family == 'Debian'
+ tags:
+ - consul-inst-package
+
+- name: Dependencies
+ ansible.builtin.apt:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: "present"
+ cache_valid_time: 3600
+ install_recommends: false
+ when:
+ - ansible_os_family == 'Debian'
+ tags:
+ - consul-inst-dependencies
+
+- name: Add Consul Group
+ ansible.builtin.group:
+ name: "{{ consul_group }}"
+ state: "present"
+ tags:
+ - consul-conf-user
+
+- name: Add Consul user
+ ansible.builtin.user:
+ name: "{{ consul_user }}"
+ group: "{{ consul_group }}"
+ state: "present"
+ system: true
+ tags:
+ - consul-conf-user
+
+- name: Download Consul
+ ansible.builtin.get_url:
+ url: "{{ consul_zip_url }}"
+ dest: "{{ consul_inst_dir }}/{{ consul_pkg }}"
+ tags:
+ - consul-inst-package
+
+- name: Clean Consul
+ ansible.builtin.file:
+ path: "{{ consul_inst_dir }}/consul"
+ state: "absent"
+ when:
+ - consul_force_update | bool
+ tags:
+ - consul-inst-package
+
+- name: Unarchive Consul
+ ansible.builtin.unarchive:
+ src: "{{ consul_inst_dir }}/{{ consul_pkg }}"
+ dest: "{{ consul_inst_dir }}/"
+ remote_src: true
+ tags:
+ - consul-inst-package
+
+- name: Consul
+ ansible.builtin.copy:
+ src: "{{ consul_inst_dir }}/consul"
+ dest: "{{ consul_bin_dir }}"
+ owner: "{{ consul_user }}"
+ group: "{{ consul_group }}"
+ force: true
+ mode: 0755
+ remote_src: true
+ tags:
+ - consul-inst-package
+
+- name: Create Directories
+ ansible.builtin.file:
+ dest: "{{ item }}"
+ state: "directory"
+ owner: "{{ consul_user }}"
+ group: "{{ consul_group }}"
+ mode: 0755
+ with_items:
+ - "{{ consul_config_dir }}"
+ - "{{ consul_ssl_dir }}"
+ - "{{ consul_data_dir }}"
+ - "{{ nomad_config_dir }}"
+ - "{{ nomad_ssl_dir }}"
+ tags:
+ - consul-conf
+
+- name: Base Configuration
+ ansible.builtin.template:
+ src: "{{ item }}.hcl.j2"
+ dest: "{{ consul_config_dir }}/{{ item }}.hcl"
+ owner: "{{ consul_user }}"
+ group: "{{ consul_group }}"
+ mode: 0644
+ with_items:
+ - "base"
+ - "ports"
+ - "telemetry"
+ tags:
+ - consul-conf
+
+- name: Copy Certificates And Keys
+ ansible.builtin.copy:
+ content: "{{ item.src }}"
+ dest: "{{ item.dest }}"
+ owner: "{{ consul_user }}"
+ group: "{{ consul_group }}"
+ mode: 0600
+ no_log: true
+ loop: "{{ consul_certificates | flatten(levels=1) }}"
+ when:
+ - consul_certificates is defined
+ tags:
+ - consul-conf
+
+- name: Stop Systemd-resolved
+ ansible.builtin.systemd:
+ daemon_reload: true
+ enabled: false
+ name: "systemd-resolved"
+ state: "{{ systemd_resolved_state }}"
+ when:
+ - consul_service_mgr == "systemd"
+ tags:
+ - consul-conf
+
+- name: System.d Script
+ ansible.builtin.template:
+ src: "consul_systemd.service.j2"
+ dest: "/lib/systemd/system/consul.service"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ notify:
+ - "Restart Consul"
+ when:
+ - consul_service_mgr == "systemd"
+ tags:
+ - consul-conf
+
+- name: Flush handlers
+ ansible.builtin.meta: flush_handlers
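Note: the "Copy Certificates And Keys" task above iterates over consul_certificates and expects a list of mappings with "src" (the PEM content, typically vault-encrypted) and "dest" keys. A minimal sketch of the expected structure, with hypothetical vault variable names as placeholders (not part of the role):

# Hypothetical consul_certificates definition (host/group vars).
consul_certificates:
  - src: "{{ vault_consul_ca_pem }}"
    dest: "{{ consul_ca_file }}"
  - src: "{{ vault_consul_server_pem }}"
    dest: "{{ consul_cert_file }}"
  - src: "{{ vault_consul_server_key }}"
    dest: "{{ consul_key_file }}"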
diff --git a/fdio.infra.ansible/roles/consul/templates/base.hcl.j2 b/fdio.infra.ansible/roles/consul/templates/base.hcl.j2
new file mode 100644
index 0000000000..15104b2710
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/templates/base.hcl.j2
@@ -0,0 +1,56 @@
+node_name = "{{ consul_node_name }}"
+datacenter = "{{ consul_datacenter }}"
+
+bind_addr = "{{ consul_bind_addr }}"
+client_addr = "{{ consul_client_addr }}"
+data_dir = "{{ consul_data_dir }}"
+
+enable_syslog = {{ consul_enable_syslog | bool | lower }}
+enable_debug = {{ consul_enable_debug | bool | lower }}
+disable_update_check = {{ consul_disable_update_check | bool | lower }}
+log_level = "{{ consul_log_level }}"
+
+server = {{ consul_node_server | bool | lower }}
+encrypt = "{{ consul_encrypt }}"
+{% if consul_node_server | bool == True %}
+bootstrap_expect = {{ consul_bootstrap_expect }}
+verify_incoming = {{ consul_verify_incoming | bool | lower }}
+verify_outgoing = {{ consul_verify_outgoing | bool | lower }}
+verify_server_hostname = {{ consul_vefify_server_hostname | bool | lower }}
+ca_file = "{{ consul_ca_file }}"
+cert_file = "{{ consul_cert_file }}"
+key_file = "{{ consul_key_file }}"
+auto_encrypt {
+ allow_tls = {{ consul_allow_tls | bool | lower }}
+}
+{% else %}
+verify_incoming = {{ consul_verify_incoming | bool | lower }}
+verify_outgoing = {{ consul_verify_outgoing | bool | lower }}
+verify_server_hostname = {{ consul_vefify_server_hostname | bool | lower }}
+ca_file = "{{ consul_ca_file }}"
+auto_encrypt {
+ tls = {{ consul_allow_tls | bool | lower }}
+}
+{% endif %}
+{% if consul_retry_join | bool -%}
+retry_join = [ {% for ip_port in consul_retry_servers -%} "{{ ip_port }}"{% if not loop.last %}, {% endif %}{%- endfor -%} ]
+{%- endif %}
+
+{% if consul_ui_config -%}
+ui_config {
+{% for key, value in consul_ui_config.items() %}
+ {%- if value|bool %}
+ {{ key }} = {{ value | bool | lower }}
+ {%- elif value|string or value == "" %}
+ {{ key }} = "{{ value }}"
+ {%- else %}
+ {{ key }} = {{ value }}
+ {%- endif %}
+{% endfor %}
+
+}
+{%- endif %}
+
+{% if consul_recursors -%}
+recursors = [ {% for server in consul_recursors -%} "{{ server }}"{% if not loop.last %}, {% endif %}{%- endfor -%} ]
+{%- endif %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/consul/templates/consul_systemd.service.j2 b/fdio.infra.ansible/roles/consul/templates/consul_systemd.service.j2
new file mode 100644
index 0000000000..16874f213e
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/templates/consul_systemd.service.j2
@@ -0,0 +1,18 @@
+[Unit]
+Description="HashiCorp Consul - A service mesh solution"
+Documentation=https://www.consul.io/
+Requires=network-online.target
+After=network-online.target
+
+[Service]
+User=root
+Group=root
+ExecStart={{ consul_bin_dir }}/consul agent -config-dir={{ consul_config_dir }}
+ExecReload=/bin/kill --signal HUP $MAINPID
+KillMode=process
+KillSignal=SIGTERM
+Restart=on-failure
+LimitNOFILE=infinity
+
+[Install]
+WantedBy=multi-user.target
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/consul/templates/ports.hcl.j2 b/fdio.infra.ansible/roles/consul/templates/ports.hcl.j2
new file mode 100644
index 0000000000..02932bf6dc
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/templates/ports.hcl.j2
@@ -0,0 +1,9 @@
+ports {
+ dns = {{ consul_port_dns }}
+ http = {{ consul_port_http }}
+ https = {{ consul_port_https }}
+ grpc_tls = {{ consul_port_grpc }}
+ serf_lan = {{ consul_port_serf_lan }}
+ serf_wan = {{ consul_port_serf_wan }}
+ server = {{ consul_port_server }}
+}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/consul/templates/telemetry.hcl.j2 b/fdio.infra.ansible/roles/consul/templates/telemetry.hcl.j2
new file mode 100644
index 0000000000..ec7fabc9da
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/templates/telemetry.hcl.j2
@@ -0,0 +1,3 @@
+telemetry {
+ prometheus_retention_time = "24h"
+}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/consul/vars/main.yaml b/fdio.infra.ansible/roles/consul/vars/main.yaml
new file mode 100644
index 0000000000..5d813dffc7
--- /dev/null
+++ b/fdio.infra.ansible/roles/consul/vars/main.yaml
@@ -0,0 +1,5 @@
+---
+# file: vars/main.yaml
+
+consul_node_client: "{{ (consul_node_role == 'client') or (consul_node_role == 'both') }}"
+consul_node_server: "{{ (consul_node_role == 'server') or (consul_node_role == 'both') }}"
diff --git a/fdio.infra.ansible/roles/docker/defaults/main.yaml b/fdio.infra.ansible/roles/docker/defaults/main.yaml
new file mode 100644
index 0000000000..bf97b4a192
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker/defaults/main.yaml
@@ -0,0 +1,35 @@
+---
+# file: roles/docker/defaults/main.yaml
+
+# Version options.
+docker_edition: "ce"
+docker_package: "docker-{{ docker_edition }}"
+docker_package_state: latest
+
+# Service options.
+docker_service_state: started
+docker_service_enabled: true
+docker_restart_handler_state: restarted
+docker_service_mgr: "systemd"
+
+# Used only for Debian/Ubuntu.
+docker_apt_release_channel: "stable"
+docker_apt_repository: "deb https://download.docker.com/linux/{{ ansible_distribution|lower }} {{ ansible_distribution_release }} {{ docker_apt_release_channel }}"
+docker_apt_repository_state: present
+docker_apt_ignore_key_error: true
+docker_apt_gpg_key: "https://download.docker.com/linux/{{ ansible_distribution | lower }}/gpg"
+docker_apt_gpg_key_state: present
+
+# Used only for RedHat/CentOS/Fedora.
+docker_yum_repo_url: https://download.docker.com/linux/{{ (ansible_distribution == "Fedora") | ternary("fedora","centos") }}/docker-{{ docker_edition }}.repo
+docker_yum_repo_enable_edge: "0"
+docker_yum_repo_enable_test: "0"
+docker_yum_gpg_key: https://download.docker.com/linux/centos/gpg
+
+# Proxy settings.
+docker_daemon_environment_http:
+ - "HTTP_PROXY={{ proxy_env.http_proxy }}"
+ - "NO_PROXY={{ proxy_env.no_proxy }}"
+docker_daemon_environment_https:
+ - "HTTPS_PROXY={{ proxy_env.https_proxy }}"
+ - "NO_PROXY={{ proxy_env.no_proxy }}"
diff --git a/fdio.infra.ansible/roles/docker/handlers/main.yaml b/fdio.infra.ansible/roles/docker/handlers/main.yaml
new file mode 100644
index 0000000000..53eb8528f6
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker/handlers/main.yaml
@@ -0,0 +1,9 @@
+---
+# file roles/docker/handlers/main.yaml
+
+- name: Restart Docker
+ ansible.builtin.service:
+ name: "docker"
+ state: "{{ docker_restart_handler_state }}"
+ tags:
+ - docker-restart-service
diff --git a/fdio.infra.ansible/roles/docker/meta/main.yaml b/fdio.infra.ansible/roles/docker/meta/main.yaml
new file mode 100644
index 0000000000..7bef656eb5
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker/meta/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: meta/main.yaml
+
+dependencies: []
+
+galaxy_info:
+ role_name: docker
+ author: fd.io
+ description: Docker-CE for Linux.
+ company: none
+ license: "license (Apache)"
+ min_ansible_version: 2.9
+ platforms:
+ - name: Ubuntu
+ versions:
+ - jammy
+ galaxy_tags:
+ - docker
diff --git a/fdio.infra.ansible/roles/docker/tasks/jammy.yaml b/fdio.infra.ansible/roles/docker/tasks/jammy.yaml
new file mode 100644
index 0000000000..8ec7a01ee1
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker/tasks/jammy.yaml
@@ -0,0 +1,30 @@
+---
+# file: roles/docker/tasks/jammy.yaml
+
+- name: Inst - Dependencies
+ ansible.builtin.apt:
+ name:
+ - "apt-transport-https"
+ - "ca-certificates"
+ - "gpg-agent"
+ - "software-properties-common"
+ state: "present"
+ cache_valid_time: 3600
+ install_recommends: false
+ tags:
+ - docker-inst-dependencies
+
+- name: Conf - Add APT Key
+ ansible.builtin.apt_key:
+ url: "{{ docker_apt_gpg_key }}"
+ state: "{{ docker_apt_gpg_key_state }}"
+ tags:
+ - docker-conf-apt
+
+- name: Conf - Install APT Repository
+ ansible.builtin.apt_repository:
+ repo: "{{ docker_apt_repository }}"
+ state: "{{ docker_apt_repository_state }}"
+ update_cache: true
+ tags:
+ - docker-conf-apt
diff --git a/fdio.infra.ansible/roles/docker/tasks/main.yaml b/fdio.infra.ansible/roles/docker/tasks/main.yaml
new file mode 100644
index 0000000000..e07b29e363
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker/tasks/main.yaml
@@ -0,0 +1,85 @@
+---
+# file: roles/docker/tasks/main.yaml
+
+- include_tasks: "{{ ansible_distribution_release }}.yaml"
+ tags:
+ - docker-inst-dependencies
+
+- name: Inst - Docker
+ ansible.builtin.package:
+ name:
+ - "{{ docker_package }}"
+ - "{{ docker_package }}-cli"
+ state: "{{ docker_package_state }}"
+ tags:
+ - docker-inst-package
+
+- name: Conf - Docker Service
+ ansible.builtin.service:
+ name: docker
+ state: "{{ docker_service_state }}"
+ enabled: "{{ docker_service_enabled }}"
+ when:
+ - docker_service_mgr == "systemd"
+ tags:
+ - docker-conf-service
+
+- name: Conf - Docker Service Directory
+ ansible.builtin.file:
+ path: "/etc/systemd/system/docker.service.d"
+ state: "directory"
+ mode: "0755"
+ when:
+ - docker_service_mgr == "systemd"
+ tags:
+ - docker-conf-service
+
+- name: Conf - Docker Daemon
+ ansible.builtin.template:
+ src: "templates/daemon.json.j2"
+ dest: "/etc/docker/daemon.json"
+ owner: "root"
+ group: "root"
+ mode: "0644"
+ notify:
+ - "Restart Docker"
+ when: >
+ docker_daemon is defined and
+ docker_service_mgr == "systemd"
+ tags:
+ - docker-conf-daemon
+
+- name: Conf - Docker HTTP Proxy
+ ansible.builtin.template:
+ src: "templates/docker.service.proxy.http"
+ dest: "/etc/systemd/system/docker.service.d/http-proxy.conf"
+ owner: "root"
+ group: "root"
+ mode: "0644"
+ notify:
+ - "Restart Docker"
+ when: >
+ proxy_env is defined and
+ proxy_env.http_proxy is defined and
+ docker_service_mgr == "systemd"
+ tags:
+ - docker-conf-service
+
+- name: Conf - Docker HTTPS Proxy
+ ansible.builtin.template:
+ src: "templates/docker.service.proxy.https"
+ dest: "/etc/systemd/system/docker.service.d/https-proxy.conf"
+ owner: "root"
+ group: "root"
+ mode: "0644"
+ notify:
+ - "Restart Docker"
+ when: >
+ proxy_env is defined and
+ proxy_env.https_proxy is defined and
+ docker_service_mgr == "systemd"
+ tags:
+ - docker-conf-service
+
+- name: Meta - Flush handlers
+ ansible.builtin.meta: flush_handlers
diff --git a/fdio.infra.ansible/roles/docker/templates/daemon.json.j2 b/fdio.infra.ansible/roles/docker/templates/daemon.json.j2
new file mode 100644
index 0000000000..becc2b1af7
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker/templates/daemon.json.j2
@@ -0,0 +1 @@
+{{ docker_daemon | to_nice_json }}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker/templates/docker.service.proxy.http b/fdio.infra.ansible/roles/docker/templates/docker.service.proxy.http
new file mode 100644
index 0000000000..73ceba3870
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker/templates/docker.service.proxy.http
@@ -0,0 +1,4 @@
+# {{ ansible_managed }}
+
+[Service]
+Environment="{{ docker_daemon_environment_http | join('" "') }}"
diff --git a/fdio.infra.ansible/roles/docker/templates/docker.service.proxy.https b/fdio.infra.ansible/roles/docker/templates/docker.service.proxy.https
new file mode 100644
index 0000000000..1c2097eb9d
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker/templates/docker.service.proxy.https
@@ -0,0 +1,4 @@
+# {{ ansible_managed }}
+
+[Service]
+Environment="{{ docker_daemon_environment_https | join('" "') }}"
diff --git a/fdio.infra.ansible/roles/docker_images/files/base/Dockerfile b/fdio.infra.ansible/roles/docker_images/files/base/Dockerfile
new file mode 100644
index 0000000000..88af96bfa8
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/base/Dockerfile
@@ -0,0 +1,140 @@
+FROM ubuntu:22.04
+
+# Setup the environment
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Configure locales
+RUN apt-get update -qq \
+ && apt-get install -y \
+ apt-utils \
+ locales \
+ && sed -i 's/# \(en_US\.UTF-8 .*\)/\1/' /etc/locale.gen \
+ && locale-gen en_US.UTF-8 \
+ && dpkg-reconfigure --frontend=noninteractive locales \
+ && update-locale LANG=en_US.UTF-8 \
+ && TZ=Etc/UTC && ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone \
+ && rm -r /var/lib/apt/lists/*
+ENV LANG="en_US.UTF-8" LANGUAGE="en_US" LC_ALL="en_US.UTF-8"
+
+# Install packages and Docker
+RUN apt-get -q update \
+ && apt-get install -y -qq \
+ apt-transport-https \
+ bridge-utils \
+ ca-certificates \
+ cgroup-tools \
+ cloud-init \
+ cmake \
+ curl \
+ dkms \
+ ethtool \
+ gdb \
+ gfortran \
+ libapr1 \
+ libblas-dev \
+ libffi-dev \
+ libibverbs-dev \
+ liblapack-dev \
+ libmbedcrypto7 \
+ libmbedtls14 \
+ libmbedx509-1 \
+ libnuma1 \
+ libnuma-dev \
+ libpcap-dev \
+ libpixman-1-dev \
+ libsctp-dev \
+ libssl-dev \
+ net-tools \
+ ninja-build \
+ openssh-server \
+ pciutils \
+ python3-all \
+ python3-apt \
+ python3-cffi \
+ python3-cffi-backend \
+ python3-dev \
+ python3-pip \
+ python3-pyelftools \
+ python3-setuptools \
+ python3-virtualenv \
+ qemu-system \
+ rdma-core \
+ rsyslog \
+ screen \
+ socat \
+ software-properties-common \
+ strace \
+ strongswan \
+ ssh \
+ sshpass \
+ sudo \
+ supervisor \
+ tar \
+ tcpdump \
+ unzip \
+ vim \
+ wget \
+ zlib1g-dev \
+ && ln -s -f /usr/lib/x86_64-linux-gnu/libc.a /usr/lib/x86_64-linux-gnu/liblibc.a \
+ && curl -fsSL https://get.docker.com | sh \
+ && rm -rf /var/lib/apt/lists/*
+
+# Fix permissions
+RUN chown root:syslog /var/log \
+ && chmod 755 /etc/default
+
+# Create directory structure
+RUN mkdir -p /tmp/dumps \
+ && mkdir -p /var/cache/vpp/python \
+ && mkdir -p /var/run/sshd \
+ && mkdir -p /var/log/vpp
+
+# CSIT PIP pre-cache
+RUN pip3 install \
+ ecdsa==0.18.0 \
+ paramiko==3.3.1 \
+ pycrypto==2.6.1 \
+ python-dateutil==2.8.2 \
+ PyYAML==6.0.1 \
+ requests==2.31.0 \
+ robotframework==6.1.1 \
+ scapy==2.4.5 \
+ scp==0.14.5 \
+ ansible==8.2.0 \
+ ansible-core==2.15.2 \
+ dill==0.3.7 \
+ numpy==1.25.2 \
+ scipy==1.11.1 \
+ ply==3.11 \
+ jsonschema==4.18.4 \
+ rfc3339-validator==0.1.4 \
+ rfc3987==1.3.8 \
+ attrs==23.1.0 \
+ bcrypt==4.0.1 \
+ certifi==2023.7.22 \
+ cffi==1.15.1 \
+ charset-normalizer==3.2.0 \
+ cryptography==41.0.3 \
+ idna==3.4 \
+ Jinja2==3.1.2 \
+ jsonschema-specifications==2023.7.1 \
+ MarkupSafe==2.1.3 \
+ packaging==23.1 \
+ pycparser==2.21 \
+ PyNaCl==1.5.0 \
+ referencing==0.30.0 \
+ resolvelib==1.0.1 \
+ rpds-py==0.9.2 \
+ six==1.16.0 \
+ urllib3==2.0.4 \
+ meson==0.64.1
+
+RUN groupadd -g 1000 testuser \
+ && useradd -rm -d /home/testuser -s /bin/bash -g testuser -G sudo -u 1000 testuser \
+ && echo 'testuser:Csit1234' | chpasswd
+
+RUN echo 'root:Csit1234' | chpasswd \
+ && sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config \
+ && echo "export VISIBLE=now" >> /etc/profile
+
+RUN service ssh start
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-sut.service b/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-sut.service
new file mode 100644
index 0000000000..431387c95c
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-sut.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=CSIT Initialize Docker SUT
+After=network.target
+
+[Service]
+Type=oneshot
+RemainAfterExit=True
+ExecStart=docker compose -f /opt/csit-docker-images/docker-compose-sut.yaml up --detach
+ExecStop=docker compose -f /opt/csit-docker-images/docker-compose-sut.yaml down
+
+[Install]
+WantedBy=default.target
diff --git a/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-tg.service b/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-tg.service
new file mode 100644
index 0000000000..2c93724a4c
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/csit-initialize-docker-tg.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=CSIT Initialize Docker TG
+After=network.target
+
+[Service]
+Type=oneshot
+RemainAfterExit=True
+ExecStart=docker compose -f /opt/csit-docker-images/docker-compose-tg.yaml up --detach
+ExecStop=docker compose -f /opt/csit-docker-images/docker-compose-tg.yaml down
+
+[Install]
+WantedBy=default.target
diff --git a/fdio.infra.ansible/roles/docker_images/files/csit-sut/Dockerfile b/fdio.infra.ansible/roles/docker_images/files/csit-sut/Dockerfile
new file mode 100644
index 0000000000..85537bc32f
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/csit-sut/Dockerfile
@@ -0,0 +1,7 @@
+FROM base-ubuntu2204:local
+
+EXPOSE 2222
+
+COPY supervisord.conf /etc/supervisor/supervisord.conf
+
+CMD ["sh", "-c", "rm -f /dev/shm/db /dev/shm/global_vm /dev/shm/vpe-api; /usr/bin/supervisord -c /etc/supervisor/supervisord.conf; /usr/sbin/sshd -D -p 2222"] \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/files/csit-sut/supervisord.conf b/fdio.infra.ansible/roles/docker_images/files/csit-sut/supervisord.conf
new file mode 100644
index 0000000000..22a36be5c6
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/files/csit-sut/supervisord.conf
@@ -0,0 +1,24 @@
+[unix_http_server]
+file = /tmp/supervisor.sock
+chmod = 0777
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[supervisorctl]
+serverurl = unix:///tmp/supervisor.sock
+
+[supervisord]
+pidfile = /tmp/supervisord.pid
+identifier = supervisor
+directory = /tmp
+logfile = /tmp/supervisord.log
+loglevel = debug
+nodaemon = false
+
+[program:vpp]
+command = /usr/bin/vpp -c /etc/vpp/startup.conf
+autostart = false
+autorestart = true
+redirect_stderr = true
+priority = 1
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/handlers/main.yaml b/fdio.infra.ansible/roles/docker_images/handlers/main.yaml
new file mode 100644
index 0000000000..766eec432a
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/handlers/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: handlers/main.yaml
+
+- name: "Start csit-initialize-docker-sut.service"
+ ansible.builtin.systemd:
+ enabled: true
+ state: "started"
+ name: "csit-initialize-docker-sut.service"
+ tags:
+ - docker-sut
+
+- name: "Start csit-initialize-docker-tg.service"
+ ansible.builtin.systemd:
+ enabled: true
+ state: "started"
+ name: "csit-initialize-docker-tg.service"
+ tags:
+ - docker-tg
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/tasks/base.yaml b/fdio.infra.ansible/roles/docker_images/tasks/base.yaml
new file mode 100644
index 0000000000..69b3f6217d
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/tasks/base.yaml
@@ -0,0 +1,63 @@
+---
+# file: tasks/base.yaml
+
+- name: "Create a Directory For Docker Images"
+ ansible.builtin.file:
+ path: "/opt/csit-docker-images/"
+ state: "directory"
+ mode: 0755
+ tags:
+ - docker-base
+
+- name: "Copy Build Items"
+ ansible.builtin.copy:
+ src: "{{ item }}"
+ dest: "/opt/csit-docker-images/{{ item }}"
+ owner: "root"
+ group: "root"
+ mode: 0755
+ with_items:
+ - "base/"
+ - "csit-sut/"
+ tags:
+ - docker-base
+
+- name: "Build CSIT Base Docker Image"
+ ansible.builtin.shell: "docker build -t base-ubuntu2204:local ."
+ args:
+ chdir: "/opt/csit-docker-images/base"
+ async: 3000
+ poll: 0
+ register: "docker_built"
+ tags:
+ - docker-base
+
+- name: "Check if CSIT Base Docker Image is Built"
+ async_status:
+ jid: "{{ docker_built.ansible_job_id }}"
+ register: "docker_built"
+ until: "docker_built.finished"
+ delay: 10
+ retries: 300
+ tags:
+ - docker-base
+
+- name: "Build CSIT OLD Docker Image"
+ ansible.builtin.shell: "docker build -t csit_sut-ubuntu2204:local ."
+ args:
+ chdir: "/opt/csit-docker-images/csit-sut"
+ async: 3000
+ poll: 0
+ register: "docker_built"
+ tags:
+ - docker-base
+
+- name: "Check if CSIT OLD Docker Image is Built"
+ async_status:
+ jid: "{{ docker_built.ansible_job_id }}"
+ register: "docker_built"
+ until: "docker_built.finished"
+ delay: 10
+ retries: 300
+ tags:
+ - docker-base
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/tasks/main.yaml b/fdio.infra.ansible/roles/docker_images/tasks/main.yaml
new file mode 100644
index 0000000000..1005e024f2
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/tasks/main.yaml
@@ -0,0 +1,21 @@
+---
+# file: tasks/main.yaml
+
+- name: "Build Base Docker Images"
+ import_tasks: "base.yaml"
+ tags:
+ - docker-base
+
+- name: "Docker Orchestration for TG"
+ import_tasks: "tg.yaml"
+ when: >
+ docker_tg is defined
+ tags:
+ - docker-tg
+
+- name: "Docker Orchestration for SUT"
+ import_tasks: "sut.yaml"
+ when: >
+ docker_sut is defined
+ tags:
+ - docker-sut
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/tasks/sut.yaml b/fdio.infra.ansible/roles/docker_images/tasks/sut.yaml
new file mode 100644
index 0000000000..8ac179573d
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/tasks/sut.yaml
@@ -0,0 +1,28 @@
+---
+# file: tasks/sut.yaml
+
+- name: "Template Compose File"
+ ansible.builtin.template:
+ src: "{{ item }}.j2"
+ dest: "/opt/csit-docker-images/{{ item }}"
+ owner: "root"
+ group: "root"
+ mode: 0755
+ with_items:
+ - "docker-compose-sut.yaml"
+ tags:
+ - docker-sut
+
+- name: "Copy csit-initialize-docker-sut.service"
+ ansible.builtin.copy:
+ src: "files/csit-initialize-docker-sut.service"
+ dest: "/etc/systemd/system/"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ notify:
+ - "Start csit-initialize-docker-sut.service"
+ tags:
+ - docker-sut
+
+- meta: flush_handlers
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/tasks/tg.yaml b/fdio.infra.ansible/roles/docker_images/tasks/tg.yaml
new file mode 100644
index 0000000000..0623616073
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/tasks/tg.yaml
@@ -0,0 +1,28 @@
+---
+# file: tasks/tg.yaml
+
+- name: "Template Compose File"
+ ansible.builtin.template:
+ src: "{{ item }}.j2"
+ dest: "/opt/csit-docker-images/{{ item }}"
+ owner: "root"
+ group: "root"
+ mode: 0755
+ with_items:
+ - "docker-compose-tg.yaml"
+ tags:
+ - docker-tg
+
+- name: "Start csit-initialize-docker-tg.service"
+ ansible.builtin.copy:
+ src: "files/csit-initialize-docker-tg.service"
+ dest: "/etc/systemd/system/"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ notify:
+ - "Start csit-initialize-docker-tg.service"
+ tags:
+ - docker-tg
+
+- meta: flush_handlers
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/templates/docker-compose-sut.yaml.j2 b/fdio.infra.ansible/roles/docker_images/templates/docker-compose-sut.yaml.j2
new file mode 100644
index 0000000000..b4713d8552
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/templates/docker-compose-sut.yaml.j2
@@ -0,0 +1,42 @@
+version: "3"
+services:
+ numa-0:
+ build:
+ context: "base/"
+ dockerfile: "Dockerfile"
+ cap_add:
+ - NET_RAW
+ command: ["/usr/sbin/sshd","-D", "-p", "6001"]
+ expose:
+ - "6001"
+ hostname: "{{ ansible_hostname[:-1] }}1"
+ network_mode: "host"
+ privileged: true
+ restart: "always"
+ shm_size: "4G"
+ volumes:
+{% for volume in docker_volumes %}
+ - type: "bind"
+ source: "{{ volume.source }}"
+ target: "{{ volume.target }}"
+{% endfor %}
+ numa-1:
+ build:
+ context: "base/"
+ dockerfile: "Dockerfile"
+ cap_add:
+ - NET_RAW
+ command: ["/usr/sbin/sshd","-D", "-p", "6002"]
+ expose:
+ - "6002"
+ hostname: "{{ ansible_hostname[:-1] }}2"
+ network_mode: "host"
+ privileged: true
+ restart: "always"
+ shm_size: "4G"
+ volumes:
+{% for volume in docker_volumes %}
+ - type: "bind"
+ source: "{{ volume.source }}"
+ target: "{{ volume.target }}"
+{% endfor %}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/docker_images/templates/docker-compose-tg.yaml.j2 b/fdio.infra.ansible/roles/docker_images/templates/docker-compose-tg.yaml.j2
new file mode 100644
index 0000000000..2cee85e169
--- /dev/null
+++ b/fdio.infra.ansible/roles/docker_images/templates/docker-compose-tg.yaml.j2
@@ -0,0 +1,38 @@
+version: "3"
+services:
+ tg-0:
+ build:
+ context: "base/"
+ dockerfile: "Dockerfile"
+ command: ["/usr/sbin/sshd","-D", "-p", "6001"]
+ expose:
+ - "6001"
+ hostname: "{{ ansible_hostname }}"
+ network_mode: "host"
+ privileged: true
+ restart: "always"
+ shm_size: "4G"
+ volumes:
+{% for volume in docker_volumes %}
+ - type: "bind"
+ source: "{{ volume.source }}"
+ target: "{{ volume.target }}"
+{% endfor %}
+ tg-1:
+ build:
+ context: "base/"
+ dockerfile: "Dockerfile"
+ command: ["/usr/sbin/sshd","-D", "-p", "6002"]
+ expose:
+ - "6002"
+ hostname: "{{ ansible_hostname }}"
+ network_mode: "host"
+ privileged: true
+ restart: "always"
+ shm_size: "4G"
+ volumes:
+{% for volume in docker_volumes %}
+ - type: "bind"
+ source: "{{ volume.source }}"
+ target: "{{ volume.target }}"
+{% endfor %}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/dpdk/defaults/main.yaml b/fdio.infra.ansible/roles/dpdk/defaults/main.yaml
new file mode 100644
index 0000000000..d94e9ac91f
--- /dev/null
+++ b/fdio.infra.ansible/roles/dpdk/defaults/main.yaml
@@ -0,0 +1,24 @@
+---
+# file: defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ jammy:
+ - "build-essential"
+ - "libnuma-dev"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+dpdk_target_dir: "/opt"
+dpdk_version:
+ - "23.11"
+dpdk_url: "https://fast.dpdk.org/rel"
diff --git a/fdio.infra.ansible/roles/dpdk/files/dpdk-mlx5.patch b/fdio.infra.ansible/roles/dpdk/files/dpdk-mlx5.patch
new file mode 100644
index 0000000000..a3928d70f7
--- /dev/null
+++ b/fdio.infra.ansible/roles/dpdk/files/dpdk-mlx5.patch
@@ -0,0 +1,19 @@
+diff --git a/drivers/net/mlx5/mlx5_ethdev.c b/drivers/net/mlx5/mlx5_ethdev.c
+index d7d3bc73c..c21c38485 100644
+--- a/drivers/net/mlx5/mlx5_ethdev.c
++++ b/drivers/net/mlx5/mlx5_ethdev.c
+@@ -1032,11 +1032,14 @@ mlx5_link_update_unlocked_gs(struct rte_eth_dev *dev,
+ ETH_LINK_HALF_DUPLEX : ETH_LINK_FULL_DUPLEX);
+ dev_link.link_autoneg = !(dev->data->dev_conf.link_speeds &
+ ETH_LINK_SPEED_FIXED);
++#if 0
++ /* FIXME: this does not work on Azure w/ CX4-LX */
+ if (((dev_link.link_speed && !dev_link.link_status) ||
+ (!dev_link.link_speed && dev_link.link_status))) {
+ rte_errno = EAGAIN;
+ return -rte_errno;
+ }
++#endif
+ *link = dev_link;
+ return 0;
+ }
diff --git a/fdio.infra.ansible/roles/dpdk/meta/main.yaml b/fdio.infra.ansible/roles/dpdk/meta/main.yaml
new file mode 100644
index 0000000000..3ca2918d36
--- /dev/null
+++ b/fdio.infra.ansible/roles/dpdk/meta/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: meta/main.yaml
+
+dependencies: []
+
+galaxy_info:
+ role_name: "dpdk"
+ author: "fd.io"
+ description: "DPDK for Linux."
+ company: "none"
+ license: "license (Apache)"
+ min_ansible_version: 2.9
+ platforms:
+ - name: "Ubuntu"
+ versions:
+ - "jammy"
+ galaxy_tags:
+ - "dpdk"
diff --git a/fdio.infra.ansible/roles/dpdk/molecule/default/converge.yml b/fdio.infra.ansible/roles/dpdk/molecule/default/converge.yml
new file mode 100644
index 0000000000..ab8392c3dc
--- /dev/null
+++ b/fdio.infra.ansible/roles/dpdk/molecule/default/converge.yml
@@ -0,0 +1,9 @@
+---
+# file: molecule/default/converge.yml
+
+- name: Converge
+ hosts: all
+ become: true
+
+ roles:
+ - role: csit.dpdk
diff --git a/fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml b/fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml
new file mode 100644
index 0000000000..060f842db7
--- /dev/null
+++ b/fdio.infra.ansible/roles/dpdk/molecule/default/molecule.yml
@@ -0,0 +1,21 @@
+---
+# file: molecule/default/molecule.yml
+
+dependency:
+ name: galaxy
+driver:
+ name: docker
+lint: |
+ yamllint .
+ ansible-lint
+platforms:
+ - name: ${DISTRO:-ubuntu-22.04}
+ image: "pmikus/docker-${MOLECULE_DISTRO:-ubuntu-22.04}-ansible:latest"
+ volumes:
+ - /sys/fs/cgroup:/sys/fs/cgroup:ro
+ privileged: true
+ pre_build_image: true
+provisioner:
+ name: ansible
+ playbooks:
+ converge: ${MOLECULE_PLAYBOOK:-converge.yml}
diff --git a/fdio.infra.ansible/roles/dpdk/tasks/deploy_block.yaml b/fdio.infra.ansible/roles/dpdk/tasks/deploy_block.yaml
new file mode 100644
index 0000000000..1f972f5320
--- /dev/null
+++ b/fdio.infra.ansible/roles/dpdk/tasks/deploy_block.yaml
@@ -0,0 +1,33 @@
+---
+# file: tasks/deploy_block.yaml
+
+- name: Download Release {{ item }}
+ ansible.builtin.get_url:
+ url: "{{ dpdk_url }}/dpdk-{{ item }}.tar.xz"
+ dest: "{{ dpdk_target_dir }}/dpdk-{{ item }}.tar.xz"
+ mode: 0644
+ register: dpdk_downloaded
+
+- name: Extract Release {{ item }}
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ dpdk_target_dir }}/dpdk-{{ item }}.tar.xz"
+ dest: "{{ dpdk_target_dir }}/"
+ creates: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
+ when: dpdk_downloaded
+ register: dpdk_extracted
+
+- name: Compile Release I
+ ansible.builtin.command: "meson -Dexamples=l3fwd build"
+ args:
+ chdir: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
+ environment:
+ CFLAGS: "-DRTE_LIBRTE_I40E_16BYTE_RX_DESC=y"
+ register: dpdk_compiled
+
+- name: Compile Release II
+ ansible.builtin.command: "ninja -C build"
+ args:
+ chdir: "{{ dpdk_target_dir }}/dpdk-{{ item }}"
+ environment:
+ CFLAGS: "-DRTE_LIBRTE_I40E_16BYTE_RX_DESC=y"
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/dpdk/tasks/main.yaml b/fdio.infra.ansible/roles/dpdk/tasks/main.yaml
new file mode 100644
index 0000000000..4f6c9ec9f2
--- /dev/null
+++ b/fdio.infra.ansible/roles/dpdk/tasks/main.yaml
@@ -0,0 +1,24 @@
+---
+# file: tasks/main.yaml
+
+- name: Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution == 'Ubuntu'
+ tags:
+ - dpdk-inst-prerequisites
+
+- name: Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: "latest"
+ tags:
+ - dpdk-inst-prerequisites
+
+- name: Multiple DPDK Versions
+ include_tasks: deploy_block.yaml
+ loop: "{{ dpdk_version }}"
+ tags:
+ - dpdk-inst
diff --git a/fdio.infra.ansible/roles/intel/defaults/main.yaml b/fdio.infra.ansible/roles/intel/defaults/main.yaml
new file mode 100644
index 0000000000..9a3c5c0f0c
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/defaults/main.yaml
@@ -0,0 +1,111 @@
+---
+# file: defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ - "build-essential"
+ - "autoconf"
+ - "automake"
+ - "autotools-dev"
+ - "libtool"
+ - "pkgconf"
+ - "asciidoc"
+ - "xmlto"
+ - "uuid-dev"
+ - "libjson-c-dev"
+ - "libkeyutils-dev"
+ - "libz-dev"
+ - "libssl-dev"
+ - "debhelper"
+ - "devscripts"
+ - "debmake"
+ - "quilt"
+ - "fakeroot"
+ - "lintian"
+ - "asciidoctor"
+ - "file"
+ - "gnupg"
+ - "patch"
+ - "patchutils"
+ - "libboost-dev"
+ - "libboost-regex-dev"
+# - "libudev-dev" http://security.ubuntu.com/ubuntu/pool/main/s/systemd/
+ - "yasm"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+intel_sourceforge_download_url: "https://sourceforge.net/projects/e1000/files"
+intel_download_url: "https://downloadmirror.intel.com"
+intel_extract_dir: "/opt"
+
+intel_700_compatibility_matrix:
+ dpdk22.07:
+ # https://doc.dpdk.org/guides/rel_notes/release_22_07.html
+ i40e: "2.19.3"
+ iavf: "4.5.3"
+ nvm: "8.70"
+ dpdk23.11:
+ # https://doc.dpdk.org/guides/rel_notes/release_23_11.html
+ i40e: "2.23.17"
+ iavf: "4.9.5"
+ nvm: "9.30"
+
+intel_800_compatibility_matrix:
+ dpdk22.03:
+ # custom for vpp_device
+ ice: "1.13.7"
+ ddp: "1.3.45.0"
+ iavf: "4.9.5"
+ nvm: "4.40"
+ dpdk22.07:
+ # https://doc.dpdk.org/guides/rel_notes/release_22_07.html
+ ice: "1.9.7"
+ ddp: "1.3.37.0"
+ iavf: "4.5.3"
+ nvm: "4.00"
+ dpdk23.11:
+ # https://doc.dpdk.org/guides/rel_notes/release_23_11.html
+ ice: "1.13.7"
+ ddp: "1.3.45.0"
+ iavf: "4.9.5"
+ nvm: "4.40"
+
+intel_dsa_compatibility_matrix:
+ dsa: "4.0"
+
+intel_qat_compatibility_matrix:
+ qat2: "1.0.20-00008"
+ qat1: "4.22.0-00001"
+
+intel_i40e_url:
+ "2.19.3": "i40e%20stable/2.19.3/i40e-2.19.3.tar.gz/download"
+ "2.23.17": "i40e%20stable/2.23.17/i40e-2.23.17.tar.gz/download"
+
+intel_ice_url:
+ "1.9.7": "ice%20stable/1.9.7/ice-1.9.7.tar.gz/download"
+ "1.13.7": "ice%20stable/1.13.7/ice-1.13.7.tar.gz/download"
+
+intel_iavf_url:
+ "4.3.19": "iavf%20stable/4.3.19/iavf-4.3.19.tar.gz/download"
+ "4.5.3": "iavf%20stable/4.5.3/iavf-4.5.3.tar.gz/download"
+ "4.9.5": "iavf%20stable/4.9.5/iavf-4.9.5.tar.gz/download"
+
+intel_ddp_url:
+ "1.3.37.0": "738733/800%20Series%20DDP%20Comms%20Package%201.3.37.0.zip"
+ "1.3.45.0": "785846/738693_ice_comms-1.3.45.0.zip"
+
+intel_dsa_url:
+ "4.0": "https://github.com/intel/idxd-config/archive/refs/tags/accel-config-v4.0.tar.gz"
+
+intel_qat_url:
+ "1.0.20-00008": "777529/QAT20.L.1.0.20-00008.tar.gz"
+ "4.22.0-00001": "780675/QAT.L.4.22.0-00001.tar.gz"
diff --git a/fdio.infra.ansible/roles/intel/tasks/dsa.yaml b/fdio.infra.ansible/roles/intel/tasks/dsa.yaml
new file mode 100644
index 0000000000..2f038b0e9f
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/dsa.yaml
@@ -0,0 +1,39 @@
+---
+# file: tasks/dsa.yaml
+
+- name: Get DSA Driver
+ ansible.builtin.uri:
+ url: "{{ intel_dsa_url[dsa] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/accel-config-v{{ dsa }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Extract DSA Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/accel-config-v{{ dsa }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/"
+ creates: "{{ intel_extract_dir }}/idxd-config-accel-config-v{{ dsa }}"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install DSA Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/idxd-config-accel-config-v{{ dsa }}"
+ become: true
+ with_items:
+ - "./autogen.sh"
+ - "./configure CFLAGS='-g -O2' --prefix=/usr --sysconfdir=/etc --libdir=/usr/lib64"
+ - "make"
+ - "make check"
+ - "make install"
+ when:
+ - intel_driver_extracted
+ tags:
+ - intel-inst
diff --git a/fdio.infra.ansible/roles/intel/tasks/i40e.yaml b/fdio.infra.ansible/roles/intel/tasks/i40e.yaml
new file mode 100644
index 0000000000..8b069bf9b2
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/i40e.yaml
@@ -0,0 +1,37 @@
+---
+# file: tasks/i40e.yaml
+
+- name: Get i40e Network Adapter Driver
+ ansible.builtin.uri:
+ url: "{{ intel_sourceforge_download_url }}/{{ intel_i40e_url[i40e] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/i40e-{{ i40e }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Extract i40e Network Adapter Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/i40e-{{ i40e }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/"
+ creates: "{{ intel_extract_dir }}/i40e-{{ i40e }}"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install i40e Network Adapter Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/i40e-{{ i40e }}/src"
+ become: true
+ with_items:
+ - "make install"
+ #- "modprobe -r i40e"
+ - "modprobe i40e"
+ when:
+ - intel_driver_extracted
+ tags:
+ - intel-inst
diff --git a/fdio.infra.ansible/roles/intel/tasks/iavf.yaml b/fdio.infra.ansible/roles/intel/tasks/iavf.yaml
new file mode 100644
index 0000000000..127e31bee2
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/iavf.yaml
@@ -0,0 +1,37 @@
+---
+# file: tasks/iavf.yaml
+
+- name: Get iavf Network Adapter Driver
+ ansible.builtin.uri:
+ url: "{{ intel_sourceforge_download_url }}/{{ intel_iavf_url[iavf] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/iavf-{{ iavf }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Extract iavf Network Adapter Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/iavf-{{ iavf }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/"
+ creates: "{{ intel_extract_dir }}/iavf-{{ iavf }}"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install iavf Network Adapter Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/iavf-{{ iavf }}/src"
+ become: true
+ with_items:
+ - "make install"
+ - "modprobe -r iavf"
+ - "modprobe iavf"
+ when:
+ - intel_driver_extracted
+ tags:
+ - intel-inst
diff --git a/fdio.infra.ansible/roles/intel/tasks/ice.yaml b/fdio.infra.ansible/roles/intel/tasks/ice.yaml
new file mode 100644
index 0000000000..c773a65a34
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/ice.yaml
@@ -0,0 +1,91 @@
+---
+# file: tasks/ice.yaml
+
+- name: Get ice Network Adapter Driver
+ ansible.builtin.uri:
+ url: "{{ intel_sourceforge_download_url }}/{{ intel_ice_url[ice] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/ice-{{ ice }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Extract ice Network Adapter Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/ice-{{ ice }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/"
+ creates: "{{ intel_extract_dir }}/ice-{{ ice }}"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install ice Network Adapter Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/ice-{{ ice }}/src"
+ become: true
+ with_items:
+ - "make install"
+ #- "modprobe -r ice"
+ - "modprobe ice"
+ when:
+ - intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Get Dynamic Device Personalization (DDP) Package
+ ansible.builtin.get_url:
+ url: "{{ intel_download_url }}/{{ intel_ddp_url[ddp] }}"
+ dest: "{{ intel_extract_dir }}/800-Series-Comms-Binary-Package-{{ ddp }}.zip"
+ mode: 0644
+ tags:
+ - intel-inst
+
+- name: Extract Dynamic Device Personalization (DDP) Package
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/800-Series-Comms-Binary-Package-{{ ddp }}.zip"
+ dest: "{{ intel_extract_dir }}/"
+ creates: "{{ intel_extract_dir }}/ice_comms-{{ ddp }}.zip"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Extract Dynamic Device Personalization (DDP) Package (inner archive)
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/ice_comms-{{ ddp }}.zip"
+ dest: "{{ intel_extract_dir }}/"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Copy Dynamic Device Personalization (DDP) Package
+ ansible.builtin.copy:
+ src: "{{ intel_extract_dir }}/ice_comms-{{ ddp }}.pkg"
+ dest: "/lib/firmware/updates/intel/ice/ddp/ice-{{ ddp }}.pkg"
+ remote_src: true
+ follow: true
+ tags:
+ - intel-inst
+
+- name: Link Dynamic Device Personalization (DDP) Package
+ ansible.builtin.file:
+ src: "ice-{{ ddp }}.pkg"
+ dest: "/lib/firmware/updates/intel/ice/ddp/ice.pkg"
+ state: link
+ tags:
+ - intel-inst
+
+- name: Extract Dynamic Device Personalization (DDP) Package (cleanup)
+ ansible.builtin.file:
+ path: "{{ item }}"
+ state: absent
+ with_items:
+ - "{{ intel_extract_dir }}/E810 DDP for Comms TechGuide_Rev2.3.pdf"
+ - "{{ intel_extract_dir }}/Intel_800_series_market_segment_DDP_license.txt"
+ tags:
+ - intel-inst
diff --git a/fdio.infra.ansible/roles/intel/tasks/main.yaml b/fdio.infra.ansible/roles/intel/tasks/main.yaml
new file mode 100644
index 0000000000..d7598deca7
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/main.yaml
@@ -0,0 +1,146 @@
+---
+# file: tasks/main.yaml
+
+- name: Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - intel-inst-drivers
+
+- name: Install Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - intel-inst-drivers
+
+- name: Check Presence of Intel Ethernet 700 Series
+ ansible.builtin.shell: "lspci -d 8086:1583; lspci -d 8086:1585; lspci -d 8086:1572; lspci -d 8086:158a; lspci -d 8086:158b"
+ register: intel_700_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Check Presence of Intel Ethernet 800 Series
+ ansible.builtin.shell: "lspci -d 8086:1592; lspci -d 8086:1891; lspci -d 8086:188c"
+ register: intel_800_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Check Presence of Intel DSA
+ ansible.builtin.shell: "lspci -d 8086:0b25"
+ register: intel_dsa_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Check Presence of Intel C4XXX
+ ansible.builtin.shell: "lspci -d 8086:18a0"
+ register: intel_qat1_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Check Presence of Intel 4XXX
+ ansible.builtin.shell: "lspci -d 8086:4942"
+ register: intel_qat2_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Get Intel Ethernet 700 Series driver versions
+ ansible.builtin.set_fact:
+ i40e: "{{ intel_700_compatibility_matrix[intel_700_matrix]['i40e'] }}"
+ iavf: "{{ intel_700_compatibility_matrix[intel_700_matrix]['iavf'] }}"
+ nvm: "{{ intel_700_compatibility_matrix[intel_700_matrix]['nvm'] }}"
+ when: >
+ intel_700_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Get Intel Ethernet 800 Series driver versions
+ ansible.builtin.set_fact:
+ ice: "{{ intel_800_compatibility_matrix[intel_800_matrix]['ice'] }}"
+ ddp: "{{ intel_800_compatibility_matrix[intel_800_matrix]['ddp'] }}"
+ iavf: "{{ intel_800_compatibility_matrix[intel_800_matrix]['iavf'] }}"
+ nvm: "{{ intel_800_compatibility_matrix[intel_800_matrix]['nvm'] }}"
+ when: >
+ intel_800_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Get Intel DSA driver versions
+ ansible.builtin.set_fact:
+ dsa: "{{ intel_dsa_compatibility_matrix['dsa'] }}"
+ when: >
+ intel_dsa_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Get Intel QAT driver versions
+ ansible.builtin.set_fact:
+ qat1: "{{ intel_qat_compatibility_matrix['qat1'] }}"
+ qat2: "{{ intel_qat_compatibility_matrix['qat2'] }}"
+ when: >
+ intel_qat_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel Ethernet 700 Series
+ import_tasks: i40e.yaml
+ when: >
+ intel_700_pcis.stdout_lines | length > 0 and
+ intel_700_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel Ethernet 800 Series
+ import_tasks: ice.yaml
+ when: >
+ intel_800_pcis.stdout_lines | length > 0 and
+ intel_800_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel iAVF
+ import_tasks: iavf.yaml
+ when: >
+ (intel_700_pcis.stdout_lines | length > 0 and
+ intel_700_matrix is defined) or
+ (intel_800_pcis.stdout_lines | length > 0 and
+ intel_800_matrix is defined)
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel DSA
+ import_tasks: dsa.yaml
+ when: >
+ intel_dsa_pcis.stdout_lines | length > 0 and
+ intel_dsa_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel QAT 1.x
+ import_tasks: qat1.yaml
+ when: >
+ intel_qat1_pcis.stdout_lines | length > 0 and
+ intel_qat_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel QAT 2.x
+ import_tasks: qat2.yaml
+ when: >
+ intel_qat2_pcis.stdout_lines | length > 0 and
+ intel_qat_matrix is defined
+ tags:
+ - intel-inst-drivers
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/intel/tasks/qat1.yaml b/fdio.infra.ansible/roles/intel/tasks/qat1.yaml
new file mode 100644
index 0000000000..701c0c1bf1
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/qat1.yaml
@@ -0,0 +1,54 @@
+---
+# file: tasks/qat1.yaml
+
+- name: Get QAT 1.x Driver
+ ansible.builtin.uri:
+ url: "{{ intel_download_url }}/{{ intel_qat_url[qat1] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Create a Directory For QAT 1.x Driver
+ ansible.builtin.file:
+ path: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}/"
+ state: "directory"
+ mode: "0755"
+ tags:
+ - intel-inst
+
+- name: Extract QAT 1.x Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}/"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install QAT 1.x Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}"
+ become: true
+ with_items:
+ - "./configure --enable-icp-sriov=host --enable-icp-sym-only"
+ - "make"
+ - "make install"
+ when:
+ - intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Load Kernel Modules By Default
+ ansible.builtin.lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "qat_c4xxx"
+ tags:
+ - intel-inst
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/intel/tasks/qat2.yaml b/fdio.infra.ansible/roles/intel/tasks/qat2.yaml
new file mode 100644
index 0000000000..a560f16b2c
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/qat2.yaml
@@ -0,0 +1,57 @@
+---
+# file: tasks/qat2.yaml
+
+- name: Get QAT 2.x Driver
+ ansible.builtin.uri:
+ url: "{{ intel_download_url }}/{{ intel_qat_url[qat2] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Create a Directory For QAT 2.x Driver
+ ansible.builtin.file:
+ path: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}/"
+ state: "directory"
+ mode: "0755"
+ tags:
+ - intel-inst
+
+- name: Extract QAT 2.x Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}/"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install QAT 2.x Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}"
+ become: true
+ with_items:
+ - "wget http://security.ubuntu.com/ubuntu/pool/main/s/systemd/libudev-dev_249.11-0ubuntu3.7_amd64.deb"
+ - "dpkg -i ./libudev-dev_249.11-0ubuntu3.7_amd64.deb"
+ - "./configure --enable-icp-sriov=host --enable-icp-sym-only"
+ - "make"
+ - "make install"
+ - "apt remove -y libudev-dev"
+ when:
+ - intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Load Kernel Modules By Default
+ ansible.builtin.lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "qat_4xxx"
+ tags:
+ - intel-inst
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/iperf/defaults/main.yaml b/fdio.infra.ansible/roles/iperf/defaults/main.yaml
new file mode 100644
index 0000000000..f757b287b7
--- /dev/null
+++ b/fdio.infra.ansible/roles/iperf/defaults/main.yaml
@@ -0,0 +1,23 @@
+---
+# file: roles/iperf/defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ jammy:
+ - "build-essential"
+ #- "lib32z1"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+iperf_target_dir: "/opt"
+iperf_version:
+ - "3.7"
diff --git a/fdio.infra.ansible/roles/iperf/tasks/main.yaml b/fdio.infra.ansible/roles/iperf/tasks/main.yaml
new file mode 100644
index 0000000000..6184ba25f1
--- /dev/null
+++ b/fdio.infra.ansible/roles/iperf/tasks/main.yaml
@@ -0,0 +1,62 @@
+---
+# file: roles/iperf/tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - iperf-inst-prerequisites
+
+- name: Inst - Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - iperf-inst-prerequisites
+
+- name: Get Release Archive
+ ansible.builtin.get_url:
+ url: "https://downloads.es.net/pub/iperf/iperf-{{ item }}.tar.gz"
+ dest: "{{ iperf_target_dir }}/iperf-{{ item }}.tar.gz"
+ validate_certs: false
+ mode: 0644
+ loop: "{{ iperf_version }}"
+ tags:
+ - iperf-inst
+
+- name: Extract Release Archive
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ iperf_target_dir }}/iperf-{{ item }}.tar.gz"
+ dest: "{{ iperf_target_dir }}/"
+ creates: "{{ iperf_target_dir }}/iperf-{{ item }}/src"
+ loop: "{{ iperf_version }}"
+ tags:
+ - iperf-inst
+
+- name: Compile Release I
+ ansible.builtin.command: "./configure"
+ args:
+ chdir: "{{ iperf_target_dir }}/iperf-{{ item }}/"
+ loop: "{{ iperf_version }}"
+ tags:
+ - iperf-inst
+
+- name: Compile Release II
+ ansible.builtin.command: "make"
+ args:
+ chdir: "{{ iperf_target_dir }}/iperf-{{ item }}/"
+ loop: "{{ iperf_version }}"
+ tags:
+ - iperf-inst
+
+- name: Compile Release III
+ ansible.builtin.command: "make install"
+ args:
+ chdir: "{{ iperf_target_dir }}/iperf-{{ item }}/"
+ loop: "{{ iperf_version }}"
+ tags:
+ - iperf-inst
diff --git a/fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml b/fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml
new file mode 100644
index 0000000000..ab54aac516
--- /dev/null
+++ b/fdio.infra.ansible/roles/jenkins_job_health_exporter/defaults/main.yaml
@@ -0,0 +1,28 @@
+---
+# file: roles/jenkins_job_health_exporter/defaults/main.yaml
+
+# Conf - Jenkins Job Health Exporter.
+jenkins_host: "jenkins.fd.io"
+poll_interval_sec: 1800
+req_timeout_sec: 30
+bind_to: "0.0.0.0:9186"
+last_builds: 10
+jobs:
+ - "vpp-csit-verify-api-crc-master-ubuntu2204-x86_64"
+ - "vpp-gcc-verify-master-ubuntu2204-x86_64"
+ - "vpp-verify-master-ubuntu2204-aarch64"
+ - "vpp-verify-master-ubuntu2204-x86_64"
+ - "vpp-debug-verify-master-ubuntu2204-x86_64"
+ - "vpp-checkstyle-verify-master-ubuntu2204-x86_64"
+ - "vpp-docs-verify-master-ubuntu2204-x86_64"
+ - "vpp-csit-verify-device-master-ubuntu2204-x86_64-1n-skx"
+ - "vpp-csit-verify-device-master-ubuntu2204-aarch64-1n-tx2"
+
+# Conf - Service.
+jenkins_job_health_exporter_restart_handler_state: "restarted"
+
+# Inst - System paths.
+jenkins_job_health_exporter_target_dir: "/usr/bin"
+jenkins_job_health_exporter_conf_dir: "/etc"
+jenkins_job_health_exporter_url: "https://github.com/ayourtch/jenkins-job-health-exporter/releases/download"
+jenkins_job_health_exporter_version: "v0.0.3"
diff --git a/fdio.infra.ansible/roles/jenkins_job_health_exporter/handlers/main.yaml b/fdio.infra.ansible/roles/jenkins_job_health_exporter/handlers/main.yaml
new file mode 100644
index 0000000000..01849fd92e
--- /dev/null
+++ b/fdio.infra.ansible/roles/jenkins_job_health_exporter/handlers/main.yaml
@@ -0,0 +1,9 @@
+---
+# file roles/jenkins_job_health_exporter/handlers/main.yaml
+
+- name: Restart Jenkins Job Health Exporter
+ ansible.builtin.systemd:
+ daemon_reload: true
+ enabled: true
+ name: "jenkins-job-health-exporter"
+ state: "{{ jenkins_job_health_exporter_restart_handler_state }}"
diff --git a/fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml b/fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml
new file mode 100644
index 0000000000..efdc26a6d2
--- /dev/null
+++ b/fdio.infra.ansible/roles/jenkins_job_health_exporter/tasks/main.yaml
@@ -0,0 +1,38 @@
+---
+# file: roles/jenkins_job_health_exporter/tasks/main.yaml
+
+- name: Conf - Jenkins Job Health Exporter Config
+ ansible.builtin.template:
+ src: "templates/jenkins-job-health-exporter.j2"
+ dest: "/etc/jenkins-job-health-exporter.json"
+ owner: "root"
+ group: "root"
+ mode: "0644"
+ when:
+ - ansible_hostname == "s22-nomad"
+ tags:
+ - conf-jenkins-job-json
+
+- name: Inst - Jenkins Job Health Exporter Binary
+ ansible.builtin.get_url:
+ url: "{{ jenkins_job_health_exporter_url }}/{{ jenkins_job_health_exporter_version }}/jenkins-job-health-exporter"
+ dest: "{{ jenkins_job_health_exporter_target_dir }}/jenkins-job-health-exporter"
+ mode: "0755"
+ when:
+ - ansible_hostname == "s22-nomad"
+ tags:
+ - inst-jenkins-job-binary
+
+- name: Inst - Jenkins Job Health Exporter Service
+ ansible.builtin.template:
+ src: "templates/jenkins-job-health-exporter.service.j2"
+ dest: "/lib/systemd/system/jenkins-job-health-exporter.service"
+ owner: "root"
+ group: "root"
+ mode: "0644"
+ when:
+ - ansible_hostname == "s22-nomad"
+ notify:
+ - "Restart Jenkins Job Health Exporter"
+ tags:
+ - inst-jenkins-job-service
diff --git a/fdio.infra.ansible/roles/jenkins_job_health_exporter/templates/jenkins-job-health-exporter.j2 b/fdio.infra.ansible/roles/jenkins_job_health_exporter/templates/jenkins-job-health-exporter.j2
new file mode 100644
index 0000000000..5942b782e0
--- /dev/null
+++ b/fdio.infra.ansible/roles/jenkins_job_health_exporter/templates/jenkins-job-health-exporter.j2
@@ -0,0 +1,16 @@
+{
+ "jenkins_host": "{{ jenkins_host }}",
+ "poll_interval_sec": {{ poll_interval_sec }},
+ "req_timeout_sec": {{ req_timeout_sec }},
+ "bind_to": "{{ bind_to }}",
+ "last_builds": {{ last_builds }},
+ "jobs": [
+{% for item in jobs %}
+ "{{ item }}"
+{%- if not loop.last %},
+{% endif %}
+{% endfor %}
+
+ ],
+ "verbose": 3
+}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/jenkins_job_health_exporter/templates/jenkins-job-health-exporter.service.j2 b/fdio.infra.ansible/roles/jenkins_job_health_exporter/templates/jenkins-job-health-exporter.service.j2
new file mode 100644
index 0000000000..38073d0a8c
--- /dev/null
+++ b/fdio.infra.ansible/roles/jenkins_job_health_exporter/templates/jenkins-job-health-exporter.service.j2
@@ -0,0 +1,13 @@
+[Unit]
+Description=Jenkins Job Health Exporter
+Documentation=https://github.com/ayourtch/jenkins-job-health-exporter
+
+[Service]
+Restart=always
+ExecStart={{ jenkins_job_health_exporter_target_dir }}/jenkins-job-health-exporter {{ jenkins_job_health_exporter_conf_dir }}/jenkins-job-health-exporter.json
+ExecReload=/bin/kill -HUP $MAINPID
+TimeoutStopSec=20s
+SendSIGKILL=no
+
+[Install]
+WantedBy=multi-user.target
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/kernel/defaults/main.yaml b/fdio.infra.ansible/roles/kernel/defaults/main.yaml
new file mode 100644
index 0000000000..ef628c93f5
--- /dev/null
+++ b/fdio.infra.ansible/roles/kernel/defaults/main.yaml
@@ -0,0 +1,31 @@
+---
+# file: roles/kernel/defaults/main.yaml
+
+# Kernel version to install (Default to any version).
+kernel_version: "{{ kernel_version_by_distro[ansible_distribution|lower][ansible_distribution_release] | join(' ') }}"
+
+kernel_version_by_distro:
+ ubuntu:
+ jammy:
+ - "5.15.0-46"
+
+kernel_packages: "{{ kernel_packages_by_distro[ansible_distribution|lower][ansible_distribution_release] | flatten(levels=1) }}"
+
+kernel_packages_by_distro:
+ ubuntu:
+ jammy:
+ - "linux-image"
+ - "linux-headers"
+ - "linux-modules"
+ - "linux-modules-extra"
+ - "linux-tools"
+
+# Packages to remove in relation to kernel upgrade.
+absent_packages: "{{ absent_packages_by_distro[ansible_distribution|lower][ansible_distribution_release] | flatten(levels=1) }}"
+
+absent_packages_by_distro:
+ ubuntu:
+ jammy:
+ - "amd64-microcode"
+ - "intel-microcode"
+ - "iucode-tool"
diff --git a/fdio.infra.ansible/roles/kernel/filter_plugins/main.py b/fdio.infra.ansible/roles/kernel/filter_plugins/main.py
new file mode 100644
index 0000000000..7d909b90e8
--- /dev/null
+++ b/fdio.infra.ansible/roles/kernel/filter_plugins/main.py
@@ -0,0 +1,143 @@
+
+"""Extra Ansible filters"""
+
+def deb_kernel(packages, kernel_version, current_version):
+ """
+ Return best matching kernel version.
+ Args:
+ packages (dict): apt-cache showpkg output.
+ kernel_version (str): Kernel version to install.
+ current_version (str): Current kernel version.
+ Returns:
+ str: kernel version.
+ """
+ kernels = set()
+
+ # List all available kernel version and associated repository
+ for line in packages['stdout'].splitlines():
+ line = line.strip()
+ if line.startswith('Package: ') and (
+ line.endswith('-common') or # Debian
+ line.endswith('-generic')): # Ubuntu
+ kernel = line.split()[1]
+
+ for string in ('linux-headers-', 'common', 'generic'):
+ kernel = kernel.replace(string, '')
+ kernel = kernel.strip('-')
+
+ if kernel:
+ kernels.add(kernel)
+
+ # Sort Kernel versions
+ versions = {}
+ for kernel in kernels:
+ try:
+ version, build = kernel.split('-', 1)
+ except ValueError:
+ version = kernel
+ build = ''
+ versions[kernel] = list(
+ int(ver) for ver in version.split('.')) + [build]
+ kernels = sorted(versions.keys(), key=versions.get, reverse=True)
+
+ # Return more recent kernel package that match version requirement
+ for kernel in kernels:
+ if kernel.startswith(kernel_version):
+ return kernel
+
+ raise RuntimeError(
+ 'No kernel matching to "%s". Available kernel versions: %s' % (
+ kernel_version, ', '.join(reversed(kernels))))
+
+
+def _deb_kernel_package(kernel, dist, arch, name):
+ """
+ Return kernel package name.
+ Args:
+ kernel (str): Kernel version.
+ dist (str): Distribution.
+ arch (str): Architecture.
+ name (str): Package name.
+ Returns:
+ str: kernel package.
+ """
+ # Define package suffix
+ if dist == 'Ubuntu':
+ suffix = 'generic'
+ elif name == 'linux-image':
+ suffix = arch.replace('x86_64', 'amd64')
+ else:
+ suffix = 'common'
+
+ return '-'.join((name, kernel, suffix))
+
+
+def deb_kernel_pkg(packages, kernel_version, current_version, dist, arch, name):
+ """
+ Return kernel package to install.
+ Args:
+ packages (dict): apt-cache showpkg output.
+ kernel_version (str): Kernel version to install.
+ current_version (str): Current kernel version.
+ dist (str): Distribution.
+ arch (str): Architecture.
+ name (str): Package name.
+ Returns:
+ str: kernel package to install.
+ """
+ return _deb_kernel_package(
+ deb_kernel(packages, kernel_version, current_version), dist, arch, name)
+
+
+def deb_installed_kernel(installed, packages, kernel_version, current_version):
+ """
+ Return old kernel packages to remove.
+ Args:
+ installed (dict): dpkg -l output.
+ packages (dict): apt-cache showpkg output.
+ kernel_version (str): Kernel version to install.
+ current_version (str): Current kernel version.
+ Returns:
+ list of str: Kernel packages to remove.
+ """
+ # Filter installed package to keep
+ to_keep = deb_kernel(packages, kernel_version, current_version)
+
+ # Return installed package to remove
+ to_remove = []
+ for line in installed['stdout'].splitlines():
+ if ' linux-' not in line:
+ continue
+
+ package = line.split()[1]
+ if ((package.startswith('linux-image-') or
+ package.startswith('linux-headers-')) and not (
+ package.startswith('linux-image-' + to_keep) or
+ package.startswith('linux-headers-' + to_keep))):
+ to_remove.append(package)
+
+ return to_remove
+
+
+def kernel_match(kernel, kernel_spec):
+ """
+ Check if kernel version match.
+ Args:
+ kernel (str): Kernel
+ kernel_spec (str): Kernel to match.
+ Returns:
+ bool: True if Kernel match.
+ """
+ return kernel.startswith(kernel_spec)
+
+
+class FilterModule(object):
+ """Return filter plugin"""
+
+ @staticmethod
+ def filters():
+ """Return filter"""
+ return {'deb_kernel': deb_kernel,
+ 'deb_kernel_pkg': deb_kernel_pkg,
+ 'deb_installed_kernel': deb_installed_kernel,
+ 'kernel_match': kernel_match}
diff --git a/fdio.infra.ansible/roles/kernel/handlers/main.yaml b/fdio.infra.ansible/roles/kernel/handlers/main.yaml
new file mode 100644
index 0000000000..d0be276a5b
--- /dev/null
+++ b/fdio.infra.ansible/roles/kernel/handlers/main.yaml
@@ -0,0 +1,8 @@
+---
+# file roles/kernel/handlers/main.yaml
+
+- name: Reboot Server
+ ansible.builtin.reboot:
+ reboot_timeout: 3600
+ tags:
+ - reboot-server
diff --git a/fdio.infra.ansible/roles/kernel/tasks/main.yaml b/fdio.infra.ansible/roles/kernel/tasks/main.yaml
new file mode 100644
index 0000000000..431e344fb8
--- /dev/null
+++ b/fdio.infra.ansible/roles/kernel/tasks/main.yaml
@@ -0,0 +1,9 @@
+---
+# file: roles/kernel/tasks/main.yaml
+
+- name: Inst - Prerequisites
+ include_tasks: "{{ ansible_distribution|lower }}_{{ ansible_distribution_release }}.yaml"
+ tags:
+ - kernel-inst-prerequisites
+
+- meta: flush_handlers
diff --git a/fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml b/fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml
new file mode 100644
index 0000000000..af987d4e5a
--- /dev/null
+++ b/fdio.infra.ansible/roles/kernel/tasks/ubuntu_jammy.yaml
@@ -0,0 +1,62 @@
+---
+# file: roles/kernel/tasks/ubuntu_jammy.yaml
+
+- name: Get Available Kernel Versions
+ ansible.builtin.command: "apt-cache showpkg linux-headers-*"
+ changed_when: false
+ register: apt_kernel_list
+ tags:
+ - kernel-inst
+
+- name: Get Installed Packages (dpkg)
+ ansible.builtin.command: "dpkg -l"
+ changed_when: false
+ register: apt_packages_list
+ tags:
+ - kernel-inst
+
+- name: Set target APT kernel version
+ ansible.builtin.set_fact:
+ _kernel: "{{ apt_kernel_list | deb_kernel(
+ kernel_version, ansible_kernel) }}"
+ tags:
+ - kernel-inst
+
+- name: Disable APT auto upgrade
+ ansible.builtin.lineinfile:
+ path: "/etc/apt/apt.conf.d/20auto-upgrades"
+ state: "present"
+ regexp: "APT::Periodic::Unattended-Upgrade \"[0-9]\";"
+ line: "APT::Periodic::Unattended-Upgrade \"0\";"
+ create: true
+ mode: 0644
+ tags:
+ - kernel-inst
+
+- name: Ensure Packages Versions
+ ansible.builtin.apt:
+ name: "{{ apt_kernel_list | deb_kernel_pkg(
+ kernel_version, ansible_kernel, ansible_distribution,
+ ansible_architecture, item) }}"
+ loop: "{{ kernel_packages }}"
+ tags:
+ - kernel-inst
+
+- name: Ensure Any Other Kernel Packages Are Removed
+ ansible.builtin.apt:
+ name: "{{ apt_packages_list | deb_installed_kernel(
+ apt_kernel_list, kernel_version, ansible_kernel) }}"
+ state: "absent"
+ purge: true
+ notify:
+ - "Reboot Server"
+ tags:
+ - kernel-inst
+
+- name: Ensure Any Microcode Is Absent
+ ansible.builtin.apt:
+ name: "{{ absent_packages }}"
+ state: "absent"
+ purge: true
+ tags:
+ - kernel-inst
diff --git a/fdio.infra.ansible/roles/kernel_vm/files/initramfs_modules b/fdio.infra.ansible/roles/kernel_vm/files/initramfs_modules
new file mode 100644
index 0000000000..00ae8e03e7
--- /dev/null
+++ b/fdio.infra.ansible/roles/kernel_vm/files/initramfs_modules
@@ -0,0 +1,4 @@
+9p
+9pnet
+9pnet_virtio
+vfio-pci \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/kernel_vm/files/initramfs_resume b/fdio.infra.ansible/roles/kernel_vm/files/initramfs_resume
new file mode 100644
index 0000000000..820819823b
--- /dev/null
+++ b/fdio.infra.ansible/roles/kernel_vm/files/initramfs_resume
@@ -0,0 +1 @@
+RESUME=none \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml b/fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml
new file mode 100644
index 0000000000..cd8eb15c57
--- /dev/null
+++ b/fdio.infra.ansible/roles/kernel_vm/tasks/main.yaml
@@ -0,0 +1,92 @@
+---
+# file: roles/kernel_vm/tasks/main.yaml
+
+- name: Inst - Backup remote initramfs modules
+ ansible.builtin.copy:
+ src: "/etc/initramfs-tools/modules"
+ dest: "/tmp/initramfs_modules.bkp"
+ remote_src: true
+ ignore_errors: true
+ register: __initramfs_modules_backuped
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Backup remote initramfs resume config
+ ansible.builtin.copy:
+ src: "/etc/initramfs-tools/conf.d/resume"
+ dest: "/tmp/initramfs-resume.bkp"
+ remote_src: true
+ ignore_errors: true
+ register: __initramfs_resume_backuped
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Update remote initramfs modules
+ ansible.builtin.copy:
+ src: "../files/initramfs_modules"
+ dest: "/etc/initramfs-tools/modules"
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Update remote initramfs resume config
+ ansible.builtin.copy:
+ src: "../files/initramfs_resume"
+ dest: "/etc/initramfs-tools/conf.d/resume"
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Create target kernel dir
+ ansible.builtin.file:
+ path: "/opt/boot"
+ state: "directory"
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Build initrd image
+ ansible.builtin.shell: "update-initramfs -k {{ ansible_kernel }} -c -b /opt/boot"
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Copy corresponding kernel img
+ ansible.builtin.copy:
+ src: "/boot/vmlinuz-{{ ansible_kernel }}"
+ dest: "/opt/boot/vmlinuz-{{ ansible_kernel }}"
+ remote_src: true
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Restore remote initramfs modules
+ ansible.builtin.copy:
+ src: "/tmp/initramfs_modules.bkp"
+ dest: "/etc/initramfs-tools/modules"
+ remote_src: true
+ ignore_errors: true
+ when: __initramfs_modules_backuped
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Remove remote backup initramfs modules
+ ansible.builtin.file:
+ path: "/tmp/initramfs_modules.bkp"
+ state: "absent"
+ when: __initramfs_modules_backuped
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Restore remote initramfs resume config
+ ansible.builtin.copy:
+ src: "/tmp/initramfs-resume.bkp"
+ dest: "/etc/initramfs-tools/conf.d/resume"
+ remote_src: true
+ ignore_errors: true
+ when: __initramfs_resume_backuped
+ tags:
+ - kernel-inst-image
+
+- name: Inst - Remove remote backup initramfs resume config
+ ansible.builtin.file:
+ path: "/tmp/initramfs-resume.bkp"
+ state: "absent"
+ when: __initramfs_resume_backuped
+ tags:
+ - kernel-inst-image
diff --git a/fdio.infra.ansible/roles/mellanox/defaults/main.yaml b/fdio.infra.ansible/roles/mellanox/defaults/main.yaml
new file mode 100644
index 0000000000..de66be2d6b
--- /dev/null
+++ b/fdio.infra.ansible/roles/mellanox/defaults/main.yaml
@@ -0,0 +1,30 @@
+---
+# file: defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ - "build-essential"
+ - "libnl-3-dev"
+ - "libnl-route-3-dev"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+mellanox_download_url: "http://content.mellanox.com/ofed"
+mellanox_extract_dir: "/opt"
+
+mellanox_compatibility_matrix:
+ dpdk22.07:
+ # https://doc.dpdk.org/guides/rel_notes/release_22_07.html
+ ofed: "5.9-0.5.6.0"
+ dpdk23.11:
+ # https://doc.dpdk.org/guides/rel_notes/release_23_11.html
+ ofed: "23.07-0.5.0.0"
diff --git a/fdio.infra.ansible/roles/mellanox/tasks/main.yaml b/fdio.infra.ansible/roles/mellanox/tasks/main.yaml
new file mode 100644
index 0000000000..53376eb997
--- /dev/null
+++ b/fdio.infra.ansible/roles/mellanox/tasks/main.yaml
@@ -0,0 +1,43 @@
+---
+# file: tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - mellanox-inst-drivers
+
+- name: Inst - Prerequisites
+ package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: "latest"
+ tags:
+ - mellanox-inst-drivers
+
+- name: Inst - Check Presence of Mellanox
+ shell: "lspci | grep Mellanox | awk '{print $1}'"
+ register: mellanox_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - mellanox-inst-drivers
+
+- name: Inst - Get Mellanox OFED driver versions
+ set_fact:
+ ofed: "{{ mellanox_compatibility_matrix[mellanox_matrix]['ofed'] }}"
+ when: >
+ mellanox_pcis.stdout_lines | length > 0 and
+ mellanox_matrix is defined
+ tags:
+ - mellanox-inst-drivers
+
+- name: Inst - Driver Mellanox
+ import_tasks: ofed.yaml
+ when: >
+ mellanox_pcis.stdout_lines | length > 0 and
+ mellanox_matrix is defined
+ tags:
+ - mellanox-inst-drivers
diff --git a/fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml b/fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml
new file mode 100644
index 0000000000..c39975bab6
--- /dev/null
+++ b/fdio.infra.ansible/roles/mellanox/tasks/ofed.yaml
@@ -0,0 +1,37 @@
+---
+# file: tasks/ofed.yaml
+
+- name: Inst - Get OFED
+ ansible.builtin.get_url:
+ url: "{{ mellanox_download_url }}/MLNX_OFED-{{ ofed }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}.tgz"
+ dest: "{{ mellanox_extract_dir }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}.tgz"
+ mode: "0644"
+ when: mellanox_pcis.stdout_lines | length > 0
+ tags:
+ - mellanox-inst-drivers
+
+- name: Inst - Extract OFED
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ mellanox_extract_dir }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}.tgz"
+ dest: "{{ mellanox_extract_dir }}/"
+ creates: "{{ mellanox_extract_dir }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}"
+ register: mellanox_firmware_extracted
+ tags:
+ - mellanox-inst-drivers
+
+- name: Inst - OFED
+ ansible.builtin.command: "./mlnxofedinstall --with-mft --dpdk --force --upstream-libs" # --without-fw-update"
+ args:
+ chdir: "{{ mellanox_extract_dir }}/MLNX_OFED_LINUX-{{ ofed }}-{{ ansible_distribution|lower }}{{ ansible_distribution_version }}-{{ ansible_machine }}"
+ when: mellanox_firmware_extracted
+ tags:
+ - mellanox-inst-drivers
+
+- name: Inst - Switch Infiniband to Ethernet
+ ansible.builtin.command: "mlxconfig --yes --dev {{ item }} set LINK_TYPE_P1=2 LINK_TYPE_P2=2"
+ with_items: "{{ mellanox_pcis.stdout_lines }}"
+ failed_when: false
+ changed_when: false
+ tags:
+ - mellanox-inst-drivers \ No newline at end of file
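For reference, the get_url/unarchive tasks above resolve to a single OFED archive URL built from the compatibility matrix and host facts; a minimal sketch for a hypothetical Ubuntu 22.04 x86_64 host on the dpdk23.11 row:

    mellanox_download_url = "http://content.mellanox.com/ofed"
    ofed = "23.07-0.5.0.0"           # mellanox_compatibility_matrix["dpdk23.11"]["ofed"]
    distribution = "ubuntu"          # ansible_distribution | lower
    distribution_version = "22.04"   # ansible_distribution_version
    machine = "x86_64"               # ansible_machine

    url = (
        f"{mellanox_download_url}/MLNX_OFED-{ofed}/"
        f"MLNX_OFED_LINUX-{ofed}-{distribution}{distribution_version}-{machine}.tgz"
    )
    print(url)
    # http://content.mellanox.com/ofed/MLNX_OFED-23.07-0.5.0.0/
    #   MLNX_OFED_LINUX-23.07-0.5.0.0-ubuntu22.04-x86_64.tgz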
diff --git a/fdio.infra.ansible/roles/nomad/defaults/main.yaml b/fdio.infra.ansible/roles/nomad/defaults/main.yaml
new file mode 100644
index 0000000000..535db2bb2c
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/defaults/main.yaml
@@ -0,0 +1,193 @@
+---
+# file: roles/nomad/defaults/main.yaml
+
+# Prerequisites
+packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
+packages_base:
+ - "curl"
+ - "unzip"
+packages_by_distro:
+ ubuntu:
+ - []
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+# Package
+nomad_version: "1.4.3"
+nomad_architecture_map:
+ amd64: "amd64"
+ x86_64: "amd64"
+ armv7l: "arm"
+ aarch64: "arm64"
+ 32-bit: "386"
+ 64-bit: "amd64"
+nomad_architecture: "{{ nomad_architecture_map[ansible_architecture] }}"
+nomad_pkg: "nomad_{{ nomad_version }}_linux_{{nomad_architecture}}.zip"
+nomad_zip_url: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_{{nomad_architecture}}.zip"
+nomad_checksum_file_url: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version}}_SHA256SUMS"
+nomad_force_update: false
+
+# Paths
+nomad_inst_dir: "/opt"
+nomad_bin_dir: "/usr/local/bin"
+nomad_config_dir: "/etc/nomad.d"
+nomad_data_dir: "/var/nomad"
+nomad_plugin_dir: "{{ nomad_data_dir }}/plugins"
+nomad_lockfile: "/var/lock/subsys/nomad"
+nomad_run_dir: "/var/run/nomad"
+nomad_ssl_dir: "/etc/nomad.d/ssl"
+
+# Initialization and startup script templates
+nomad_service_mgr: ""
+
+# System user and group
+nomad_group: "nomad"
+nomad_user: "nomad"
+
+# Nomad settings
+nomad_datacenter: "dc1"
+nomad_region: "global"
+nomad_log_level: "INFO"
+nomad_syslog_enable: true
+nomad_iface: "{{ ansible_default_ipv4.interface }}"
+nomad_node_name: "{{ inventory_hostname }}"
+nomad_node_role: "server"
+nomad_leave_on_terminate: true
+nomad_leave_on_interrupt: false
+nomad_disable_update_check: true
+nomad_enable_debug: false
+
+# Server settings
+nomad_bootstrap_expect: 2
+nomad_encrypt: ""
+nomad_retry_join: true
+# Specifies how long a node must be in a terminal state before it is garbage
+# collected and purged from the system.
+nomad_node_gc_threshold: "24h"
+# Specifies the interval between the job garbage collections. Only jobs who have
+# been terminal for at least job_gc_threshold will be collected.
+nomad_job_gc_interval: "10m"
+# Specifies the minimum time a job must be in the terminal state before it is
+# eligible for garbage collection.
+nomad_job_gc_threshold: "4h"
+# Specifies the minimum time an evaluation must be in the terminal state before
+# it is eligible for garbage collection.
+nomad_eval_gc_threshold: "1h"
+# Specifies the minimum time a deployment must be in the terminal state before
+# it is eligible for garbage collection.
+nomad_deployment_gc_threshold: "1h"
+nomad_encrypt_enable: false
+nomad_raft_protocol: 2
+
+# Client settings
+nomad_node_class: "compute"
+nomad_no_host_uuid: true
+nomad_max_kill_timeout: "30s"
+nomad_gc_interval: "1m"
+nomad_gc_disk_usage_threshold: 80
+nomad_gc_inode_usage_threshold: 70
+nomad_gc_parallel_destroys: 2
+nomad_reserved:
+ cpu: "{{ nomad_reserved_cpu | default('0', true) }}"
+ memory: "{{ nomad_reserved_memory | default('0', true) }}"
+ disk: "{{ nomad_reserved_disk | default('0', true) }}"
+ ports: "{{ nomad_reserved_ports | default('22', true) }}"
+nomad_volumes: []
+nomad_options: {}
+nomad_meta: {}
+nomad_chroot_env: false
+nomad_plugins: {}
+
+# Addresses
+nomad_bind_address: "{{ hostvars[inventory_hostname]['ansible_'+ nomad_iface ]['ipv4']['address'] }}"
+nomad_advertise_address: "{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}"
+
+# Ports
+nomad_ports:
+ http: "{{ nomad_ports_http | default('4646', true) }}"
+ rpc: "{{ nomad_ports_rpc | default('4647', true) }}"
+ serf: "{{ nomad_ports_serf | default('4648', true) }}"
+
+# Servers
+nomad_group_name: "nomad"
+nomad_servers: "\
+ {% if nomad_use_consul==false %}\
+ {% set _nomad_servers = [] %}\
+ {% for host in groups[nomad_group_name] %}\
+ {% set _nomad_node_role = hostvars[host]['nomad_node_role'] | default('client', true) %}\
+ {% if (_nomad_node_role == 'server' or _nomad_node_role == 'both') %}\
+ {% if _nomad_servers.append(host) %}{% endif %}\
+ {% endif %}\
+ {% endfor %}\
+ {{ _nomad_servers }}\
+ {% else %}\
+ []\
+ {% endif %}"
+nomad_gather_server_facts: false
+
+# Consul
+nomad_use_consul: true
+nomad_consul_address: "localhost:8500"
+nomad_consul_token: ""
+nomad_consul_servers_service_name: "nomad"
+nomad_consul_clients_service_name: "nomad-client"
+nomad_consul_tags: {}
+nomad_consul_use_ssl: false
+
+# ACLs
+nomad_acl_enabled: false
+nomad_acl_token_ttl: "30s"
+nomad_acl_policy_ttl: "30s"
+nomad_acl_replication_token: ""
+
+# Docker
+nomad_docker_enable: false
+nomad_docker_dmsetup: true
+
+# Autopilot
+nomad_autopilot_cleanup_dead_servers: true
+nomad_autopilot_last_contact_threshold: "200ms"
+nomad_autopilot_max_trailing_logs: 250
+nomad_autopilot_server_stabilization_time: "10s"
+
+# Telemetry.
+nomad_use_telemetry: true
+nomad_telemetry_disable_hostname: false
+nomad_telemetry_collection_interval: "1s"
+nomad_telemetry_use_node_name: false
+nomad_telemetry_publish_allocation_metrics: true
+nomad_telemetry_publish_node_metrics: true
+nomad_telemetry_prometheus_metrics: true
+
+# TLS.
+nomad_use_tls: true
+nomad_tls_ca_file: "{{ nomad_ssl_dir }}/nomad-ca.pem"
+nomad_tls_cert_file: "{{ nomad_ssl_dir }}/nomad.pem"
+nomad_tls_key_file: "{{ nomad_ssl_dir }}/nomad-key.pem"
+nomad_tls_cli_cert_file: "{{ nomad_ssl_dir }}/nomad-cli.pem"
+nomad_tls_cli_key_file: "{{ nomad_ssl_dir }}/nomad-cli-key.pem"
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_tls_rpc_upgrade_mode: false
+nomad_tls_verify_https_client: false
+nomad_tls_verify_server_hostname: false
+
+# Vault
+nomad_use_vault: false
+nomad_vault_address: "http://vault.service.consul:8200"
+nomad_vault_allow_unauthenticated: true
+nomad_vault_enabled: false
+nomad_vault_create_from_role: ""
+nomad_vault_task_token_ttl: "72h"
+nomad_vault_use_ssl: false
+nomad_vault_ca_file: ""
+nomad_vault_ca_path: ""
+nomad_vault_cert_file: ""
+nomad_vault_key_file: ""
+nomad_vault_namespace: ""
+nomad_vault_tls_server_name: ""
+nomad_vault_tls_skip_verify: false
+nomad_vault_token: ""
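When Consul discovery is disabled, the nomad_servers expression above walks the nomad inventory group and keeps hosts whose nomad_node_role is server or both. A small Python sketch of that selection logic, using a hypothetical inventory:

    inventory = {
        "nomad-server-1": {"nomad_node_role": "server"},
        "nomad-server-2": {"nomad_node_role": "both"},
        "nomad-client-1": {},  # role defaults to "client"
    }

    nomad_servers = [
        host
        for host, hostvars in inventory.items()
        if hostvars.get("nomad_node_role", "client") in ("server", "both")
    ]
    print(nomad_servers)  # ['nomad-server-1', 'nomad-server-2']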
diff --git a/fdio.infra.ansible/roles/nomad/handlers/main.yaml b/fdio.infra.ansible/roles/nomad/handlers/main.yaml
new file mode 100644
index 0000000000..32e5798e3e
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/handlers/main.yaml
@@ -0,0 +1,9 @@
+---
+# file handlers/main.yaml
+
+- name: Restart Nomad
+ ansible.builtin.systemd:
+ daemon_reload: true
+ enabled: true
+ name: "nomad"
+ state: "restarted"
diff --git a/fdio.infra.ansible/roles/nomad/meta/main.yaml b/fdio.infra.ansible/roles/nomad/meta/main.yaml
new file mode 100644
index 0000000000..098aafe2fb
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/meta/main.yaml
@@ -0,0 +1,21 @@
+---
+# file: meta/main.yaml
+
+dependencies: ["docker"]
+
+galaxy_info:
+ role_name: "nomad"
+ author: "pmikus"
+ description: "Hashicorp Nomad."
+ company: "none"
+ license: "license (Apache)"
+ min_ansible_version: "2.9"
+ platforms:
+ - name: "Ubuntu"
+ release:
+ - "focal"
+ - "jammy"
+ - "kinetic"
+ galaxy_tags:
+ - "nomad"
+ - "hashicorp"
diff --git a/fdio.infra.ansible/roles/nomad/tasks/main.yaml b/fdio.infra.ansible/roles/nomad/tasks/main.yaml
new file mode 100644
index 0000000000..72b78458f8
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/tasks/main.yaml
@@ -0,0 +1,151 @@
+---
+# file: tasks/main.yaml
+
+- name: Update Repositories Cache
+ ansible.builtin.apt:
+ update_cache: true
+ when:
+ - ansible_os_family == 'Debian'
+ tags:
+ - nomad-inst-package
+
+- name: Dependencies
+ ansible.builtin.apt:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: "present"
+ cache_valid_time: 3600
+ install_recommends: false
+ when:
+ - ansible_os_family == 'Debian'
+ tags:
+ - nomad-inst-dependencies
+
+- name: Add Nomad Group
+ ansible.builtin.group:
+ name: "{{ nomad_group }}"
+ state: "present"
+ tags:
+ - nomad-conf-user
+
+- name: Add Nomad user
+ ansible.builtin.user:
+ name: "{{ nomad_user }}"
+ group: "{{ nomad_group }}"
+ state: "present"
+ system: true
+ tags:
+ - nomad-conf-user
+
+- name: Download Nomad
+ ansible.builtin.get_url:
+ url: "{{ nomad_zip_url }}"
+ dest: "{{ nomad_inst_dir }}/{{ nomad_pkg }}"
+ mode: 0644
+ tags:
+ - nomad-inst-package
+
+- name: Clean Nomad
+ ansible.builtin.file:
+ path: "{{ nomad_inst_dir }}/nomad"
+ state: "absent"
+ when:
+ - nomad_force_update | bool
+ tags:
+ - nomad-inst-package
+
+- name: Unarchive Nomad
+ ansible.builtin.unarchive:
+ src: "{{ nomad_inst_dir }}/{{ nomad_pkg }}"
+ dest: "{{ nomad_inst_dir }}/"
+ remote_src: true
+ tags:
+ - nomad-inst-package
+
+- name: Nomad
+ ansible.builtin.copy:
+ src: "{{ nomad_inst_dir }}/nomad"
+ dest: "{{ nomad_bin_dir }}"
+ owner: "{{ nomad_user }}"
+ group: "{{ nomad_group }}"
+ force: true
+ mode: 0755
+ remote_src: true
+ tags:
+ - nomad-inst-package
+
+- name: Create Directories
+ ansible.builtin.file:
+ dest: "{{ item }}"
+ state: "directory"
+ owner: "{{ nomad_user }}"
+ group: "{{ nomad_group }}"
+ mode: 0755
+ with_items:
+ - "{{ nomad_data_dir }}"
+ - "{{ nomad_config_dir }}"
+ - "{{ nomad_ssl_dir }}"
+ tags:
+ - nomad-conf
+
+- name: Base Configuration
+ ansible.builtin.template:
+ src: "{{ item }}.hcl.j2"
+ dest: "{{ nomad_config_dir }}/{{ item }}.hcl"
+ owner: "{{ nomad_user }}"
+ group: "{{ nomad_group }}"
+ mode: 0644
+ with_items:
+ - "base"
+ - "consul"
+ - "client"
+ - "server"
+ - "telemetry"
+ - "tls"
+ - "vault"
+ tags:
+ - nomad-conf
+
+- name: Conf - Copy Certificates And Keys
+ ansible.builtin.copy:
+ content: "{{ item.src }}"
+ dest: "{{ item.dest }}"
+ owner: "{{ nomad_user }}"
+ group: "{{ nomad_group }}"
+ mode: 0600
+ no_log: true
+ loop: "{{ nomad_certificates | flatten(levels=1) }}"
+ when:
+ - nomad_certificates is defined
+ tags:
+ - nomad-conf
+
+- name: Nomad CLI Environment Variables
+ ansible.builtin.lineinfile:
+ path: "/etc/profile.d/nomad.sh"
+ line: "{{ item }}"
+ mode: 0644
+ create: true
+ loop:
+ - "export NOMAD_ADDR=https://nomad-server.service.consul:4646"
+ - "export NOMAD_CACERT={{ nomad_tls_ca_file }}"
+ - "export NOMAD_CLIENT_CERT={{ nomad_tls_cli_cert_file }}"
+ - "export NOMAD_CLIENT_KEY={{ nomad_tls_cli_key_file }}"
+ tags:
+ - nomad-conf
+
+- name: System.d Script
+ ansible.builtin.template:
+ src: "nomad_systemd.service.j2"
+ dest: "/lib/systemd/system/nomad.service"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ notify:
+ - "Restart Nomad"
+ when:
+ - nomad_service_mgr == "systemd"
+ tags:
+ - nomad-conf
+
+- name: Meta - Flush handlers
+ ansible.builtin.meta: flush_handlers
diff --git a/fdio.infra.ansible/roles/nomad/templates/base.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/base.hcl.j2
new file mode 100644
index 0000000000..cd7fb54f9c
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/base.hcl.j2
@@ -0,0 +1,26 @@
+name = "{{ nomad_node_name }}"
+region = "{{ nomad_region }}"
+datacenter = "{{ nomad_datacenter }}"
+
+enable_debug = {{ nomad_enable_debug | bool | lower }}
+disable_update_check = {{ nomad_disable_update_check | bool | lower }}
+
+bind_addr = "{{ nomad_bind_address }}"
+advertise {
+ http = "{{ nomad_advertise_address }}:{{ nomad_ports.http }}"
+ rpc = "{{ nomad_advertise_address }}:{{ nomad_ports.rpc }}"
+ serf = "{{ nomad_advertise_address }}:{{ nomad_ports.serf }}"
+}
+ports {
+ http = {{ nomad_ports['http'] }}
+ rpc = {{ nomad_ports['rpc'] }}
+ serf = {{ nomad_ports['serf'] }}
+}
+
+data_dir = "{{ nomad_data_dir }}"
+
+log_level = "{{ nomad_log_level }}"
+enable_syslog = {{ nomad_syslog_enable | bool | lower }}
+
+leave_on_terminate = {{ nomad_leave_on_terminate | bool | lower }}
+leave_on_interrupt = {{ nomad_leave_on_interrupt | bool | lower }}
diff --git a/fdio.infra.ansible/roles/nomad/templates/cfssl.json b/fdio.infra.ansible/roles/nomad/templates/cfssl.json
new file mode 100644
index 0000000000..2b603e9b84
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/cfssl.json
@@ -0,0 +1,8 @@
+{
+ "signing": {
+ "default": {
+ "expiry": "87600h",
+ "usages": ["signing", "key encipherment", "server auth", "client auth"]
+ }
+ }
+} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/templates/client.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/client.hcl.j2
new file mode 100644
index 0000000000..f82f38a4e4
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/client.hcl.j2
@@ -0,0 +1,60 @@
+client {
+ enabled = {{ nomad_node_client | bool | lower }}
+
+ node_class = "{{ nomad_node_class }}"
+ no_host_uuid = {{ nomad_no_host_uuid | bool | lower }}
+
+{% if nomad_use_consul == False %}
+ {% if nomad_servers -%}
+ servers = [ {% for ip_port in nomad_servers -%} "{{ ip_port }}" {% if not loop.last %},{% endif %}{%- endfor -%} ]
+ {% endif -%}
+{% endif %}
+
+ {% if nomad_network_interface is defined -%}
+ network_interface = "{{ nomad_network_interface }}"
+ {% endif -%}
+ {% if nomad_network_speed is defined -%}
+ network_speed = "{{ nomad_network_speed }}"
+ {% endif -%}
+ {% if nomad_cpu_total_compute is defined -%}
+ cpu_total_compute = {{ nomad_cpu_total_compute }}
+ {% endif -%}
+
+ reserved {
+ cpu = {{ nomad_reserved['cpu'] }}
+ memory = {{ nomad_reserved['memory'] }}
+ disk = {{ nomad_reserved['disk'] }}
+ }
+
+ {% for nomad_host_volume in nomad_volumes -%}
+ host_volume "{{ nomad_host_volume.name }}" {
+ path = "{{ nomad_host_volume.path }}"
+ read_only = {{ nomad_host_volume.read_only | bool | lower }}
+ }
+ {% endfor %}
+
+ {% if nomad_chroot_env != False -%}
+ chroot_env = {
+ {% for key, value in nomad_chroot_env.items() %}
+ "{{ key }}" = "{{ value }}"
+ {% endfor -%}
+ }
+ {% endif %}
+
+ {% if nomad_options -%}
+ options = {
+ {% for key, value in nomad_options.items() %}
+ "{{ key }}" = "{{ value }}"
+ {% endfor -%}
+ }
+ {% endif %}
+
+ {% if nomad_meta -%}
+ meta = {
+ {% for key, value in nomad_meta.items() %}
+ "{{ key }}" = "{{ value }}"
+ {% endfor -%}
+ }
+ {% endif %}
+
+}
diff --git a/fdio.infra.ansible/roles/nomad/templates/consul.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/consul.hcl.j2
new file mode 100644
index 0000000000..a9c1aff7b2
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/consul.hcl.j2
@@ -0,0 +1,63 @@
+{% if nomad_use_consul | bool == True %}
+consul {
+ # Specifies the address to the local Consul agent, given in the format
+ # host:port.
+ address = "{{ nomad_consul_address }}"
+
+ # Specifies if Nomad should advertise its services in Consul. The services
+ # are named according to server_service_name and client_service_name. Nomad
+ # servers and clients advertise their respective services, each tagged
+ # appropriately with either http or rpc tag. Nomad servers also advertise a
+ # serf tagged service.
+ auto_advertise = true
+
+ # Specifies if the Nomad clients should automatically discover servers in
+ # the same region by searching for the Consul service name defined in the
+ # server_service_name option. The search occurs if the client is not
+ # registered with any servers or it is unable to heartbeat to the leader of
+ # the region, in which case it may be partitioned and searches for other
+ # servers.
+ client_auto_join = true
+
+ # Specifies the name of the service in Consul for the Nomad clients.
+ client_service_name = "{{ nomad_consul_clients_service_name }}"
+
+ # Specifies the name of the service in Consul for the Nomad servers.
+ server_service_name = "{{ nomad_consul_servers_service_name }}"
+
+ # Specifies if the Nomad servers should automatically discover and join
+ # other Nomad servers by searching for the Consul service name defined in
+ # the server_service_name option. This search only happens if the server
+ # does not have a leader.
+ server_auto_join = true
+
+ # Specifies optional Consul tags to be registered with the Nomad server and
+ # agent services.
+ tags = {{ nomad_consul_tags | to_json }}
+
+ # Specifies the token used to provide a per-request ACL token. This option
+ # overrides the Consul Agent's default token. If the token is not set here
+ # or on the Consul agent, it will default to Consul's anonymous policy,
+ # which may or may not allow writes.
+ token = "{{ nomad_consul_token }}"
+
+ {% if nomad_consul_use_ssl | bool == True -%}
+ # Specifies if the transport scheme should use HTTPS to communicate with the
+ # Consul agent.
+ ssl = true
+
+ # Specifies an optional path to the CA certificate used for Consul
+ # communication. This defaults to the system bundle if unspecified.
+ ca_file = "{{ nomad_ca_file }}"
+
+ # Specifies the path to the certificate used for Consul communication. If
+ # this is set then you need to also set key_file.
+ cert_file = "{{ nomad_cert_file }}"
+
+ # Specifies the path to the private key used for Consul communication. If
+ # this is set then you need to also set cert_file.
+ key_file = "{{ nomad_key_file }}"
+ {% endif %}
+
+}
+{% endif %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j2 b/fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j2
new file mode 100644
index 0000000000..564505781b
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/nomad_systemd.service.j2
@@ -0,0 +1,33 @@
+[Unit]
+Description=Nomad
+Documentation=https://nomadproject.io/docs/
+Wants=network-online.target
+After=network-online.target
+
+# When using Nomad with Consul it is not necessary to start Consul first. These
+# lines start Consul before Nomad as an optimization to avoid Nomad logging
+# that Consul is unavailable at startup.
+#Wants=consul.service
+#After=consul.service
+
+[Service]
+# Nomad server should be run as the nomad user. Nomad clients
+# should be run as root
+#User=nomad
+#Group=nomad
+
+ExecReload=/bin/kill -HUP $MAINPID
+ExecStart={{ nomad_bin_dir }}/nomad agent -config {{ nomad_config_dir }}
+KillMode=process
+KillSignal=SIGINT
+LimitNOFILE=infinity
+LimitNPROC=infinity
+Restart=on-failure
+RestartSec=2
+StartLimitBurst=3
+StartLimitInterval=10
+TasksMax=infinity
+OOMScoreAdjust=-1000
+
+[Install]
+WantedBy=multi-user.target \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/templates/server.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/server.hcl.j2
new file mode 100644
index 0000000000..e19dea9e6f
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/server.hcl.j2
@@ -0,0 +1,58 @@
+{% if nomad_node_server | bool == True %}
+server {
+ enabled = {{ nomad_node_server | bool | lower }}
+
+ {% if nomad_node_server | bool -%}
+ bootstrap_expect = {{ nomad_bootstrap_expect }}
+ {%- endif %}
+
+ {% if nomad_node_server | bool -%}
+ raft_protocol = 3
+ {%- endif %}
+
+ {% if nomad_authoritative_region is defined %}
+ authoritative_region = "{{ nomad_authoritative_region }}"
+ {% endif %}
+
+{% if nomad_use_consul == False %}
+ {% if nomad_retry_join | bool -%}
+ retry_join = [
+ {%- set comma = joiner(",") -%}
+ {% for server in nomad_servers -%}
+ {{ comma() }}"{{ hostvars[server]['nomad_advertise_address'] | ipwrap }}"
+ {%- endfor -%} ]
+ retry_max = {{ nomad_retry_max }}
+ retry_interval = "{{ nomad_retry_interval }}"
+ {% else -%}
+ start_join = [
+ {%- set comma = joiner(",") -%}
+ {% for server in nomad_servers -%}
+ {{ comma() }}"{{ hostvars[server]['nomad_advertise_address'] | ipwrap }}"
+ {%- endfor -%} ]
+ {%- endif %}
+{% endif %}
+
+ encrypt = "{{ nomad_encrypt | default('') }}"
+
+ {% if nomad_node_gc_threshold -%}
+ node_gc_threshold = "{{ nomad_node_gc_threshold }}"
+ {%- endif %}
+
+ {% if nomad_job_gc_interval -%}
+ job_gc_interval = "{{ nomad_job_gc_interval }}"
+ {%- endif %}
+
+ {% if nomad_job_gc_threshold -%}
+ job_gc_threshold = "{{ nomad_job_gc_threshold }}"
+ {%- endif %}
+
+ {% if nomad_eval_gc_threshold -%}
+ eval_gc_threshold = "{{ nomad_eval_gc_threshold }}"
+ {%- endif %}
+
+ {% if nomad_deployment_gc_threshold -%}
+ deployment_gc_threshold = "{{ nomad_deployment_gc_threshold }}"
+ {%- endif %}
+
+}
+{% endif %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j2
new file mode 100644
index 0000000000..14be0d9548
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/telemetry.hcl.j2
@@ -0,0 +1,26 @@
+{% if nomad_use_telemetry | bool == True %}
+telemetry {
+ # Specifies if gauge values should be prefixed with the local hostname.
+ disable_hostname = {{ nomad_telemetry_disable_hostname | bool | lower }}
+
+ # Specifies the time interval at which the Nomad agent collects telemetry
+ # data.
+ collection_interval = "{{ nomad_telemetry_collection_interval }}"
+
+ # Specifies if gauge values should be prefixed with the name of the node,
+ # instead of the hostname. If set it will override disable_hostname value.
+ use_node_name = {{ nomad_telemetry_use_node_name | bool | lower }}
+
+ # Specifies if Nomad should publish runtime metrics of allocations.
+ publish_allocation_metrics = {{ nomad_telemetry_publish_allocation_metrics | bool | lower }}
+
+ # Specifies if Nomad should publish runtime metrics of nodes.
+ publish_node_metrics = {{ nomad_telemetry_publish_node_metrics | bool | lower }}
+
+ # Specifies whether the agent should make Prometheus formatted metrics
+ # available at /v1/metrics?format=prometheus, i.e. whether the agent
+ # exposes its metrics in Prometheus exposition format via the HTTP
+ # API.
+ prometheus_metrics = {{ nomad_telemetry_prometheus_metrics | bool | lower }}
+}
+{% endif %}
diff --git a/fdio.infra.ansible/roles/nomad/templates/tls.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/tls.hcl.j2
new file mode 100644
index 0000000000..0a1a5b20a4
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/tls.hcl.j2
@@ -0,0 +1,36 @@
+{% if nomad_use_tls | bool %}
+tls {
+ # Specifies the path to the CA certificate to use for Nomad's TLS
+ # communication.
+ ca_file = "{{ nomad_tls_ca_file }}"
+
+ # Specifies the path to the certificate file used for Nomad's TLS
+ # communication.
+ cert_file = "{{ nomad_tls_cert_file }}"
+
+ # Specifies the path to the key file to use for Nomad's TLS communication.
+ key_file = "{{ nomad_tls_key_file }}"
+
+ # Specifies if TLS should be enabled on the HTTP endpoints on the Nomad
+ # agent, including the API.
+ http = {{ nomad_tls_http | bool | lower }}
+
+ # Specifies if TLS should be enabled on the RPC endpoints and Raft traffic
+ # between the Nomad servers. Enabling this on a Nomad client makes the
+ # client use TLS for making RPC requests to the Nomad servers.
+ rpc = {{ nomad_tls_rpc | bool | lower }}
+
+ # This option should be used only when the cluster is being upgraded to
+ # TLS, and removed after the migration is complete. This allows the agent
+ # to accept both TLS and plaintext traffic.
+ rpc_upgrade_mode = {{ nomad_tls_rpc_upgrade_mode | bool | lower }}
+
+ # Specifies agents should require client certificates for all incoming
+ # HTTPS requests. The client certificates must be signed by the same CA
+ # as Nomad.
+ verify_https_client = {{ nomad_tls_verify_https_client | bool | lower }}
+
+ # Specifies if outgoing TLS connections should verify the server's hostname.
+ verify_server_hostname = {{ nomad_tls_verify_server_hostname | bool | lower }}
+}
+{% endif %}
diff --git a/fdio.infra.ansible/roles/nomad/templates/vault.hcl.j2 b/fdio.infra.ansible/roles/nomad/templates/vault.hcl.j2
new file mode 100644
index 0000000000..7911cbc5c4
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/templates/vault.hcl.j2
@@ -0,0 +1,69 @@
+{% if nomad_use_vault | bool == True %}
+vault {
+ # Specifies the address to the Vault server. This must include the protocol,
+ # host/ip, and port given in the format protocol://host:port. If your Vault
+ # installation is behind a load balancer, this should be the address of the
+ # load balancer.
+ address = "{{ nomad_vault_address }}"
+
+ # Specifies if users submitting jobs to the Nomad server should be required
+ # to provide their own Vault token, proving they have access to the policies
+ # listed in the job. This option should be disabled in an untrusted
+ # environment.
+ allow_unauthenticated = {{ nomad_vault_allow_unauthenticated | bool | lower }}
+
+ # Specifies if the Vault integration should be activated.
+ enabled = {{ nomad_vault_enabled | bool | lower }}
+
+ # Specifies the role to create tokens from. The token given to Nomad does
+ # not have to be created from this role but must have "update" capability
+ # on "auth/token/create/<create_from_role>" path in Vault. If this value is
+ # unset and the token is created from a role, the value is defaulted to the
+ # role the token is from. This is largely for backwards compatibility. It
+ # is recommended to set the create_from_role field if Nomad is deriving
+ # child tokens from a role.
+ create_from_role = "{{ nomad_vault_create_from_role }}"
+
+ # Specifies the TTL of created tokens when using a root token. This is
+ # specified using a label suffix like "30s" or "1h".
+ task_token_ttl = "{{ nomad_vault_task_token_ttl }}"
+
+ {% if nomad_vault_use_ssl | bool == True -%}
+ # Specifies an optional path to the CA certificate used for Vault
+ # communication. If unspecified, this will fallback to the default system
+ # CA bundle, which varies by OS and version.
+ ca_file = "{{ nomad_vault_ca_file }}"
+
+ # Specifies an optional path to a folder containing CA certificates to be
+ # used for Vault communication. If unspecified, this will fallback to the
+ # default system CA bundle, which varies by OS and version.
+ ca_path = "{{ nomad_vault_ca_path }}"
+
+ # Specifies the path to the certificate used for Vault communication. This
+ # must be set if tls_require_and_verify_client_cert is enabled in Vault.
+ cert_file = "{{ nomad_vault_cert_file }}"
+
+ # Specifies the path to the private key used for Vault communication. If
+ # this is set then you need to also set cert_file. This must be set if
+ # tls_require_and_verify_client_cert is enabled in Vault.
+ key_file = "{{ nomad_vault_key_file }}"
+
+ # Specifies the Vault namespace used by the Vault integration. If non-empty,
+ # this namespace will be used on all Vault API calls.
+ namespace = "{{ nomad_vault_namespace }}"
+
+ # Specifies an optional string used to set the SNI host when connecting to
+ # Vault via TLS.
+ tls_server_name = "{{ nomad_vault_tls_server_name }}"
+
+ # Specifies if SSL peer validation should be enforced.
+ tls_skip_verify = {{ nomad_vault_tls_skip_verify | bool | lower }}
+ {% endif %}
+
+ # Specifies the parent Vault token to use to derive child tokens for jobs
+ # requesting tokens. Only required on Nomad servers. Nomad client agents
+ # use the allocation's token when contacting Vault. Visit the Vault
+ # Integration Guide to see how to generate an appropriate token in Vault.
+ token = "{{ nomad_vault_token }}"
+}
+{% endif %} \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/nomad/vars/main.yaml b/fdio.infra.ansible/roles/nomad/vars/main.yaml
new file mode 100644
index 0000000000..791eeadb06
--- /dev/null
+++ b/fdio.infra.ansible/roles/nomad/vars/main.yaml
@@ -0,0 +1,5 @@
+---
+# file: vars/main.yaml
+
+nomad_node_client: "{{ (nomad_node_role == 'client') or (nomad_node_role == 'both') }}"
+nomad_node_server: "{{ (nomad_node_role == 'server') or (nomad_node_role == 'both') }}"
diff --git a/fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml b/fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml
new file mode 100644
index 0000000000..5a732e5539
--- /dev/null
+++ b/fdio.infra.ansible/roles/performance_tuning/defaults/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: roles/performance_tuning/defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - "cpufrequtils"
+
+packages_by_distro:
+ ubuntu:
+ jammy:
+ - []
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
diff --git a/fdio.infra.ansible/roles/performance_tuning/files/cpufrequtils b/fdio.infra.ansible/roles/performance_tuning/files/cpufrequtils
new file mode 100644
index 0000000000..03070fefe1
--- /dev/null
+++ b/fdio.infra.ansible/roles/performance_tuning/files/cpufrequtils
@@ -0,0 +1 @@
+GOVERNOR="performance"
diff --git a/fdio.infra.ansible/roles/performance_tuning/files/disable-turbo-boost.service b/fdio.infra.ansible/roles/performance_tuning/files/disable-turbo-boost.service
new file mode 100644
index 0000000000..e04729de50
--- /dev/null
+++ b/fdio.infra.ansible/roles/performance_tuning/files/disable-turbo-boost.service
@@ -0,0 +1,10 @@
+[Unit]
+Description=Disable Turbo Boost on Intel CPU
+
+[Service]
+ExecStart=/bin/sh -c 'for core in `cat /proc/cpuinfo | grep processor | awk \'{print $3}\'`; do sudo wrmsr -p$core 0x1a0 0x4000850089; done'
+ExecStop=/bin/sh -c 'for core in `cat /proc/cpuinfo | grep processor | awk \'{print $3}\'`; do sudo wrmsr -p$core 0x1a0 0x850089; done'
+RemainAfterExit=yes
+
+[Install]
+WantedBy=sysinit.target
diff --git a/fdio.infra.ansible/roles/performance_tuning/files/irqbalance b/fdio.infra.ansible/roles/performance_tuning/files/irqbalance
new file mode 100644
index 0000000000..861be02fb3
--- /dev/null
+++ b/fdio.infra.ansible/roles/performance_tuning/files/irqbalance
@@ -0,0 +1,25 @@
+# irqbalance is a daemon process that distributes interrupts across
+# CPUs on SMP systems. The default is to rebalance once every 10
+# seconds. This is the environment file that is specified to systemd via the
+# EnvironmentFile key in the service unit file (or via whatever method the
+# init system you're using has).
+#
+# ONESHOT=yes
+# after starting, wait for a minute, then look at the interrupt
+# load and balance it once; after balancing exit and do not change
+# it again.
+#IRQBALANCE_ONESHOT=
+
+#
+# IRQBALANCE_BANNED_CPUS
+ # 64 bit bitmask which allows you to indicate which CPUs should
+ # be skipped when rebalancing IRQs. CPU numbers which have their
+ # corresponding bits set to one in this mask will not have any
+ # IRQs assigned to them on rebalance
+#
+IRQBALANCE_BANNED_CPUS="{{ ansible_processor_cores | irqbalance_banned_cpu_mask(ansible_processor_count, ansible_processor_threads_per_core) }}"
+#
+# IRQBALANCE_ARGS
+# append any args here to the irqbalance daemon as documented in the man page
+#
+#IRQBALANCE_ARGS=
diff --git a/fdio.infra.ansible/roles/performance_tuning/filter_plugins/main.py b/fdio.infra.ansible/roles/performance_tuning/filter_plugins/main.py
new file mode 100644
index 0000000000..d76f6fe166
--- /dev/null
+++ b/fdio.infra.ansible/roles/performance_tuning/filter_plugins/main.py
@@ -0,0 +1,29 @@
+"""Extra Ansible filters"""
+
+def irqbalance_banned_cpu_mask(
+ processor_cores, processor_count, processor_threads_per_core):
+ """
+ Return irqbalance CPU mask.
+ Args:
+ processor_cores (int): Cores per physical processor.
+ processor_count (int): Number of physical processors.
+ processor_threads_per_core (int): Threads per core.
+ Returns:
+ str: irqbalance_banned_cpus.
+ """
+ mask = int("1" * 128, 2)
+
+ for i in range(processor_count * processor_threads_per_core):
+ mask &= ~(1 << i * processor_cores)
+
+ import re
+ return ",".join(re.findall('.{1,8}', str(hex(mask))[2:]))
+
+
+class FilterModule(object):
+ """Return filter plugin"""
+
+ @staticmethod
+ def filters():
+ """Return filter"""
+ return {'irqbalance_banned_cpu_mask': irqbalance_banned_cpu_mask}
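As a sanity check, the filter above can be exercised directly; for a hypothetical 2-socket host with 8 cores per socket and 2 threads per core it bans every CPU from IRQ balancing except CPUs 0, 8, 16 and 24 (assuming the plugin file is importable as filter_plugins/main.py):

    from filter_plugins.main import irqbalance_banned_cpu_mask

    # Arguments mirror the template call: cores per socket, socket count,
    # threads per core.
    print(irqbalance_banned_cpu_mask(8, 2, 2))
    # ffffffff,ffffffff,ffffffff,fefefefe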
diff --git a/fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml b/fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml
new file mode 100644
index 0000000000..5a48fc37b4
--- /dev/null
+++ b/fdio.infra.ansible/roles/performance_tuning/handlers/main.yaml
@@ -0,0 +1,13 @@
+---
+# file handlers/main.yaml
+
+- name: Update GRUB
+ ansible.builtin.command: update-grub
+ tags:
+ - update-grub
+
+- name: Reboot Server
+ ansible.builtin.reboot:
+ reboot_timeout: 3600
+ tags:
+ - reboot-server
diff --git a/fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml b/fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml
new file mode 100644
index 0000000000..cc904e23e9
--- /dev/null
+++ b/fdio.infra.ansible/roles/performance_tuning/tasks/main.yaml
@@ -0,0 +1,210 @@
+---
+# file: roles/performance_tuning/tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - perf-inst-prerequisites
+
+- name: Inst - Machine Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - perf-inst-prerequisites
+
+- name: Conf - Turbo Boost
+ import_tasks: turbo_boost.yaml
+ when: >
+ cpu_microarchitecture == "skylake" or
+ cpu_microarchitecture == "cascadelake" or
+ cpu_microarchitecture == "icelake" or
+ cpu_microarchitecture == "sapphirerapids"
+ tags:
+ - perf-conf-turbo-boost
+
+- name: Conf - Adjust max_map_count
+ # this file contains the maximum number of memory map areas a process
+ # may have. memory map areas are used as a side-effect of calling
+ # malloc, directly by mmap and mprotect, and also when loading shared
+ # libraries.
+ #
+ # while most applications need less than a thousand maps, certain
+ # programs, particularly malloc debuggers, may consume lots of them,
+ # e.g., up to one or two maps per allocation.
+ # must be greater than or equal to (2 * vm.nr_hugepages).
+ ansible.builtin.sysctl:
+ name: "vm.max_map_count"
+ value: "{{ sysctl.vm.nr_hugepages * 4 }}"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - perf-conf-sysctl
+
+- name: Conf - Adjust hugetlb_shm_group
+ # hugetlb_shm_group contains group id that is allowed to create sysv
+ # shared memory segment using hugetlb page.
+ ansible.builtin.sysctl:
+ name: "vm.hugetlb_shm_group"
+ value: "1000"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - perf-conf-sysctl
+
+- name: Conf - Adjust swappiness
+ # this control is used to define how aggressively the kernel will swap
+ # memory pages. higher values will increase aggressiveness, lower values
+ # decrease the amount of swap. a value of 0 instructs the kernel not to
+ # initiate swap until the amount of free and file-backed pages is less
+ # than the high water mark in a zone.
+ ansible.builtin.sysctl:
+ name: "vm.swappiness"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - perf-conf-sysctl
+
+- name: Conf - Adjust shmmax
+ # shared memory max must be greater than or equal to the total size of hugepages.
+ # for 2mb pages, totalhugepagesize = vm.nr_hugepages * 2 * 1024 * 1024
+ # if the existing kernel.shmmax setting (cat /proc/sys/kernel/shmmax)
+ # is greater than the calculated totalhugepagesize then set this parameter
+ # to current shmmax value.
+ ansible.builtin.sysctl:
+ name: "kernel.shmmax"
+ value: "{{ sysctl.vm.nr_hugepages * 2 * 1024 * 1024 }}"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - perf-conf-sysctl
+
+- name: Conf - Adjust watchdog_cpumask
+ # this value can be used to control on which cpus the watchdog may run.
+ # the default cpumask is all possible cores, but if no_hz_full is
+ # enabled in the kernel config, and cores are specified with the
+ # nohz_full= boot argument, those cores are excluded by default.
+ # offline cores can be included in this mask, and if the core is later
+ # brought online, the watchdog will be started based on the mask value.
+ #
+ # typically this value would only be touched in the nohz_full case
+ # to re-enable cores that by default were not running the watchdog,
+ # if a kernel lockup was suspected on those cores.
+ ansible.builtin.sysctl:
+ name: "kernel.watchdog_cpumask"
+ value: "{{ sysctl.kernel.watchdog_cpumask }}"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - perf-conf-sysctl
+
+- name: Conf - Adjust randomize_va_space
+ # this option can be used to select the type of process address
+ # space randomization that is used in the system, for architectures
+ # that support this feature.
+ # 0 - turn the process address space randomization off. this is the
+ # default for architectures that do not support this feature anyways,
+ # and kernels that are booted with the "norandmaps" parameter.
+ ansible.builtin.sysctl:
+ name: "kernel.randomize_va_space"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - perf-conf-sysctl
+
+- name: Conf - Cpufrequtils
+ ansible.builtin.copy:
+ src: "files/cpufrequtils"
+ dest: "/etc/default/cpufrequtils"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ tags:
+ - perf-conf-cpufrequtils
+
+- name: Conf - Irqbalance
+ ansible.builtin.template:
+ src: "files/irqbalance"
+ dest: "/etc/default/irqbalance"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ tags:
+ - perf-conf-irqbalance
+
+- name: Conf - Kernel Parameters
+ ansible.builtin.lineinfile:
+ path: "/etc/default/grub"
+ state: "present"
+ regexp: "^GRUB_CMDLINE_LINUX="
+ line: "GRUB_CMDLINE_LINUX=\"{% for key, value in grub.items() %}{% if value is sameas true %}{{key}} {% else %}{{key}}={{value}} {% endif %}{% endfor %}\""
+ notify:
+ - "Update GRUB"
+ tags:
+ - perf-conf-grub
+
+- meta: flush_handlers
+
+- name: Conf - Load Kernel Modules By Default
+ ansible.builtin.lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "vfio-pci"
+ notify:
+ - "Reboot Server"
+ tags:
+ - perf-conf-load-kernel-modules
+
+- name: Conf - Create a directory for 1G HugeTLBs hugepages
+ ansible.builtin.file:
+ path: "/dev/hugepages1G"
+ state: "directory"
+ mode: 0755
+ tags:
+ - perf-conf-hugepages-1g
+
+- name: Conf - Mount 1G HugeTLBs hugepages
+ ansible.builtin.mount:
+ path: "/dev/hugepages1G"
+ src: "hugetlbfs"
+ opts: "pagesize=1G"
+ boot: false
+ state: "mounted"
+ fstype: "hugetlbfs"
+ tags:
+ - perf-conf-hugepages-1g
+
+- name: Conf - Create a directory for 2M HugeTLBs hugepages
+ ansible.builtin.file:
+ path: "/dev/hugepages2M"
+ state: "directory"
+ mode: 0755
+ tags:
+ - perf-conf-hugepages-2m
+
+- name: Conf - Mount 2M HugeTLBs hugepages
+ ansible.builtin.mount:
+ path: "/dev/hugepages2M"
+ src: "hugetlbfs"
+ opts: "pagesize=2M"
+ boot: false
+ state: "mounted"
+ fstype: "hugetlbfs"
+ tags:
+ - perf-conf-hugepages-2m
+
+- meta: flush_handlers
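The GRUB_CMDLINE_LINUX line above is assembled from a grub dictionary supplied by the inventory: boolean true entries become bare flags, everything else becomes key=value. A short sketch of that rendering with hypothetical values:

    grub = {
        "isolcpus": "1-27,29-55",
        "nohz_full": "1-27,29-55",
        "intel_iommu": "on",
        "nmi_watchdog": 0,
        "quiet": True,
    }

    # Mirrors the Jinja loop: "{{key}} " for true booleans, "{{key}}={{value}} " otherwise.
    cmdline = "".join(
        f"{key} " if value is True else f"{key}={value} "
        for key, value in grub.items()
    )
    print(f'GRUB_CMDLINE_LINUX="{cmdline}"')
    # GRUB_CMDLINE_LINUX="isolcpus=1-27,29-55 nohz_full=1-27,29-55 intel_iommu=on nmi_watchdog=0 quiet "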
diff --git a/fdio.infra.ansible/roles/performance_tuning/tasks/turbo_boost.yaml b/fdio.infra.ansible/roles/performance_tuning/tasks/turbo_boost.yaml
new file mode 100644
index 0000000000..cff71e9ce3
--- /dev/null
+++ b/fdio.infra.ansible/roles/performance_tuning/tasks/turbo_boost.yaml
@@ -0,0 +1,44 @@
+---
+# file: roles/performance_tuning/tasks/turbo_boost.yaml
+
+- name: Inst - Update Package Cache (APT)
+ apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - turbo-inst-prerequisites
+
+- name: Inst - msr-tools
+ package:
+ name:
+ - "msr-tools"
+ state: latest
+ tags:
+ - turbo-inst-prerequisites
+
+- name: Conf - Load msr By Default
+ lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "msr"
+ tags:
+ - turbo-conf-msr
+
+- name: Conf - Custom Startup Service Hook
+ copy:
+ src: "files/disable-turbo-boost.service"
+ dest: "/etc/systemd/system/disable-turbo-boost.service"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ tags:
+ - turbo-conf-msr
+
+- name: Conf - Custom Startup Service Hook Enable
+ service:
+ name: "disable-turbo-boost"
+ enabled: true
+ tags:
+ - turbo-conf-msr
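The values written by disable-turbo-boost.service are IA32_MISC_ENABLE (MSR 0x1a0) with the turbo-disable bit toggled. A minimal sketch of the arithmetic, assuming the 0x850089 base value used in the unit file above (bit 38 is "Turbo Mode Disable" per the Intel SDM):

    BASE = 0x850089              # value restored by ExecStop (turbo enabled)
    TURBO_DISABLE = 1 << 38      # IA32_MISC_ENABLE bit 38

    assert BASE | TURBO_DISABLE == 0x4000850089   # value written by ExecStart
    print(hex(BASE | TURBO_DISABLE))              # 0x4000850089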
diff --git a/fdio.infra.ansible/roles/prometheus_exporter/defaults/main.yaml b/fdio.infra.ansible/roles/prometheus_exporter/defaults/main.yaml
new file mode 100644
index 0000000000..7291ce0276
--- /dev/null
+++ b/fdio.infra.ansible/roles/prometheus_exporter/defaults/main.yaml
@@ -0,0 +1,31 @@
+---
+# file: roles/prometheus_exporter/defaults/main.yaml
+
+# Inst - Exporters.
+packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ - "python3-docker"
+ - "python3-dockerpty"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+ne_image: "{{ ne_image_by_arch[ansible_machine] }}"
+
+ne_image_by_arch:
+ aarch64: "prom/node-exporter:v1.3.1"
+ x86_64: "prom/node-exporter:v1.3.1"
+
+be_image: "{{ be_image_by_arch[ansible_machine] }}"
+
+be_image_by_arch:
+ aarch64: "prom/blackbox-exporter:v0.21.1"
+ x86_64: "prom/blackbox-exporter:v0.21.1"
diff --git a/fdio.infra.ansible/roles/prometheus_exporter/files/blackbox.yml b/fdio.infra.ansible/roles/prometheus_exporter/files/blackbox.yml
new file mode 100644
index 0000000000..526dcf5dce
--- /dev/null
+++ b/fdio.infra.ansible/roles/prometheus_exporter/files/blackbox.yml
@@ -0,0 +1,25 @@
+modules:
+ http_2xx:
+ prober: http
+ timeout: 5s
+ http:
+ valid_http_versions: ["HTTP/1.1", "HTTP/2.0"]
+ no_follow_redirects: false
+ fail_if_ssl: false
+ fail_if_not_ssl: true
+ tls_config:
+ insecure_skip_verify: false
+ preferred_ip_protocol: "ip4"
+ icmp_v4:
+ prober: icmp
+ timeout: 5s
+ icmp:
+ preferred_ip_protocol: "ip4"
+ dns_udp:
+ prober: dns
+ timeout: 5s
+ dns:
+ query_name: "jenkins.fd.io"
+ query_type: "A"
+ valid_rcodes:
+ - NOERROR
diff --git a/fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml b/fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml
new file mode 100644
index 0000000000..7d8b861882
--- /dev/null
+++ b/fdio.infra.ansible/roles/prometheus_exporter/meta/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: roles/prometheus_exporter/meta/main.yaml
+
+dependencies: [docker]
+
+galaxy_info:
+ role_name: prometheus_exporter
+ author: fd.io
+ description: Prometheus Exporters.
+ company: none
+ license: "license (Apache)"
+ min_ansible_version: 2.9
+ platforms:
+ - name: Ubuntu
+ versions:
+ - jammy
+ galaxy_tags:
+ - prometheus
diff --git a/fdio.infra.ansible/roles/prometheus_exporter/tasks/main.yaml b/fdio.infra.ansible/roles/prometheus_exporter/tasks/main.yaml
new file mode 100644
index 0000000000..ef9da40175
--- /dev/null
+++ b/fdio.infra.ansible/roles/prometheus_exporter/tasks/main.yaml
@@ -0,0 +1,72 @@
+---
+# file: roles/prometheus_exporter/tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - prometheus-inst
+
+- name: Inst - Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - prometheus-inst
+
+- name: Inst - Start a NodeExporter container
+ docker_container:
+ name: "NodeExporter"
+ image: "{{ ne_image }}"
+ state: "started"
+ restart_policy: "unless-stopped"
+ detach: true
+ ports:
+ - "9100:9100"
+ privileged: true
+ command:
+ - "--path.procfs=/host/proc"
+ - "--path.rootfs=/rootfs"
+ - "--path.sysfs=/host/sys"
+ - "--collector.filesystem.mount-points-exclude=^/(sys|proc|dev|host|etc)($$|/)"
+ volumes:
+ - "/:/rootfs:ro"
+ - "/proc:/host/proc:ro"
+ - "/sys:/host/sys:ro"
+ tags:
+ - prometheus-inst
+
+- name: Inst - Create a Config Directory
+ ansible.builtin.file:
+ path: "/etc/prometheus/"
+ state: "directory"
+ mode: "0755"
+ tags:
+ - prometheus-conf-blackbox-exporter
+
+- name: Conf - Prometheus Blackbox Exporter
+ ansible.builtin.copy:
+ src: "files/blackbox.yml"
+ dest: "/etc/prometheus/blackbox.yml"
+ tags:
+ - prometheus-conf-blackbox-exporter
+
+- name: Inst - Start a BlackBoxExporter container
+ docker_container:
+ name: "BlackBoxExporter"
+ image: "{{ be_image }}"
+ state: "started"
+ restart_policy: "unless-stopped"
+ detach: true
+ ports:
+ - "9115:9115"
+ privileged: true
+ command:
+ - "--config.file=/config/blackbox.yml"
+ volumes:
+ - "/etc/prometheus/blackbox.yml:/config/blackbox.yml:ro"
+ tags:
+ - prometheus-inst
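Once both containers are running, the exporters can be checked from the host: the node exporter serves metrics on port 9100 and the blackbox exporter answers probe requests on port 9115. A minimal sketch using Python requests; the probed target is only an example:

    import requests

    # Node exporter: plain metrics scrape.
    print(requests.get("http://localhost:9100/metrics", timeout=10).text[:200])

    # Blackbox exporter: probe an HTTPS target with the http_2xx module
    # defined in blackbox.yml above.
    resp = requests.get(
        "http://localhost:9115/probe",
        params={"module": "http_2xx", "target": "https://jenkins.fd.io"},
        timeout=10,
    )
    print(resp.text)  # Prometheus-format metrics, e.g. "probe_success 1"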
diff --git a/fdio.infra.ansible/roles/python_env/defaults/main.yaml b/fdio.infra.ansible/roles/python_env/defaults/main.yaml
new file mode 100644
index 0000000000..4b572c0dd0
--- /dev/null
+++ b/fdio.infra.ansible/roles/python_env/defaults/main.yaml
@@ -0,0 +1,25 @@
+---
+# file: defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - "virtualenv"
+
+packages_by_distro:
+ ubuntu:
+ jammy:
+ - "python3-all"
+ - "python3-apt"
+ - "python3-cffi"
+ - "python3-cffi-backend"
+ - "python3-dev"
+ - "python3-pip"
+ - "python3-pyelftools"
+ - "python3-setuptools"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
diff --git a/fdio.infra.ansible/roles/python_env/tasks/main.yaml b/fdio.infra.ansible/roles/python_env/tasks/main.yaml
new file mode 100644
index 0000000000..02850110a9
--- /dev/null
+++ b/fdio.infra.ansible/roles/python_env/tasks/main.yaml
@@ -0,0 +1,62 @@
+---
+# file: tasks/main.yaml
+
+- name: Inst - Update package cache (apt)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - common-inst-prerequisites
+
+- name: Inst - Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - common-inst-prerequisites
+
+- name: Inst - CSIT PIP requirements
+ ansible.builtin.pip:
+ name:
+ - "ecdsa==0.18.0"
+ - "paramiko==3.3.1"
+ - "pycrypto==2.6.1"
+ - "python-dateutil==2.8.2"
+ - "PyYAML==6.0.1"
+ - "requests==2.31.0"
+ - "robotframework==6.1.1"
+ - "scapy==2.4.5"
+ - "scp==0.14.5"
+ - "ansible==8.2.0"
+ - "ansible-core==2.15.2"
+ - "dill==0.3.7"
+ - "numpy==1.25.2"
+ - "scipy==1.11.1"
+ - "ply==3.11"
+ - "jsonschema==4.18.4"
+ - "rfc3339-validator==0.1.4"
+ - "rfc3987==1.3.8"
+ - "attrs==23.1.0"
+ - "bcrypt==4.0.1"
+ - "certifi==2023.7.22"
+ - "cffi==1.15.1"
+ - "charset-normalizer==3.2.0"
+ - "cryptography==41.0.3"
+ - "idna==3.4"
+ - "Jinja2==3.1.2"
+ - "jsonschema-specifications==2023.7.1"
+ - "MarkupSafe==2.1.3"
+ - "packaging==23.1"
+ - "pycparser==2.21"
+ - "PyNaCl==1.5.0"
+ - "referencing==0.30.0"
+ - "resolvelib==1.0.1"
+ - "rpds-py==0.9.2"
+ - "six==1.16.0"
+ - "urllib3==2.0.4"
+ environment:
+ ANSIBLE_SKIP_CONFLICT_CHECK: 1
+ tags:
+ - common-inst-pip
diff --git a/fdio.infra.ansible/roles/topology/tasks/main.yaml b/fdio.infra.ansible/roles/topology/tasks/main.yaml
new file mode 100644
index 0000000000..1dc704331d
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/tasks/main.yaml
@@ -0,0 +1,23 @@
+---
+# file: tasks/main.yaml
+
+- name: Create Topology File
+ ansible.builtin.template:
+ src: "templates/topology-{{ cloud_topology }}.j2"
+ dest: "../topologies/available/{{ cloud_topology }}-{{ testbed_name }}.yaml"
+ tags:
+ - create-topology-file
+
+- name: Create Inventory Folder
+ ansible.builtin.file:
+ path: "./inventories/cloud_inventory/"
+ state: directory
+ tags:
+ - create-inventory-folder-cloud
+
+- name: Create Hosts File
+ ansible.builtin.template:
+ src: "templates/hosts.j2"
+ dest: "./inventories/cloud_inventory/hosts"
+ tags:
+ - create-hosts-file-cloud
diff --git a/fdio.infra.ansible/roles/topology/templates/hosts.j2 b/fdio.infra.ansible/roles/topology/templates/hosts.j2
new file mode 100644
index 0000000000..f02586cc99
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/hosts.j2
@@ -0,0 +1,9 @@
+all:
+ children:
+ tg:
+ hosts:
+ {{ tg_public_ip }}
+ sut:
+ hosts:
+ {{ dut1_public_ip | default() }}
+ {{ dut2_public_ip | default() }}
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-1n-aws-c5n.j2 b/fdio.infra.ansible/roles/topology/templates/topology-1n-aws-c5n.j2
new file mode 100644
index 0000000000..649d7e746c
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-1n-aws-c5n.j2
@@ -0,0 +1,30 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/1_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 1-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 50GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-50G
+ port2:
+ # tg_instance/p2 - 50GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link1
+ model: Amazon-Nitro-50G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-1n-c6gn.j2 b/fdio.infra.ansible/roles/topology/templates/topology-1n-c6gn.j2
new file mode 100644
index 0000000000..647a40b1e7
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-1n-c6gn.j2
@@ -0,0 +1,30 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/1_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 1-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-100G
+ port2:
+ # tg_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link1
+ model: Amazon-Nitro-100G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-1n-c6in.j2 b/fdio.infra.ansible/roles/topology/templates/topology-1n-c6in.j2
new file mode 100644
index 0000000000..7d3f4e5318
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-1n-c6in.j2
@@ -0,0 +1,30 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/1_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 1-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # tg_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link1
+ model: Amazon-Nitro-200G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-2n-aws-c5n.j2 b/fdio.infra.ansible/roles/topology/templates/topology-2n-aws-c5n.j2
new file mode 100644
index 0000000000..de43291cc6
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-2n-aws-c5n.j2
@@ -0,0 +1,51 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/2_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 2-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 50GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-50G
+ port2:
+ # tg_instance/p2 - 50GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-50G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 50GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-50G
+ port2:
+ # dut1_instance/p2 - 50GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-50G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-2n-c6gn.j2 b/fdio.infra.ansible/roles/topology/templates/topology-2n-c6gn.j2
new file mode 100644
index 0000000000..e693f6c42c
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-2n-c6gn.j2
@@ -0,0 +1,51 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/2_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 2-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-100G
+ port2:
+ # tg_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-100G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-100G
+ port2:
+ # dut1_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-100G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-2n-c6in.j2 b/fdio.infra.ansible/roles/topology/templates/topology-2n-c6in.j2
new file mode 100644
index 0000000000..ef7b464967
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-2n-c6in.j2
@@ -0,0 +1,51 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/2_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 2-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # tg_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-200G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # dut1_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-200G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-3n-aws-c5n.j2 b/fdio.infra.ansible/roles/topology/templates/topology-3n-aws-c5n.j2
new file mode 100644
index 0000000000..b353aa5ad1
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-3n-aws-c5n.j2
@@ -0,0 +1,73 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/3_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 3-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 50GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-50G
+ port2:
+ # tg_instance/p2 - 50GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-50G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 50GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-50G
+ port2:
+ # dut1_instance/p2 - 50GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link21
+ model: Amazon-Nitro-50G
+ DUT2:
+ type: DUT
+ host: "{{ dut2_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut2_instance/p1 - 50GE port1 on ENA NIC.
+ mac_address: {{ dut2_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link21
+ model: Amazon-Nitro-50G
+ port2:
+        # dut2_instance/p2 - 50GE port2 on ENA NIC.
+ mac_address: {{ dut2_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-50G
+
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-3n-azure-Fsv2.j2 b/fdio.infra.ansible/roles/topology/templates/topology-3n-azure-Fsv2.j2
new file mode 100644
index 0000000000..e4dd6cdbf2
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-3n-azure-Fsv2.j2
@@ -0,0 +1,82 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/3_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 3-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 40GE port1 on Mellanox NIC.
+ mac_address: "{{ tg_if1_mac | lower | replace('-',':') }}"
+ pci_address: "0002:00:02.0"
+ link: link1
+ model: Azure-MLX-40G
+ port2:
+ # tg_instance/p2 - 40GE port2 on Mellanox NIC.
+ mac_address: "{{ tg_if2_mac | lower | replace('-',':') }}"
+ pci_address: "0003:00:02.0"
+ link: link2
+ model: Azure-MLX-40G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ honeycomb:
+ user: admin
+ passwd: admin
+ port: 8183
+ netconf_port: 2831
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 40GE port1 on Mellanox NIC.
+ mac_address: "{{ dut1_if1_mac | lower | replace('-',':') }}"
+ pci_address: "0002:00:02.0"
+ link: link1
+ model: Azure-MLX-40G
+ port2:
+        # dut1_instance/p2 - 40GE port2 on Mellanox NIC.
+ mac_address: "{{ dut1_if2_mac | lower | replace('-',':') }}"
+ pci_address: "0003:00:02.0"
+ link: link21
+ model: Azure-MLX-40G
+ DUT2:
+ type: DUT
+ host: "{{ dut2_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ honeycomb:
+ user: admin
+ passwd: admin
+ port: 8183
+ netconf_port: 2831
+ interfaces:
+ port1:
+        # dut2_instance/p1 - 40GE port1 on Mellanox NIC.
+ mac_address: "{{ dut2_if1_mac | lower | replace('-',':') }}"
+ pci_address: "0002:00:02.0"
+ link: link21
+ model: Azure-MLX-40G
+ port2:
+        # dut2_instance/p2 - 40GE port2 on Mellanox NIC.
+ mac_address: "{{ dut2_if2_mac | lower | replace('-',':') }}"
+ pci_address: "0003:00:02.0"
+ link: link2
+ model: Azure-MLX-40G
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-3n-c6gn.j2 b/fdio.infra.ansible/roles/topology/templates/topology-3n-c6gn.j2
new file mode 100644
index 0000000000..295d457f49
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-3n-c6gn.j2
@@ -0,0 +1,73 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/3_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 3-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-100G
+ port2:
+ # tg_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-100G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-100G
+ port2:
+ # dut1_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link21
+ model: Amazon-Nitro-100G
+ DUT2:
+ type: DUT
+ host: "{{ dut2_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut2_instance/p1 - 100GE port1 on ENA NIC.
+ mac_address: {{ dut2_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link21
+ model: Amazon-Nitro-100G
+ port2:
+        # dut2_instance/p2 - 100GE port2 on ENA NIC.
+ mac_address: {{ dut2_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-100G
+
diff --git a/fdio.infra.ansible/roles/topology/templates/topology-3n-c6in.j2 b/fdio.infra.ansible/roles/topology/templates/topology-3n-c6in.j2
new file mode 100644
index 0000000000..c280f4e7e1
--- /dev/null
+++ b/fdio.infra.ansible/roles/topology/templates/topology-3n-c6in.j2
@@ -0,0 +1,73 @@
+---
+metadata:
+ version: 0.1
+ schema:
+ - resources/topology_schemas/3_node_topology.sch.yaml
+ - resources/topology_schemas/topology.sch.yaml
+ tags: [hw, 3-node]
+
+nodes:
+ TG:
+ type: TG
+ subtype: TREX
+ host: "{{ tg_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ interfaces:
+ port1:
+ # tg_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ tg_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # tg_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ tg_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-200G
+ DUT1:
+ type: DUT
+ host: "{{ dut1_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut1_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ dut1_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link1
+ model: Amazon-Nitro-200G
+ port2:
+ # dut1_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ dut1_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link21
+ model: Amazon-Nitro-200G
+ DUT2:
+ type: DUT
+ host: "{{ dut2_public_ip }}"
+ arch: x86_64
+ port: 22
+ username: testuser
+ password: Csit1234
+ uio_driver: vfio-pci
+ interfaces:
+ port1:
+ # dut2_instance/p1 - 200GE port1 on ENA NIC.
+ mac_address: {{ dut2_if1_mac }}
+ pci_address: "0000:00:06.0"
+ link: link21
+ model: Amazon-Nitro-200G
+ port2:
+        # dut2_instance/p2 - 200GE port2 on ENA NIC.
+ mac_address: {{ dut2_if2_mac }}
+ pci_address: "0000:00:07.0"
+ link: link2
+ model: Amazon-Nitro-200G
+
diff --git a/fdio.infra.ansible/roles/trex/defaults/main.yaml b/fdio.infra.ansible/roles/trex/defaults/main.yaml
new file mode 100644
index 0000000000..18a2b56bda
--- /dev/null
+++ b/fdio.infra.ansible/roles/trex/defaults/main.yaml
@@ -0,0 +1,33 @@
+---
+# file: defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - []
+
+packages_by_distro:
+ ubuntu:
+ - "build-essential"
+ - "gcc-9"
+ - "g++-9"
+ - "libmnl-dev"
+ - "libnuma-dev"
+ - "libpcap-dev"
+ - "librdmacm-dev"
+ - "librdmacm1"
+ - "libssl-dev"
+ - "pciutils"
+ - "python3-pip"
+ - "zlib1g-dev"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+trex_target_dir: "/opt"
+trex_url: "https://github.com/cisco-system-traffic-generator/trex-core/archive"
+trex_version:
+ - "3.03"
diff --git a/fdio.infra.ansible/roles/trex/files/t-rex.patch b/fdio.infra.ansible/roles/trex/files/t-rex.patch
new file mode 100644
index 0000000000..e7db647779
--- /dev/null
+++ b/fdio.infra.ansible/roles/trex/files/t-rex.patch
@@ -0,0 +1,548 @@
+diff --git a/linux_dpdk/ws_main.py b/linux_dpdk/ws_main.py
+index e8d0cd51..a0c01adb 100755
+--- a/linux_dpdk/ws_main.py
++++ b/linux_dpdk/ws_main.py
+@@ -209,7 +209,7 @@ def check_ofed(ctx):
+
+ ofed_ver= 42
+ ofed_ver_show= '4.2'
+-
++ return True
+ if not os.path.isfile(ofed_info):
+ ctx.end_msg('not found', 'YELLOW')
+ return False
+@@ -1552,8 +1552,6 @@ class build_option:
+ flags += ['-DNDEBUG'];
+ else:
+ flags += ['-UNDEBUG'];
+- if bld.env.OFED_OK:
+- flags += ['-DHAVE_IBV_MLX4_WQE_LSO_SEG=1']
+ return (flags)
+
+ def get_bnxt_flags(self):
+diff --git a/src/dpdk/drivers/net/mlx4/mlx4_autoconf.h b/src/dpdk/drivers/net/mlx4/mlx4_autoconf.h
+index b3d68683..35474409 100644
+--- a/src/dpdk/drivers/net/mlx4/mlx4_autoconf.h
++++ b/src/dpdk/drivers/net/mlx4/mlx4_autoconf.h
+@@ -1,3 +1,6 @@
+-#ifndef HAVE_IBV_MLX4_WQE_LSO_SEG
+-#define HAVE_IBV_MLX4_WQE_LSO_SEG
+-#endif
++/* HAVE_IBV_MLX4_BUF_ALLOCATORS is not defined. */
++
++/* HAVE_IBV_MLX4_UAR_MMAP_OFFSET is not defined. */
++
++/* HAVE_IBV_MLX4_WQE_LSO_SEG is not defined. */
++
+diff --git a/src/dpdk/drivers/net/mlx5/mlx5_autoconf.h b/src/dpdk/drivers/net/mlx5/mlx5_autoconf.h
+index 8770fdde..75db5ae8 100644
+--- a/src/dpdk/drivers/net/mlx5/mlx5_autoconf.h
++++ b/src/dpdk/drivers/net/mlx5/mlx5_autoconf.h
+@@ -1,54 +1,362 @@
+-#ifndef HAVE_IBV_DEVICE_COUNTERS_SET_SUPPORT
+-#define HAVE_IBV_DEVICE_COUNTERS_SET_SUPPORT
+-#endif
++/* HAVE_IBV_DEVICE_STRIDING_RQ_SUPPORT is not defined. */
+
+-#ifndef HAVE_IBV_FLOW_DV_SUPPORT
+-#define HAVE_IBV_FLOW_DV_SUPPORT
+-#endif
++#ifndef HAVE_IBV_DEVICE_TUNNEL_SUPPORT
++#define HAVE_IBV_DEVICE_TUNNEL_SUPPORT 1
++#endif /* HAVE_IBV_DEVICE_TUNNEL_SUPPORT */
+
+-#ifndef HAVE_IBV_DEVICE_COUNTERS_SET_V45
+-#define HAVE_IBV_DEVICE_COUNTERS_SET_V45
+-#endif
++/* HAVE_IBV_DEVICE_MPLS_SUPPORT is not defined. */
+
+-#ifndef HAVE_IBV_FLOW_DEVX_COUNTERS
+-#define HAVE_IBV_FLOW_DEVX_COUNTERS
+-#endif
++#ifndef HAVE_IBV_WQ_FLAGS_PCI_WRITE_END_PADDING
++#define HAVE_IBV_WQ_FLAGS_PCI_WRITE_END_PADDING 1
++#endif /* HAVE_IBV_WQ_FLAGS_PCI_WRITE_END_PADDING */
+
+-#ifndef HAVE_IBV_MLX4_WQE_LSO_SEG
+-#define HAVE_IBV_MLX4_WQE_LSO_SEG
+-#endif
++/* HAVE_IBV_WQ_FLAG_RX_END_PADDING is not defined. */
+
++#ifndef HAVE_IBV_MLX5_MOD_SWP
++#define HAVE_IBV_MLX5_MOD_SWP 1
++#endif /* HAVE_IBV_MLX5_MOD_SWP */
+
+-#ifdef SUPPORTED_40000baseKR4_Full
++#ifndef HAVE_IBV_MLX5_MOD_MPW
++#define HAVE_IBV_MLX5_MOD_MPW 1
++#endif /* HAVE_IBV_MLX5_MOD_MPW */
++
++#ifndef HAVE_IBV_MLX5_MOD_CQE_128B_COMP
++#define HAVE_IBV_MLX5_MOD_CQE_128B_COMP 1
++#endif /* HAVE_IBV_MLX5_MOD_CQE_128B_COMP */
++
++#ifndef HAVE_IBV_MLX5_MOD_CQE_128B_PAD
++#define HAVE_IBV_MLX5_MOD_CQE_128B_PAD 1
++#endif /* HAVE_IBV_MLX5_MOD_CQE_128B_PAD */
++
++/* HAVE_IBV_FLOW_DV_SUPPORT is not defined. */
++
++/* HAVE_MLX5DV_DR is not defined. */
++
++/* HAVE_MLX5DV_DR_ESWITCH is not defined. */
++
++/* HAVE_IBV_DEVX_OBJ is not defined. */
++
++/* HAVE_IBV_FLOW_DEVX_COUNTERS is not defined. */
++
++#ifndef HAVE_ETHTOOL_LINK_MODE_25G
++#define HAVE_ETHTOOL_LINK_MODE_25G 1
++#endif /* HAVE_ETHTOOL_LINK_MODE_25G */
++
++#ifndef HAVE_ETHTOOL_LINK_MODE_50G
++#define HAVE_ETHTOOL_LINK_MODE_50G 1
++#endif /* HAVE_ETHTOOL_LINK_MODE_50G */
++
++#ifndef HAVE_ETHTOOL_LINK_MODE_100G
++#define HAVE_ETHTOOL_LINK_MODE_100G 1
++#endif /* HAVE_ETHTOOL_LINK_MODE_100G */
++
++/* HAVE_IBV_DEVICE_COUNTERS_SET_V42 is not defined. */
++
++/* HAVE_IBV_DEVICE_COUNTERS_SET_V45 is not defined. */
++
++#ifndef HAVE_RDMA_NL_NLDEV
++#define HAVE_RDMA_NL_NLDEV 1
++#endif /* HAVE_RDMA_NL_NLDEV */
++
++#ifndef HAVE_RDMA_NLDEV_CMD_GET
++#define HAVE_RDMA_NLDEV_CMD_GET 1
++#endif /* HAVE_RDMA_NLDEV_CMD_GET */
++
++#ifndef HAVE_RDMA_NLDEV_CMD_PORT_GET
++#define HAVE_RDMA_NLDEV_CMD_PORT_GET 1
++#endif /* HAVE_RDMA_NLDEV_CMD_PORT_GET */
++
++#ifndef HAVE_RDMA_NLDEV_ATTR_DEV_INDEX
++#define HAVE_RDMA_NLDEV_ATTR_DEV_INDEX 1
++#endif /* HAVE_RDMA_NLDEV_ATTR_DEV_INDEX */
++
++#ifndef HAVE_RDMA_NLDEV_ATTR_DEV_NAME
++#define HAVE_RDMA_NLDEV_ATTR_DEV_NAME 1
++#endif /* HAVE_RDMA_NLDEV_ATTR_DEV_NAME */
++
++#ifndef HAVE_RDMA_NLDEV_ATTR_PORT_INDEX
++#define HAVE_RDMA_NLDEV_ATTR_PORT_INDEX 1
++#endif /* HAVE_RDMA_NLDEV_ATTR_PORT_INDEX */
++
++/* HAVE_RDMA_NLDEV_ATTR_NDEV_INDEX is not defined. */
++
++#ifndef HAVE_IFLA_NUM_VF
++#define HAVE_IFLA_NUM_VF 1
++#endif /* HAVE_IFLA_NUM_VF */
++
++#ifndef HAVE_IFLA_EXT_MASK
++#define HAVE_IFLA_EXT_MASK 1
++#endif /* HAVE_IFLA_EXT_MASK */
++
++#ifndef HAVE_IFLA_PHYS_SWITCH_ID
++#define HAVE_IFLA_PHYS_SWITCH_ID 1
++#endif /* HAVE_IFLA_PHYS_SWITCH_ID */
++
++#ifndef HAVE_IFLA_PHYS_PORT_NAME
++#define HAVE_IFLA_PHYS_PORT_NAME 1
++#endif /* HAVE_IFLA_PHYS_PORT_NAME */
++
++#ifndef HAVE_IFLA_VXLAN_COLLECT_METADATA
++#define HAVE_IFLA_VXLAN_COLLECT_METADATA 1
++#endif /* HAVE_IFLA_VXLAN_COLLECT_METADATA */
++
++#ifndef HAVE_TCA_CHAIN
++#define HAVE_TCA_CHAIN 1
++#endif /* HAVE_TCA_CHAIN */
++
++#ifndef HAVE_TCA_FLOWER_ACT
++#define HAVE_TCA_FLOWER_ACT 1
++#endif /* HAVE_TCA_FLOWER_ACT */
++
++#ifndef HAVE_TCA_FLOWER_FLAGS
++#define HAVE_TCA_FLOWER_FLAGS 1
++#endif /* HAVE_TCA_FLOWER_FLAGS */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ETH_TYPE
++#define HAVE_TCA_FLOWER_KEY_ETH_TYPE 1
++#endif /* HAVE_TCA_FLOWER_KEY_ETH_TYPE */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ETH_DST
++#define HAVE_TCA_FLOWER_KEY_ETH_DST 1
++#endif /* HAVE_TCA_FLOWER_KEY_ETH_DST */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ETH_DST_MASK
++#define HAVE_TCA_FLOWER_KEY_ETH_DST_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_ETH_DST_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ETH_SRC
++#define HAVE_TCA_FLOWER_KEY_ETH_SRC 1
++#endif /* HAVE_TCA_FLOWER_KEY_ETH_SRC */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ETH_SRC_MASK
++#define HAVE_TCA_FLOWER_KEY_ETH_SRC_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_ETH_SRC_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IP_PROTO
++#define HAVE_TCA_FLOWER_KEY_IP_PROTO 1
++#endif /* HAVE_TCA_FLOWER_KEY_IP_PROTO */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IPV4_SRC
++#define HAVE_TCA_FLOWER_KEY_IPV4_SRC 1
++#endif /* HAVE_TCA_FLOWER_KEY_IPV4_SRC */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IPV4_SRC_MASK
++#define HAVE_TCA_FLOWER_KEY_IPV4_SRC_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_IPV4_SRC_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IPV4_DST
++#define HAVE_TCA_FLOWER_KEY_IPV4_DST 1
++#endif /* HAVE_TCA_FLOWER_KEY_IPV4_DST */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IPV4_DST_MASK
++#define HAVE_TCA_FLOWER_KEY_IPV4_DST_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_IPV4_DST_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IPV6_SRC
++#define HAVE_TCA_FLOWER_KEY_IPV6_SRC 1
++#endif /* HAVE_TCA_FLOWER_KEY_IPV6_SRC */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IPV6_SRC_MASK
++#define HAVE_TCA_FLOWER_KEY_IPV6_SRC_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_IPV6_SRC_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IPV6_DST
++#define HAVE_TCA_FLOWER_KEY_IPV6_DST 1
++#endif /* HAVE_TCA_FLOWER_KEY_IPV6_DST */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IPV6_DST_MASK
++#define HAVE_TCA_FLOWER_KEY_IPV6_DST_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_IPV6_DST_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_TCP_SRC
++#define HAVE_TCA_FLOWER_KEY_TCP_SRC 1
++#endif /* HAVE_TCA_FLOWER_KEY_TCP_SRC */
++
++#ifndef HAVE_TCA_FLOWER_KEY_TCP_SRC_MASK
++#define HAVE_TCA_FLOWER_KEY_TCP_SRC_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_TCP_SRC_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_TCP_DST
++#define HAVE_TCA_FLOWER_KEY_TCP_DST 1
++#endif /* HAVE_TCA_FLOWER_KEY_TCP_DST */
++
++#ifndef HAVE_TCA_FLOWER_KEY_TCP_DST_MASK
++#define HAVE_TCA_FLOWER_KEY_TCP_DST_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_TCP_DST_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_UDP_SRC
++#define HAVE_TCA_FLOWER_KEY_UDP_SRC 1
++#endif /* HAVE_TCA_FLOWER_KEY_UDP_SRC */
++
++#ifndef HAVE_TCA_FLOWER_KEY_UDP_SRC_MASK
++#define HAVE_TCA_FLOWER_KEY_UDP_SRC_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_UDP_SRC_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_UDP_DST
++#define HAVE_TCA_FLOWER_KEY_UDP_DST 1
++#endif /* HAVE_TCA_FLOWER_KEY_UDP_DST */
++
++#ifndef HAVE_TCA_FLOWER_KEY_UDP_DST_MASK
++#define HAVE_TCA_FLOWER_KEY_UDP_DST_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_UDP_DST_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_VLAN_ID
++#define HAVE_TCA_FLOWER_KEY_VLAN_ID 1
++#endif /* HAVE_TCA_FLOWER_KEY_VLAN_ID */
++
++#ifndef HAVE_TCA_FLOWER_KEY_VLAN_PRIO
++#define HAVE_TCA_FLOWER_KEY_VLAN_PRIO 1
++#endif /* HAVE_TCA_FLOWER_KEY_VLAN_PRIO */
++
++#ifndef HAVE_TCA_FLOWER_KEY_VLAN_ETH_TYPE
++#define HAVE_TCA_FLOWER_KEY_VLAN_ETH_TYPE 1
++#endif /* HAVE_TCA_FLOWER_KEY_VLAN_ETH_TYPE */
++
++#ifndef HAVE_TCA_FLOWER_KEY_TCP_FLAGS
++#define HAVE_TCA_FLOWER_KEY_TCP_FLAGS 1
++#endif /* HAVE_TCA_FLOWER_KEY_TCP_FLAGS */
++
++#ifndef HAVE_TCA_FLOWER_KEY_TCP_FLAGS_MASK
++#define HAVE_TCA_FLOWER_KEY_TCP_FLAGS_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_TCP_FLAGS_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IP_TOS
++#define HAVE_TCA_FLOWER_KEY_IP_TOS 1
++#endif /* HAVE_TCA_FLOWER_KEY_IP_TOS */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IP_TOS_MASK
++#define HAVE_TCA_FLOWER_KEY_IP_TOS_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_IP_TOS_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IP_TTL
++#define HAVE_TCA_FLOWER_KEY_IP_TTL 1
++#endif /* HAVE_TCA_FLOWER_KEY_IP_TTL */
++
++#ifndef HAVE_TCA_FLOWER_KEY_IP_TTL_MASK
++#define HAVE_TCA_FLOWER_KEY_IP_TTL_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_IP_TTL_MASK */
++
++#ifndef HAVE_TC_ACT_GOTO_CHAIN
++#define HAVE_TC_ACT_GOTO_CHAIN 1
++#endif /* HAVE_TC_ACT_GOTO_CHAIN */
++
++#ifndef HAVE_TC_ACT_VLAN
++#define HAVE_TC_ACT_VLAN 1
++#endif /* HAVE_TC_ACT_VLAN */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_KEY_ID
++#define HAVE_TCA_FLOWER_KEY_ENC_KEY_ID 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_KEY_ID */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_IPV4_SRC
++#define HAVE_TCA_FLOWER_KEY_ENC_IPV4_SRC 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_IPV4_SRC */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_IPV4_SRC_MASK
++#define HAVE_TCA_FLOWER_KEY_ENC_IPV4_SRC_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_IPV4_SRC_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_IPV4_DST
++#define HAVE_TCA_FLOWER_KEY_ENC_IPV4_DST 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_IPV4_DST */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_IPV4_DST_MASK
++#define HAVE_TCA_FLOWER_KEY_ENC_IPV4_DST_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_IPV4_DST_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_IPV6_SRC
++#define HAVE_TCA_FLOWER_KEY_ENC_IPV6_SRC 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_IPV6_SRC */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_IPV6_SRC_MASK
++#define HAVE_TCA_FLOWER_KEY_ENC_IPV6_SRC_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_IPV6_SRC_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_IPV6_DST
++#define HAVE_TCA_FLOWER_KEY_ENC_IPV6_DST 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_IPV6_DST */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_IPV6_DST_MASK
++#define HAVE_TCA_FLOWER_KEY_ENC_IPV6_DST_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_IPV6_DST_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_UDP_SRC_PORT
++#define HAVE_TCA_FLOWER_KEY_ENC_UDP_SRC_PORT 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_UDP_SRC_PORT */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_UDP_SRC_PORT_MASK
++#define HAVE_TCA_FLOWER_KEY_ENC_UDP_SRC_PORT_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_UDP_SRC_PORT_MASK */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_UDP_DST_PORT
++#define HAVE_TCA_FLOWER_KEY_ENC_UDP_DST_PORT 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_UDP_DST_PORT */
++
++#ifndef HAVE_TCA_FLOWER_KEY_ENC_UDP_DST_PORT_MASK
++#define HAVE_TCA_FLOWER_KEY_ENC_UDP_DST_PORT_MASK 1
++#endif /* HAVE_TCA_FLOWER_KEY_ENC_UDP_DST_PORT_MASK */
++
++/* HAVE_TCA_FLOWER_KEY_ENC_IP_TOS is not defined. */
++
++/* HAVE_TCA_FLOWER_KEY_ENC_IP_TOS_MASK is not defined. */
++
++/* HAVE_TCA_FLOWER_KEY_ENC_IP_TTL is not defined. */
++
++/* HAVE_TCA_FLOWER_KEY_ENC_IP_TTL_MASK is not defined. */
++
++#ifndef HAVE_TC_ACT_TUNNEL_KEY
++#define HAVE_TC_ACT_TUNNEL_KEY 1
++#endif /* HAVE_TC_ACT_TUNNEL_KEY */
++
++#ifndef HAVE_TCA_TUNNEL_KEY_ENC_DST_PORT
++#define HAVE_TCA_TUNNEL_KEY_ENC_DST_PORT 1
++#endif /* HAVE_TCA_TUNNEL_KEY_ENC_DST_PORT */
++
++/* HAVE_TCA_TUNNEL_KEY_ENC_TOS is not defined. */
++
++/* HAVE_TCA_TUNNEL_KEY_ENC_TTL is not defined. */
++
++#ifndef HAVE_TCA_TUNNEL_KEY_NO_CSUM
++#define HAVE_TCA_TUNNEL_KEY_NO_CSUM 1
++#endif /* HAVE_TCA_TUNNEL_KEY_NO_CSUM */
++
++#ifndef HAVE_TC_ACT_PEDIT
++#define HAVE_TC_ACT_PEDIT 1
++#endif /* HAVE_TC_ACT_PEDIT */
++
++#ifndef HAVE_SUPPORTED_40000baseKR4_Full
+ #define HAVE_SUPPORTED_40000baseKR4_Full 1
+-#endif
++#endif /* HAVE_SUPPORTED_40000baseKR4_Full */
+
+-#ifdef SUPPORTED_40000baseCR4_Full
++#ifndef HAVE_SUPPORTED_40000baseCR4_Full
+ #define HAVE_SUPPORTED_40000baseCR4_Full 1
+-#endif
++#endif /* HAVE_SUPPORTED_40000baseCR4_Full */
+
+-#ifdef SUPPORTED_40000baseSR4_Full
++#ifndef HAVE_SUPPORTED_40000baseSR4_Full
+ #define HAVE_SUPPORTED_40000baseSR4_Full 1
+-#endif
++#endif /* HAVE_SUPPORTED_40000baseSR4_Full */
+
+-#ifdef SUPPORTED_40000baseLR4_Full
++#ifndef HAVE_SUPPORTED_40000baseLR4_Full
+ #define HAVE_SUPPORTED_40000baseLR4_Full 1
+-#endif
++#endif /* HAVE_SUPPORTED_40000baseLR4_Full */
+
+-#ifdef SUPPORTED_56000baseKR4_Full
++#ifndef HAVE_SUPPORTED_56000baseKR4_Full
+ #define HAVE_SUPPORTED_56000baseKR4_Full 1
+-#endif
++#endif /* HAVE_SUPPORTED_56000baseKR4_Full */
+
+-#ifdef SUPPORTED_56000baseCR4_Full
++#ifndef HAVE_SUPPORTED_56000baseCR4_Full
+ #define HAVE_SUPPORTED_56000baseCR4_Full 1
+-#endif
++#endif /* HAVE_SUPPORTED_56000baseCR4_Full */
+
+-#ifdef SUPPORTED_56000baseSR4_Full
++#ifndef HAVE_SUPPORTED_56000baseSR4_Full
+ #define HAVE_SUPPORTED_56000baseSR4_Full 1
+-#endif
++#endif /* HAVE_SUPPORTED_56000baseSR4_Full */
+
+-#ifdef SUPPORTED_56000baseLR4_Full
++#ifndef HAVE_SUPPORTED_56000baseLR4_Full
+ #define HAVE_SUPPORTED_56000baseLR4_Full 1
+-#endif
++#endif /* HAVE_SUPPORTED_56000baseLR4_Full */
+
++#ifndef HAVE_STATIC_ASSERT
++#define HAVE_STATIC_ASSERT 1
++#endif /* HAVE_STATIC_ASSERT */
+
+diff --git a/src/dpdk/drivers/net/tap/rte_eth_tap.c b/src/dpdk/drivers/net/tap/rte_eth_tap.c
+index bc889c19..47a2b68f 100644
+--- a/src/dpdk/drivers/net/tap/rte_eth_tap.c
++++ b/src/dpdk/drivers/net/tap/rte_eth_tap.c
+@@ -34,8 +34,8 @@
+ #include <unistd.h>
+ #include <arpa/inet.h>
+ #include <net/if.h>
+-#include <linux_tap/if_tun.h>
+-#include <linux_tap/if_ether.h>
++#include <linux/if_tun.h>
++#include <linux/if_ether.h>
+ #include <fcntl.h>
+ #include <ctype.h>
+
+diff --git a/src/dpdk/drivers/net/tap/rte_eth_tap.h b/src/dpdk/drivers/net/tap/rte_eth_tap.h
+index 66cd3441..dc3579ac 100644
+--- a/src/dpdk/drivers/net/tap/rte_eth_tap.h
++++ b/src/dpdk/drivers/net/tap/rte_eth_tap.h
+@@ -11,7 +11,7 @@
+ #include <inttypes.h>
+ #include <net/if.h>
+
+-#include <linux_tap/if_tun.h>
++#include <linux/if_tun.h>
+
+ #include <rte_ethdev_driver.h>
+ #include <rte_ether.h>
+diff --git a/src/dpdk/drivers/net/tap/tap_autoconf.h b/src/dpdk/drivers/net/tap/tap_autoconf.h
+index dddd4ae6..d5880608 100644
+--- a/src/dpdk/drivers/net/tap/tap_autoconf.h
++++ b/src/dpdk/drivers/net/tap/tap_autoconf.h
+@@ -1,14 +1,24 @@
+ #ifndef HAVE_TC_FLOWER
+ #define HAVE_TC_FLOWER 1
+-#endif
++#endif /* HAVE_TC_FLOWER */
+
++#ifndef HAVE_TC_VLAN_ID
++#define HAVE_TC_VLAN_ID 1
++#endif /* HAVE_TC_VLAN_ID */
+
+ #ifndef HAVE_TC_BPF
+ #define HAVE_TC_BPF 1
+-#endif
++#endif /* HAVE_TC_BPF */
+
+-#ifndef HAVE_TC_VLAN_ID
+-#define HAVE_TC_VLAN_ID 1
+-#endif
++#ifndef HAVE_TC_BPF_FD
++#define HAVE_TC_BPF_FD 1
++#endif /* HAVE_TC_BPF_FD */
++
++#ifndef HAVE_TC_ACT_BPF
++#define HAVE_TC_ACT_BPF 1
++#endif /* HAVE_TC_ACT_BPF */
+
++#ifndef HAVE_TC_ACT_BPF_FD
++#define HAVE_TC_ACT_BPF_FD 1
++#endif /* HAVE_TC_ACT_BPF_FD */
+
+diff --git a/src/dpdk/drivers/net/tap/tap_netlink.h b/src/dpdk/drivers/net/tap/tap_netlink.h
+index 900ce375..faa73ba1 100644
+--- a/src/dpdk/drivers/net/tap/tap_netlink.h
++++ b/src/dpdk/drivers/net/tap/tap_netlink.h
+@@ -8,8 +8,8 @@
+
+ #include <ctype.h>
+ #include <inttypes.h>
+-#include <linux_tap/rtnetlink.h>
+-#include <linux_tap/netlink.h>
++#include <linux/rtnetlink.h>
++#include <linux/netlink.h>
+ #include <stdio.h>
+
+ #include <rte_log.h>
+diff --git a/src/dpdk/drivers/net/tap/tap_tcmsgs.h b/src/dpdk/drivers/net/tap/tap_tcmsgs.h
+index 782de540..8cedea84 100644
+--- a/src/dpdk/drivers/net/tap/tap_tcmsgs.h
++++ b/src/dpdk/drivers/net/tap/tap_tcmsgs.h
+@@ -7,13 +7,13 @@
+ #define _TAP_TCMSGS_H_
+
+ #include <tap_autoconf.h>
+-#include <linux_tap/if_ether.h>
+-#include <linux_tap/rtnetlink.h>
+-#include <linux_tap/pkt_sched.h>
+-#include <linux_tap/pkt_cls.h>
+-#include <linux_tap/tc_act/tc_mirred.h>
+-#include <linux_tap/tc_act/tc_gact.h>
+-#include <linux_tap/tc_act/tc_skbedit.h>
++#include <linux/if_ether.h>
++#include <linux/rtnetlink.h>
++#include <linux/pkt_sched.h>
++#include <linux/pkt_cls.h>
++#include <linux/tc_act/tc_mirred.h>
++#include <linux/tc_act/tc_gact.h>
++#include <linux/tc_act/tc_skbedit.h>
+ #ifdef HAVE_TC_ACT_BPF
+ #include <linux/tc_act/tc_bpf.h>
+ #endif
+diff --git a/src/main_dpdk.cpp b/src/main_dpdk.cpp
+index 0f66b07a..8c37ea15 100644
+--- a/src/main_dpdk.cpp
++++ b/src/main_dpdk.cpp
+@@ -6969,6 +6969,7 @@ COLD_FUNC bool DpdkTRexPortAttr::update_link_status_nowait(){
+ bool changed = false;
+ rte_eth_link_get_nowait(m_repid, &new_link);
+
++ new_link.link_speed = ETH_SPEED_NUM_50G;
+ if (new_link.link_speed != m_link.link_speed ||
+ new_link.link_duplex != m_link.link_duplex ||
+ new_link.link_autoneg != m_link.link_autoneg ||
diff --git a/fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml b/fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml
new file mode 100644
index 0000000000..1a747f68d5
--- /dev/null
+++ b/fdio.infra.ansible/roles/trex/tasks/deploy_block.yaml
@@ -0,0 +1,63 @@
+---
+# file: tasks/deploy_block.yaml
+
+- name: Get Release {{ item }}
+ ansible.builtin.get_url:
+ url: "{{ trex_url }}/v{{ item }}.tar.gz"
+ dest: "{{ trex_target_dir }}/trex-core-{{ item }}.tar.gz"
+ validate_certs: false
+ mode: 0644
+ register: trex_downloaded
+
+- name: Create Directory {{ item }}
+ ansible.builtin.file:
+ path: "{{ trex_target_dir }}/trex-core-{{ item }}"
+ state: "directory"
+
+- name: Extract Release {{ item }}
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ trex_target_dir }}/trex-core-{{ item }}.tar.gz"
+ dest: "{{ trex_target_dir }}/"
+ creates: "{{ trex_target_dir }}/trex-core-{{ item }}/linux_dpdk/"
+ register: trex_extracted
+
+- name: Compile Release {{ item }} Part I
+ ansible.builtin.command: "./b configure"
+ args:
+ chdir: "{{ trex_target_dir }}/trex-core-{{ item }}/linux_dpdk/"
+ when: trex_extracted.changed
+
+- name: Compile Release {{ item }} Part II
+ ansible.builtin.command: "./b build"
+ args:
+ chdir: "{{ trex_target_dir }}/trex-core-{{ item }}/linux_dpdk/"
+ async: 3000
+ poll: 0
+ register: trex_built
+ when: trex_extracted.changed
+
+- name: Check if T-Rex is Compiled
+  ansible.builtin.async_status:
+ jid: "{{ trex_built.ansible_job_id }}"
+ register: trex_built
+ until: trex_built.finished
+ delay: 10
+ retries: 300
+ when: trex_extracted.changed
+
+- name: Compile Release {{ item }} Part III
+ ansible.builtin.command: "make -j 16"
+ args:
+ chdir: "{{ trex_target_dir }}/trex-core-{{ item }}/scripts/ko/src"
+ when: trex_extracted.changed
+
+- name: Compile Release {{ item }} Part IV
+ ansible.builtin.command: "make install"
+ args:
+ chdir: "{{ trex_target_dir }}/trex-core-{{ item }}/scripts/ko/src"
+ when: trex_extracted.changed
+
+- name: Link libc.a to liblibc.a
+ ansible.builtin.command: "ln -s -f /usr/lib/x86_64-linux-gnu/libc.a /usr/lib/x86_64-linux-gnu/liblibc.a"
+ when: trex_extracted.changed
diff --git a/fdio.infra.ansible/roles/trex/tasks/main.yaml b/fdio.infra.ansible/roles/trex/tasks/main.yaml
new file mode 100644
index 0000000000..d0509f7544
--- /dev/null
+++ b/fdio.infra.ansible/roles/trex/tasks/main.yaml
@@ -0,0 +1,24 @@
+---
+# file: tasks/main.yaml
+
+- name: Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - trex-inst-prerequisites
+
+- name: Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - trex-inst-prerequisites
+
+- name: Multiple T-Rex Versions
+  ansible.builtin.include_tasks: deploy_block.yaml
+ loop: "{{ trex_version }}"
+ tags:
+ - trex-inst
diff --git a/fdio.infra.ansible/roles/user_add/defaults/main.yaml b/fdio.infra.ansible/roles/user_add/defaults/main.yaml
new file mode 100644
index 0000000000..643ad7dfd7
--- /dev/null
+++ b/fdio.infra.ansible/roles/user_add/defaults/main.yaml
@@ -0,0 +1,14 @@
+---
+# file: roles/user_add/defaults/main.yaml
+
+# Default shell for a user if none is specified.
+users_shell: /bin/bash
+
+# Default create home dirs for new users.
+users_create_homedirs: true
+
+# Default list of users to create.
+users: []
+
+# Default enable password login.
+sshd_disable_password_login: false
diff --git a/fdio.infra.ansible/roles/user_add/handlers/main.yaml b/fdio.infra.ansible/roles/user_add/handlers/main.yaml
new file mode 100644
index 0000000000..5f1f71a332
--- /dev/null
+++ b/fdio.infra.ansible/roles/user_add/handlers/main.yaml
@@ -0,0 +1,7 @@
+---
+# file: roles/user_add/handlers/main.yaml
+
+- name: Restart SSHd
+ ansible.builtin.service:
+ name: sshd
+ state: restarted
diff --git a/fdio.infra.ansible/roles/user_add/tasks/main.yaml b/fdio.infra.ansible/roles/user_add/tasks/main.yaml
new file mode 100644
index 0000000000..329c6abd07
--- /dev/null
+++ b/fdio.infra.ansible/roles/user_add/tasks/main.yaml
@@ -0,0 +1,39 @@
+---
+# file: roles/user_add/tasks/main.yaml
+
+- name: Conf - Add User
+ ansible.builtin.user:
+ append: "{{ item.append | default(omit) }}"
+ createhome: "{{ 'yes' if users_create_homedirs else 'no' }}"
+ generate_ssh_key: "{{ item.generate_ssh_key | default(omit) }}"
+ groups: "{{ item.groups | join(',') if 'groups' in item else '' }}"
+ name: "{{ item.username }}"
+ password: "{{ item.password if item.password is defined else '!' }}"
+ shell: "{{ item.shell if item.shell is defined else users_shell }}"
+ state: present
+ with_items: "{{ users }}"
+ tags:
+ - user-add-conf
+
+- name: Conf - SSH keys
+ ansible.builtin.authorized_key:
+ user: "{{ item.0.username }}"
+ key: "{{ item.1 }}"
+ with_subelements:
+ - "{{ users }}"
+ - ssh_key
+ - skip_missing: true
+ tags:
+ - user-add-conf
+
+- name: Conf - Disable Password Login
+ ansible.builtin.lineinfile:
+ dest: "/etc/ssh/sshd_config"
+ regexp: "^PasswordAuthentication yes"
+ line: "PasswordAuthentication no"
+ notify:
+ - "Restart SSHd"
+ when:
+ - sshd_disable_password_login
+ tags:
+ - user-add-conf
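Note: for illustration only (none of these values are defined anywhere in this change), a group_vars entry feeding the tasks above could look like:

    users:
      - username: "testuser"                  # placeholder
        groups:
          - "sudo"
        shell: "/bin/bash"
        ssh_key:
          - "ssh-ed25519 AAAA... user@host"   # placeholder public key
    sshd_disable_password_login: true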
diff --git a/fdio.infra.ansible/roles/vagrant/defaults/main.yml b/fdio.infra.ansible/roles/vagrant/defaults/main.yml
new file mode 100644
index 0000000000..caa3339bb0
--- /dev/null
+++ b/fdio.infra.ansible/roles/vagrant/defaults/main.yml
@@ -0,0 +1,14 @@
+---
+# file: vagrant/defaults/main.yml
+
+# Settings for VPP Device host group
+csit:
+ home: "/home/vagrant/csit"
+ test_user:
+ name: "testuser"
+ password: "$6$/mAr/JDJc0u6/i$sLBptji85Xo/vdAv43bP4NpTaAfSBY8p3G7Uj9p4fKysrvs7XF8.FmlC56j4AzOun6nnf7PA.elytvfWoEHCL1"
+ home: "/home/testuser"
+ shell: "/bin/bash"
+ repository:
+ url: "https://gerrit.fd.io/r/csit"
+ version: "HEAD"
diff --git a/fdio.infra.ansible/roles/vagrant/files/99-vppdevice.yaml b/fdio.infra.ansible/roles/vagrant/files/99-vppdevice.yaml
new file mode 100644
index 0000000000..bcaa67099d
--- /dev/null
+++ b/fdio.infra.ansible/roles/vagrant/files/99-vppdevice.yaml
@@ -0,0 +1,28 @@
+network:
+ version: 2
+ renderer: networkd
+ ethernets:
+ enp0s8:
+ match:
+ macaddress: 08:00:27:0f:e0:4d
+ set-name: enpTGa
+ enp0s9:
+ match:
+ macaddress: 08:00:27:61:f7:ad
+ set-name: enpTGb
+ enp0s17:
+ match:
+ macaddress: 08:00:27:dc:5d:a4
+ set-name: enpTGc
+ enp0s10:
+ match:
+ macaddress: 08:00:27:38:5e:58
+ set-name: enpSUTa
+ enp0s16:
+ match:
+ macaddress: 08:00:27:e3:f5:42
+ set-name: enpSUTb
+ enp0s18:
+ match:
+ macaddress: 08:00:27:4f:7c:63
+ set-name: enpSUTc
diff --git a/fdio.infra.ansible/roles/vagrant/tasks/main.yml b/fdio.infra.ansible/roles/vagrant/tasks/main.yml
new file mode 100644
index 0000000000..1716ebe0d5
--- /dev/null
+++ b/fdio.infra.ansible/roles/vagrant/tasks/main.yml
@@ -0,0 +1,43 @@
+---
+# file: vagrant/tasks/main.yml
+
+# General
+- name: Adjust number of hugepages
+ sysctl:
+ name: "vm.nr_hugepages"
+ value: "512"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: true
+
+- name: "Add user for running tests: {{ csit.test_user.name }}"
+ user:
+ name: "{{ csit.test_user.name }}"
+ password: "{{ csit.test_user.password }}"
+ home: "{{ csit.test_user.home }}"
+ shell: "{{ csit.test_user.shell }}"
+
+- name: Add vagrant user to docker group
+ user:
+ name: "vagrant"
+ groups:
+ - "docker"
+
+- name: Reload groups for current session
+ command: "/usr/bin/newgrp docker"
+
+# Disabling CSIT repo cloning in the VM as the repo is synced from the host
+# - name: Clone CSIT repository
+# become_user: vagrant
+# git:
+# repo: "{{ csit.repository.url }}"
+# dest: "{{ csit.home }}"
+# accept_hostkey: true
+# version: "{{ csit.repository.version }}"
+
+- name: Load csit docker image from local path if exists (/vagrant/csit-sut.tar)
+ shell: |
+ if [ -z "$(docker images -q `cat {{ csit.home }}/VPP_DEVICE_IMAGE`)" ] && [ -e /vagrant/csit-sut.tar ]; then
+ docker load -i /vagrant/csit-sut.tar;
+ fi;
+ ignore_errors: true
diff --git a/fdio.infra.ansible/roles/vault/defaults/main.yaml b/fdio.infra.ansible/roles/vault/defaults/main.yaml
new file mode 100644
index 0000000000..5dd3db63c1
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/defaults/main.yaml
@@ -0,0 +1,159 @@
+---
+# file: roles/vault/defaults/main.yaml
+
+# Inst - Prerequisites.
+packages: "{{ packages_base + packages_by_distro[ansible_distribution | lower] + packages_by_arch[ansible_machine] }}"
+packages_base:
+ - "curl"
+ - "unzip"
+packages_by_distro:
+ ubuntu:
+ - []
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
+
+# Inst - Vault Map.
+vault_version: "1.11.0"
+vault_architecture_map:
+ amd64: "amd64"
+ x86_64: "amd64"
+ armv7l: "arm"
+ aarch64: "arm64"
+ 32-bit: "386"
+ 64-bit: "amd64"
+vault_architecture: "{{ vault_architecture_map[ansible_architecture] }}"
+vault_os: "{{ ansible_system|lower }}"
+vault_pkg: "vault_{{ vault_version }}_{{ vault_os }}_{{ vault_architecture }}.zip"
+vault_zip_url: "https://releases.hashicorp.com/vault/{{ vault_version }}/{{ vault_pkg }}"
+
+# Conf - Service.
+vault_node_role: "server"
+vault_restart_handler_state: "restarted"
+vault_systemd_service_name: "vault"
+
+# Inst - System paths.
+vault_bin_dir: "/usr/local/bin"
+vault_config_dir: "/etc/vault.d"
+vault_data_dir: "/var/vault"
+vault_inst_dir: "/opt"
+vault_run_dir: "/var/run/vault"
+vault_ssl_dir: "/etc/vault.d/ssl"
+
+# Conf - User and group.
+vault_group: "vault"
+vault_group_state: "present"
+vault_user: "vault"
+vault_user_state: "present"
+
+# Conf - Main
+vault_group_name: "vault_instances"
+vault_cluster_name: "yul1"
+vault_datacenter: "yul1"
+vault_log_level: "{{ lookup('env','VAULT_LOG_LEVEL') | default('info', true) }}"
+vault_iface: "{{ lookup('env','VAULT_IFACE') | default(ansible_default_ipv4.interface, true) }}"
+vault_address: "{{ hostvars[inventory_hostname]['ansible_'+vault_iface]['ipv4']['address'] }}"
+vault_ui: "{{ lookup('env', 'VAULT_UI') | default(true, true) }}"
+vault_port: 8200
+vault_use_config_path: false
+vault_main_config: "{{ vault_config_dir }}/vault_main.hcl"
+vault_main_configuration_template: "vault_main_configuration.hcl.j2"
+vault_listener_localhost_enable: false
+vault_http_proxy: ""
+vault_https_proxy: ""
+vault_no_proxy: ""
+
+# Conf - Listeners
+vault_tcp_listeners:
+ - vault_address: "{{ vault_address }}"
+ vault_port: "{{ vault_port }}"
+ vault_cluster_address: "{{ vault_cluster_address }}"
+ vault_tls_disable: "{{ vault_tls_disable }}"
+ vault_tls_config_path: "{{ vault_tls_config_path }}"
+ vault_tls_cert_file: "{{ vault_tls_cert_file }}"
+ vault_tls_key_file: "{{ vault_tls_key_file }}"
+ vault_tls_ca_file: "{{ vault_tls_ca_file }}"
+ vault_tls_min_version: "{{ vault_tls_min_version }}"
+ vault_tls_cipher_suites: "{{ vault_tls_cipher_suites }}"
+ vault_tls_prefer_server_cipher_suites: "{{ vault_tls_prefer_server_cipher_suites }}"
+ vault_tls_require_and_verify_client_cert: "{{ vault_tls_require_and_verify_client_cert }}"
+ vault_tls_disable_client_certs: "{{ vault_tls_disable_client_certs }}"
+ vault_disable_mlock: true
+
+# Conf - Backend
+vault_backend_consul: "vault_backend_consul.j2"
+vault_backend_file: "vault_backend_file.j2"
+vault_backend_raft: "vault_backend_raft.j2"
+vault_backend_etcd: "vault_backend_etcd.j2"
+vault_backend_s3: "vault_backend_s3.j2"
+vault_backend_dynamodb: "vault_backend_dynamodb.j2"
+vault_backend_mysql: "vault_backend_mysql.j2"
+vault_backend_gcs: "vault_backend_gcs.j2"
+
+vault_cluster_disable: false
+vault_cluster_address: "{{ hostvars[inventory_hostname]['ansible_'+vault_iface]['ipv4']['address'] }}:{{ (vault_port | int) + 1}}"
+vault_cluster_addr: "{{ vault_protocol }}://{{ vault_cluster_address }}"
+vault_api_addr: "{{ vault_protocol }}://{{ vault_redirect_address | default(hostvars[inventory_hostname]['ansible_'+vault_iface]['ipv4']['address']) }}:{{ vault_port }}"
+
+vault_max_lease_ttl: "768h"
+vault_default_lease_ttl: "768h"
+
+vault_backend_tls_src_files: "{{ vault_tls_src_files }}"
+vault_backend_tls_config_path: "{{ vault_tls_config_path }}"
+vault_backend_tls_cert_file: "{{ vault_tls_cert_file }}"
+vault_backend_tls_key_file: "{{ vault_tls_key_file }}"
+vault_backend_tls_ca_file: "{{ vault_tls_ca_file }}"
+
+vault_consul: "127.0.0.1:8500"
+vault_consul_path: "vault"
+vault_consul_service: "vault"
+vault_consul_scheme: "http"
+
+vault_backend: "consul"
+
+# Conf - Service registration
+vault_service_registration_consul_enable: true
+vault_service_registration_consul_template: "vault_service_registration_consul.hcl.j2"
+vault_service_registration_consul_check_timeout: "5s"
+vault_service_registration_consul_address: "127.0.0.1:8500"
+vault_service_registration_consul_service: "vault"
+vault_service_registration_consul_service_tags: ""
+vault_service_registration_consul_service_address:
+vault_service_registration_consul_disable_registration: false
+vault_service_registration_consul_scheme: "http"
+
+vault_service_registration_consul_tls_config_path: "{{ vault_tls_config_path }}"
+vault_service_registration_consul_tls_cert_file: "{{ vault_tls_cert_file }}"
+vault_service_registration_consul_tls_key_file: "{{ vault_tls_key_file }}"
+vault_service_registration_consul_tls_ca_file: "{{ vault_tls_ca_file }}"
+vault_service_registration_consul_tls_min_version: "{{ vault_tls_min_version }}"
+vault_service_registration_consul_tls_skip_verify: false
+
+# Conf - Telemetry
+vault_telemetry_enabled: true
+vault_telemetry_disable_hostname: false
+vault_prometheus_retention_time: 30s
+
+# Conf - TLS
+validate_certs_during_api_reachable_check: true
+
+vault_tls_config_path: "{{ lookup('env','VAULT_TLS_DIR') | default('/etc/vault/tls', true) }}"
+vault_tls_src_files: "{{ lookup('env','VAULT_TLS_SRC_FILES') | default(role_path+'/files', true) }}"
+
+vault_tls_disable: "{{ lookup('env','VAULT_TLS_DISABLE') | default(1, true) }}"
+vault_tls_gossip: "{{ lookup('env','VAULT_TLS_GOSSIP') | default(0, true) }}"
+
+vault_tls_copy_keys: true
+vault_protocol: "{% if vault_tls_disable %}http{% else %}https{% endif %}"
+vault_tls_cert_file: "{{ lookup('env','VAULT_TLS_CERT_FILE') | default('server.crt', true) }}"
+vault_tls_key_file: "{{ lookup('env','VAULT_TLS_KEY_FILE') | default('server.key', true) }}"
+vault_tls_ca_file: "{{ lookup('env','VAULT_TLS_CA_CRT') | default('ca.crt', true) }}"
+
+vault_tls_min_version: "{{ lookup('env','VAULT_TLS_MIN_VERSION') | default('tls12', true) }}"
+vault_tls_cipher_suites: ""
+vault_tls_prefer_server_cipher_suites: "{{ lookup('env','VAULT_TLS_PREFER_SERVER_CIPHER_SUITES') | default('false', true) }}"
+vault_tls_files_remote_src: false
+vault_tls_require_and_verify_client_cert: false
+vault_tls_disable_client_certs: false
diff --git a/fdio.infra.ansible/roles/vault/handlers/main.yaml b/fdio.infra.ansible/roles/vault/handlers/main.yaml
new file mode 100644
index 0000000000..ff2944f115
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/handlers/main.yaml
@@ -0,0 +1,9 @@
+---
+# file roles/vault/handlers/main.yaml
+
+- name: Restart Vault
+ ansible.builtin.systemd:
+ daemon_reload: true
+ enabled: true
+ name: "{{ vault_systemd_service_name }}"
+ state: "{{ vault_restart_handler_state }}"
diff --git a/fdio.infra.ansible/roles/vault/meta/main.yaml b/fdio.infra.ansible/roles/vault/meta/main.yaml
new file mode 100644
index 0000000000..22a62dd438
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/meta/main.yaml
@@ -0,0 +1,18 @@
+---
+# file: roles/vault/meta/main.yaml
+
+dependencies: []
+
+galaxy_info:
+ role_name: vault
+ author: fd.io
+ description: Hashicorp Vault.
+ company: none
+ license: "license (Apache)"
+ min_ansible_version: 2.9
+ platforms:
+ - name: Ubuntu
+ versions:
+ - jammy
+ galaxy_tags:
+ - vault
diff --git a/fdio.infra.ansible/roles/vault/tasks/main.yaml b/fdio.infra.ansible/roles/vault/tasks/main.yaml
new file mode 100644
index 0000000000..3fceadfb4a
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/tasks/main.yaml
@@ -0,0 +1,133 @@
+---
+# file: roles/vault/tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - vault-inst-prerequisites
+
+- name: Inst - Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - vault-inst-prerequisites
+
+- name: Conf - Add Vault Group
+ ansible.builtin.group:
+ name: "{{ vault_group }}"
+ state: "{{ vault_user_state }}"
+ tags:
+ - vault-conf-user
+
+- name: Conf - Add Vault user
+ ansible.builtin.user:
+ name: "{{ vault_user }}"
+ group: "{{ vault_group }}"
+ state: "{{ vault_group_state }}"
+ system: true
+ tags:
+ - vault-conf-user
+
+- name: Inst - Clean Vault
+ ansible.builtin.file:
+ path: "{{ vault_inst_dir }}/vault"
+ state: "absent"
+ tags:
+ - vault-inst-package
+
+- name: Inst - Download Vault
+ ansible.builtin.get_url:
+ url: "{{ vault_zip_url }}"
+ dest: "{{ vault_inst_dir }}/{{ vault_pkg }}"
+ tags:
+ - vault-inst-package
+
+- name: Inst - Unarchive Vault
+ ansible.builtin.unarchive:
+ src: "{{ vault_inst_dir }}/{{ vault_pkg }}"
+ dest: "{{ vault_inst_dir }}/"
+ creates: "{{ vault_inst_dir }}/vault"
+ remote_src: true
+ tags:
+ - vault-inst-package
+
+- name: Inst - Vault
+ ansible.builtin.copy:
+ src: "{{ vault_inst_dir }}/vault"
+ dest: "{{ vault_bin_dir }}"
+ owner: "{{ vault_user }}"
+ group: "{{ vault_group }}"
+ force: true
+ mode: 0755
+ remote_src: true
+ tags:
+ - vault-inst-package
+
+- name: Inst - Check Vault mlock capability
+ ansible.builtin.command: "setcap cap_ipc_lock=+ep {{ vault_bin_dir }}/vault"
+ changed_when: false # read-only task
+ ignore_errors: true
+ register: vault_mlock_capability
+ tags:
+ - vault-inst-package
+
+- name: Inst - Enable non root mlock capability
+ ansible.builtin.command: "setcap cap_ipc_lock=+ep {{ vault_bin_dir }}/vault"
+ when: vault_mlock_capability is failed
+ tags:
+ - vault-inst-package
+
+- name: Conf - Create directories
+ ansible.builtin.file:
+ dest: "{{ item }}"
+ state: directory
+ owner: "{{ vault_user }}"
+ group: "{{ vault_group }}"
+ mode: 0750
+ with_items:
+ - "{{ vault_data_dir }}"
+ - "{{ vault_config_dir }}"
+ - "{{ vault_ssl_dir }}"
+ tags:
+ - vault-conf
+
+- name: Conf - Vault main configuration
+ ansible.builtin.template:
+ src: "{{ vault_main_configuration_template }}"
+ dest: "{{ vault_main_config }}"
+ owner: "{{ vault_user }}"
+ group: "{{ vault_group }}"
+ mode: 0400
+ tags:
+ - vault-conf
+
+# - name: Conf - Copy Certificates And Keys
+# copy:
+# content: "{{ item.src }}"
+# dest: "{{ item.dest }}"
+# owner: "{{ vault_user }}"
+# group: "{{ vault_group }}"
+# mode: 0600
+# no_log: true
+# loop: "{{ vault_certificates | flatten(levels=1) }}"
+# tags:
+# - vault-conf
+
+- name: Conf - System.d Script
+ ansible.builtin.template:
+ src: "vault_systemd.service.j2"
+ dest: "/lib/systemd/system/vault.service"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ notify:
+ - "Restart Vault"
+ tags:
+ - vault-conf
+
+- meta: flush_handlers
diff --git a/fdio.infra.ansible/roles/vault/templates/vault_backend_consul.j2 b/fdio.infra.ansible/roles/vault/templates/vault_backend_consul.j2
new file mode 100644
index 0000000000..c45498af90
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/templates/vault_backend_consul.j2
@@ -0,0 +1,15 @@
+backend "consul" {
+ address = "{{ vault_consul }}"
+ path = "{{ vault_consul_path }}"
+ service = "{{ vault_consul_service }}"
+ {% if vault_consul_token is defined and vault_consul_token -%}
+ token = "{{ vault_consul_token }}"
+ {% endif -%}
+ scheme = "{{ vault_consul_scheme }}"
+ {% if vault_tls_gossip | bool -%}
+ tls_cert_file = "{{ vault_backend_tls_config_path }}/{{ vault_backend_tls_cert_file }}"
+ tls_key_file = "{{ vault_backend_tls_config_path }}/{{ vault_backend_tls_key_file }}"
+ tls_ca_file="{{ vault_backend_tls_config_path }}/{{ vault_backend_tls_ca_file }}"
+ {% endif %}
+
+}
\ No newline at end of file
diff --git a/fdio.infra.ansible/roles/vault/templates/vault_main_configuration.hcl.j2 b/fdio.infra.ansible/roles/vault/templates/vault_main_configuration.hcl.j2
new file mode 100644
index 0000000000..dec4fff8d9
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/templates/vault_main_configuration.hcl.j2
@@ -0,0 +1,93 @@
+cluster_name = "{{ vault_cluster_name }}"
+max_lease_ttl = "{{ vault_max_lease_ttl }}"
+default_lease_ttl = "{{ vault_default_lease_ttl }}"
+
+disable_clustering = "{{ vault_cluster_disable | bool | lower }}"
+cluster_addr = "{{ vault_cluster_addr }}"
+api_addr = "{{ vault_api_addr }}"
+
+{% for l in vault_tcp_listeners %}
+listener "tcp" {
+ address = "{{ l.vault_address }}:{{ l.vault_port }}"
+ cluster_address = "{{ l.vault_cluster_address }}"
+ {% if (l.vault_proxy_protocol_behavior is defined and l.vault_proxy_protocol_behavior) -%}
+ proxy_protocol_behavior = "{{ l.vault_proxy_protocol_behavior }}"
+ {% if (l.vault_proxy_protocol_authorized_addrs is defined) -%}
+ proxy_protocol_authorized_addrs = "{{ l.vault_proxy_protocol_authorized_addrs }}"
+ {% endif -%}
+ {% endif -%}
+ {% if not (l.vault_tls_disable | bool) -%}
+ tls_cert_file = "{{ l.vault_tls_config_path }}/{{ l.vault_tls_cert_file }}"
+ tls_key_file = "{{ l.vault_tls_config_path }}/{{ l.vault_tls_key_file }}"
+ tls_client_ca_file="{{ l.vault_tls_config_path }}/{{ l.vault_tls_ca_file }}"
+ tls_min_version = "{{ l.vault_tls_min_version }}"
+ {% if vault_tls_cipher_suites is defined and vault_tls_cipher_suites -%}
+ tls_cipher_suites = "{{ l.vault_tls_cipher_suites}}"
+ {% endif -%}
+ tls_prefer_server_cipher_suites = "{{ l.vault_tls_prefer_server_cipher_suites }}"
+ {% if (l.vault_tls_require_and_verify_client_cert | bool) -%}
+ tls_require_and_verify_client_cert = "{{ l.vault_tls_require_and_verify_client_cert | bool | lower}}"
+ {% endif -%}
+ {% if (l.vault_tls_disable_client_certs | bool) -%}
+ tls_disable_client_certs = "{{ l.vault_tls_disable_client_certs | bool | lower}}"
+ {% endif -%}
+ {% endif -%}
+ tls_disable = "{{ l.vault_tls_disable | bool | lower }}"
+}
+{% endfor %}
+
+{% if (vault_listener_localhost_enable | bool) -%}
+listener "tcp" {
+ address = "127.0.0.1:{{ vault_port }}"
+ cluster_address = "127.0.0.1:8201"
+ tls_disable = "true"
+}
+{% endif -%}
+
+{#
+ Select which storage backend you want generated and placed
+ in the vault configuration file.
+#}
+{%- if vault_backend == 'consul' -%}
+ {% include vault_backend_consul with context %}
+{% elif vault_backend == 'etcd' -%}
+ {% include vault_backend_etcd with context %}
+{% elif vault_backend == 'file' -%}
+ {% include vault_backend_file with context %}
+{% elif vault_backend == 's3' -%}
+ {% include vault_backend_s3 with context %}
+{% elif vault_backend == 'dynamodb' -%}
+ {% include vault_backend_dynamodb with context %}
+{% elif vault_backend == 'mysql' -%}
+ {% include vault_backend_mysql with context %}
+{% elif vault_backend == 'gcs' -%}
+ {% include vault_backend_gcs with context %}
+{% elif vault_backend == 'raft' -%}
+ {% include vault_backend_raft with context %}
+{% endif %}
+
+{% if vault_service_registration_consul_enable -%}
+ {% include vault_service_registration_consul_template with context %}
+{% endif %}
+
+{% if vault_ui %}
+ui = {{ vault_ui | bool | lower }}
+{% endif %}
+
+{% if vault_telemetry_enabled | bool -%}
+telemetry {
+  {% if vault_statsite_address is defined -%}
+  statsite_address = "{{ vault_statsite_address }}"
+  {% endif -%}
+  {% if vault_statsd_address is defined -%}
+  statsd_address = "{{ vault_statsd_address }}"
+  {% endif -%}
+  {% if vault_prometheus_retention_time is defined -%}
+  prometheus_retention_time = "{{ vault_prometheus_retention_time }}"
+  {% endif -%}
+  {% if vault_telemetry_disable_hostname is defined -%}
+  disable_hostname = {{ vault_telemetry_disable_hostname | bool | lower }}
+ {% endif %}
+
+}
+{% endif %} \ No newline at end of file
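As a quick sanity check of the template above, the rendered configuration can be exercised by hand before relying on the systemd unit. This is a hedged sketch only: the binary and config paths are assumptions standing in for vault_bin_dir and vault_config_dir, and the listener address/port are illustrative defaults.

    # Start Vault manually against the rendered config directory (paths assumed).
    sudo -u vault /usr/local/bin/vault server -config=/etc/vault.d
    # In a second shell, confirm the TCP listener answers (address assumed).
    VAULT_ADDR=http://127.0.0.1:8200 vault status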
diff --git a/fdio.infra.ansible/roles/vault/templates/vault_service_registration_consul.hcl.j2 b/fdio.infra.ansible/roles/vault/templates/vault_service_registration_consul.hcl.j2
new file mode 100644
index 0000000000..cd5da1ffb6
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/templates/vault_service_registration_consul.hcl.j2
@@ -0,0 +1,22 @@
+service_registration "consul" {
+ address = "{{ vault_service_registration_consul_address }}"
+ check_timeout = "{{ vault_service_registration_consul_check_timeout }}"
+ disable_registration = "{{ vault_service_registration_consul_disable_registration | bool | lower }}"
+ scheme = "{{ vault_service_registration_consul_scheme }}"
+ service = "{{ vault_service_registration_consul_service }}"
+ service_tags = "{{ vault_service_registration_consul_service_tags }}"
+ {% if vault_service_registration_consul_service_address is defined and vault_service_registration_consul_service_address -%}
+ service_address = "{{ vault_service_registration_consul_service_address }}"
+ {% endif -%}
+ {% if vault_service_registration_consul_token is defined and vault_service_registration_consul_token -%}
+ token = "{{ vault_service_registration_consul_token }}"
+ {% endif -%}
+ {% if vault_service_registration_consul_scheme == "https" -%}
+  tls_ca_file = "{{ vault_service_registration_consul_tls_config_path }}/{{ vault_service_registration_consul_tls_ca_file }}"
+ tls_cert_file = "{{ vault_service_registration_consul_tls_config_path }}/{{ vault_service_registration_consul_tls_cert_file }}"
+ tls_key_file = "{{ vault_service_registration_consul_tls_config_path }}/{{ vault_service_registration_consul_tls_key_file }}"
+ tls_min_version = "{{ vault_service_registration_consul_tls_min_version }}"
+ tls_skip_verify = "{{ vault_service_registration_consul_tls_skip_verify }}"
+ {% endif %}
+
+} \ No newline at end of file
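A hedged way to confirm that the service registration rendered above actually took effect is to ask the local Consul agent for the "vault" service; the agent address, HTTP port, and service name below are assumptions taken from typical defaults, not values mandated by this role.

    # List Vault instances as seen by Consul's service catalog (agent address assumed).
    curl -s http://127.0.0.1:8500/v1/catalog/service/vault | python3 -m json.tool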
diff --git a/fdio.infra.ansible/roles/vault/templates/vault_systemd.service.j2 b/fdio.infra.ansible/roles/vault/templates/vault_systemd.service.j2
new file mode 100644
index 0000000000..5d2ca78b2e
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/templates/vault_systemd.service.j2
@@ -0,0 +1,30 @@
+[Unit]
+Description=Vault
+Documentation=https://www.vaultproject.io/docs/
+Requires=network-online.target
+After=network-online.target
+
+[Service]
+User={{ vault_user }}
+Group={{ vault_group }}
+ProtectSystem=full
+ProtectHome=read-only
+PrivateTmp=yes
+PrivateDevices=yes
+NoNewPrivileges=yes
+ExecReload=/bin/kill -HUP $MAINPID
+ExecStart={{ vault_bin_dir }}/vault {{ vault_node_role }} -config={{ vault_config_dir }}
+KillMode=process
+KillSignal=SIGINT
+Restart=on-failure
+RestartSec=5
+TimeoutStopSec=30
+StartLimitInterval=60
+StartLimitBurst=3
+LimitNOFILE=524288
+LimitNPROC=524288
+LimitMEMLOCK=infinity
+LimitCORE=0
+
+[Install]
+WantedBy=multi-user.target \ No newline at end of file
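Once this unit is templated out (the installed name is assumed to be vault.service here; the role controls the actual destination), the usual systemd workflow applies:

    sudo systemctl daemon-reload
    sudo systemctl enable --now vault.service
    journalctl -u vault.service -n 50 --no-pager   # look for mlock or listener errors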
diff --git a/fdio.infra.ansible/roles/vault/vars/main.yaml b/fdio.infra.ansible/roles/vault/vars/main.yaml
new file mode 100644
index 0000000000..2b16a63fdf
--- /dev/null
+++ b/fdio.infra.ansible/roles/vault/vars/main.yaml
@@ -0,0 +1,5 @@
+---
+# file: roles/vault/vars/main.yaml
+
+vault_node_client: "{{ (vault_node_role == 'client') or (vault_node_role == 'both') }}"
+vault_node_server: "{{ (vault_node_role == 'server') or (vault_node_role == 'both') }}"
diff --git a/fdio.infra.ansible/roles/vpp/defaults/main.yaml b/fdio.infra.ansible/roles/vpp/defaults/main.yaml
new file mode 100644
index 0000000000..00c56859d0
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp/defaults/main.yaml
@@ -0,0 +1,28 @@
+---
+# file: roles/vpp/defaults/main.yaml
+
+packages: "{{ packages_base + packages_by_distro[ansible_distribution|lower][ansible_distribution_release] + packages_by_arch[ansible_machine] }}"
+
+packages_base:
+ - "gdb"
+ - "libtool"
+ - "lxc"
+ - "pkg-config"
+ - "screen"
+
+packages_by_distro:
+ ubuntu:
+ jammy:
+ - "build-essential"
+ - "libglib2.0-dev"
+ - "libmbedcrypto7"
+ - "libmbedtls14"
+ - "libmbedx509-1"
+ - "libnuma-dev"
+ - "libpixman-1-dev"
+
+packages_by_arch:
+ aarch64:
+ - []
+ x86_64:
+ - []
diff --git a/fdio.infra.ansible/roles/vpp/tasks/main.yaml b/fdio.infra.ansible/roles/vpp/tasks/main.yaml
new file mode 100644
index 0000000000..cea06b764d
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp/tasks/main.yaml
@@ -0,0 +1,27 @@
+---
+# file: roles/vpp/tasks/main.yaml
+
+- name: Inst - Update Package Cache (APT)
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+ when:
+ - ansible_distribution|lower == 'ubuntu'
+ tags:
+ - vpp-inst-prerequisites
+
+- name: Inst - Prerequisites
+ ansible.builtin.package:
+ name: "{{ packages | flatten(levels=1) }}"
+ state: latest
+ tags:
+ - vpp-inst-prerequisites
+
+- name: Conf - sysctl
+ ansible.builtin.file:
+ src: "/dev/null"
+ dest: "/etc/sysctl.d/80-vpp.conf"
+ state: "link"
+ become: true
+ tags:
+ - vpp-conf-sysctl
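The "Conf - sysctl" task above masks the sysctl snippet shipped by the VPP packages by linking it to /dev/null, so the packaged hugepage and shared-memory settings never apply. For illustration only (the role already does this idempotently), the shell equivalent is roughly:

    sudo ln -sfn /dev/null /etc/sysctl.d/80-vpp.conf
    sudo sysctl --system   # 80-vpp.conf now contributes nothing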
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-alt.sh b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-alt.sh
new file mode 100644
index 0000000000..cd04d61251
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-alt.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 PANTHEON.tech and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add QLogic Corp. FastLinQ QL41000 Series 10/25/40/50GbE Controller to
+# blacklist.
+PCI_BLACKLIST=($(lspci -Dmmd ':8070:0200' | cut -f1 -d' '))
+# Add I350 Gigabit Network Connection 1521 to blacklist.
+PCI_BLACKLIST+=($(lspci -Dmmd ':1521:0200' | cut -f1 -d' '))
+# Add MT27800 Family [ConnectX-5] 1017 to blacklist.
+PCI_BLACKLIST+=($(lspci -Dmmd ':1017:0200' | cut -f1 -d' '))
+
+# Add Intel Corporation Ethernet Controller XL710 for 40GbE QSFP+ to whitelist.
+PCI_WHITELIST=($(lspci -Dmmd ':1583:0200' | cut -f1 -d' '))
+# Add MT2892 Family [ConnectX-6 Dx] 101d to whitelist.
+PCI_WHITELIST+=($(lspci -Dmmd ':101d:0200' | cut -f1 -d' '))
+
+# See http://pci-ids.ucw.cz/v2.2/pci.ids for more info.
+
+declare -A PF_INDICES
+# Intel NICs
+PF_INDICES["0000:01:00.0"]=0
+PF_INDICES["0000:01:00.1"]=1
+PF_INDICES["0003:02:00.0"]=0
+PF_INDICES["0003:02:00.1"]=1
+# Mellanox CX6
+PF_INDICES["0001:01:00.0"]=2
+PF_INDICES["0001:01:00.1"]=2 \ No newline at end of file
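The lspci pipeline used in these data files filters by device ID and the Ethernet class code (vendor left as a wildcard) and keeps only the PCI address column. A hedged illustration of its output on a matching host (addresses vary per machine; the values shown are illustrative only):

    $ lspci -Dmmd ':1583:0200' | cut -f1 -d' '
    0000:01:00.0
    0000:01:00.1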
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-default.sh b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-default.sh
new file mode 100644
index 0000000000..91c93ab882
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-default.sh
@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add Intel Corporation Ethernet Controller 10G X550T to blacklist.
+PCI_BLACKLIST=($(lspci -Dmmd ':1563:0200' | cut -f1 -d' '))
+
+# Add Intel Corporation Ethernet Controller X710 for 10GbE SFP+ to whitelist.
+PCI_WHITELIST=($(lspci -Dmmd ':1572:0200' | cut -f1 -d' '))
+# Add Intel Corporation Ethernet Controller E810-C for 100GbE QSFP to whitelist.
+PCI_WHITELIST+=($(lspci -Dmmd ':1592:0200' | cut -f1 -d' '))
+
+# See http://pci-ids.ucw.cz/v2.2/pci.ids for more info.
+
+declare -A PF_INDICES
+# Intel NICs
+PF_INDICES["0000:18:00.0"]=0
+PF_INDICES["0000:18:00.1"]=1
+PF_INDICES["0000:18:00.2"]=2
+PF_INDICES["0000:18:00.3"]=3
+PF_INDICES["0000:86:00.0"]=4
+PF_INDICES["0000:3b:00.0"]=0
+PF_INDICES["0000:3b:00.1"]=1
+PF_INDICES["0000:3b:00.2"]=2
+PF_INDICES["0000:3b:00.3"]=3
+PF_INDICES["0000:af:00.0"]=4
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh
new file mode 100644
index 0000000000..74593b24d4
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2024 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add Intel Corporation Ethernet Controller 10G X550T to blacklist.
+PCI_BLACKLIST=($(lspci -Dmmd ':1563:0200' | cut -f1 -d' '))
+
+# Add Intel Corporation Ethernet Controller X710 for 10GbE SFP+ to whitelist.
+PCI_WHITELIST=($(lspci -Dmmd ':1572:0200' | cut -f1 -d' '))
+# Add Intel Corporation Ethernet Controller E810-C for 100GbE QSFP to whitelist.
+PCI_WHITELIST+=($(lspci -Dmmd ':1592:0200' | cut -f1 -d' '))
+
+# See http://pci-ids.ucw.cz/v2.2/pci.ids for more info.
+
+declare -A PF_INDICES
+# Intel NICs
+PF_INDICES["0000:2a:00.0"]=0
+PF_INDICES["0000:2a:00.1"]=1
+PF_INDICES["0000:2a:00.2"]=2
+PF_INDICES["0000:2a:00.3"]=3
+PF_INDICES["0000:bd:00.0"]=4
+PF_INDICES["0000:3d:00.0"]=0
+PF_INDICES["0000:3d:00.1"]=1
+PF_INDICES["0000:3d:00.2"]=2
+PF_INDICES["0000:3d:00.3"]=3
+PF_INDICES["0000:e1:00.0"]=4
+
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-tx2.sh b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-tx2.sh
new file mode 100644
index 0000000000..6c56752ad0
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-tx2.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2021 PANTHEON.tech and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add QLogic Corp. FastLinQ QL41000 Series 10/25/40/50GbE Controller to
+# blacklist.
+PCI_BLACKLIST=($(lspci -Dmmd ':8070:0200' | cut -f1 -d' '))
+# Add I350 Gigabit Network Connection 1521 to blacklist.
+PCI_BLACKLIST+=($(lspci -Dmmd ':1521:0200' | cut -f1 -d' '))
+# Add MT27800 Family [ConnectX-5] 1017 to blacklist.
+PCI_BLACKLIST+=($(lspci -Dmmd ':1017:0200' | cut -f1 -d' '))
+
+# Add Intel Corporation Ethernet Controller XL710 for 40GbE QSFP+ to whitelist.
+PCI_WHITELIST=($(lspci -Dmmd ':1583:0200' | cut -f1 -d' '))
+
+# See http://pci-ids.ucw.cz/v2.2/pci.ids for more info.
+
+declare -A PF_INDICES
+# Intel NICs
+PF_INDICES["0000:05:00.0"]=0
+PF_INDICES["0000:05:00.1"]=1
+PF_INDICES["0000:91:00.0"]=0
+PF_INDICES["0000:91:00.1"]=1
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs.service b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs.service
new file mode 100644
index 0000000000..996792ab9b
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=CSIT Initialize SR-IOV VFs
+After=network.target
+
+[Service]
+Type=oneshot
+RemainAfterExit=True
+ExecStart=/usr/local/bin/csit-initialize-vfs.sh start
+ExecStop=/usr/local/bin/csit-initialize-vfs.sh stop
+
+[Install]
+WantedBy=default.target
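After the vpp_device role copies this unit and its handler starts it, VF creation can be spot-checked per physical function; the unit name comes from the file above, while the PCI address below is purely illustrative:

    systemctl status csit-initialize-vfs.service --no-pager
    cat /sys/bus/pci/devices/0000:18:00.0/sriov_numvfs   # expect up to 128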
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs.sh b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs.sh
new file mode 100644
index 0000000000..afa84ae15a
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs.sh
@@ -0,0 +1,77 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# CSIT SRIOV VF initialization and isolation.
+
+set -euo pipefail
+
+SCRIPT_DIR="$(dirname $(readlink -e "${BASH_SOURCE[0]}"))"
+source "${SCRIPT_DIR}/csit-initialize-vfs-data.sh"
+
+# Initialize whitelisted NICs with maximum number of VFs.
+pci_idx=0
+for pci_addr in ${PCI_WHITELIST[@]}; do
+ if ! [[ ${PCI_BLACKLIST[*]} =~ "${pci_addr}" ]]; then
+ pci_path="/sys/bus/pci/devices/${pci_addr}"
+ # SR-IOV initialization
+ case "${1:-start}" in
+ "start" )
+ if [ $(< "${pci_path}"/sriov_totalvfs) -gt 128 ]
+ then
+ sriov_totalvfs=128
+ else
+ sriov_totalvfs=$(< "${pci_path}"/sriov_totalvfs)
+ fi
+ ;;
+ "stop" )
+ sriov_totalvfs=0
+ ;;
+ esac
+ echo ${sriov_totalvfs} > "${pci_path}"/sriov_numvfs
+ # SR-IOV 802.1Q isolation
+ case "${1:-start}" in
+ "start" )
+ pf=$(basename "${pci_path}"/net/*)
+ for vf in $(seq "${sriov_totalvfs}"); do
+ # PCI address index in array (pairing siblings).
+                    if [[ ${#PF_INDICES[@]} -gt 0 ]]
+ then
+ vlan_pf_idx=${PF_INDICES[$pci_addr]}
+ else
+ vlan_pf_idx=$(( pci_idx % (${#PCI_WHITELIST[@]} / 2) ))
+ fi
+ # 802.1Q base offset.
+ vlan_bs_off=1100
+ # 802.1Q PF PCI address offset.
+ vlan_pf_off=$(( vlan_pf_idx * 100 + vlan_bs_off ))
+ # 802.1Q VF PCI address offset.
+ vlan_vf_off=$(( vlan_pf_off + vf - 1 ))
+ # VLAN string.
+ vlan_str="vlan ${vlan_vf_off}"
+ # MAC string.
+ mac5="$(printf '%x' ${pci_idx})"
+ mac6="$(printf '%x' $(( vf - 1 )))"
+ mac_str="mac ba:dc:0f:fe:${mac5}:${mac6}"
+ # Set 802.1Q VLAN id and MAC address
+ ip link set ${pf} vf $(( vf - 1 )) ${mac_str} ${vlan_str}
+ ip link set ${pf} vf $(( vf - 1 )) trust on
+ ip link set ${pf} vf $(( vf - 1 )) spoof off
+ sleep .5
+ done
+ pci_idx=$(( pci_idx + 1 ))
+ ;;
+ esac
+ fi
+done
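A hedged worked example of the 802.1Q and MAC arithmetic above, using illustrative values (first whitelisted PF, PF index 2, third VF):

    pci_idx=0; vlan_pf_idx=2; vf=3; vlan_bs_off=1100
    vlan_pf_off=$(( vlan_pf_idx * 100 + vlan_bs_off ))   # 1300
    vlan_vf_off=$(( vlan_pf_off + vf - 1 ))              # 1302
    printf 'vlan %d mac ba:dc:0f:fe:%x:%x\n' "${vlan_vf_off}" "${pci_idx}" $(( vf - 1 ))
    # -> vlan 1302 mac ba:dc:0f:fe:0:2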
diff --git a/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml b/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml
new file mode 100644
index 0000000000..3ac80cc16e
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml
@@ -0,0 +1,21 @@
+---
+# file: handlers/main.yaml
+
+- name: "Start csit-initialize-vfs.service"
+ ansible.builtin.systemd:
+ enabled: true
+ state: "started"
+ name: "csit-initialize-vfs.service"
+ tags:
+ - start-vf-service
+
+- name: "Update GRUB"
+ ansible.builtin.command: "update-grub"
+ tags:
+ - update-grub
+
+- name: "Reboot server"
+ ansible.builtin.reboot:
+ reboot_timeout: 3600
+ tags:
+ - reboot-server
diff --git a/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml b/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml
new file mode 100644
index 0000000000..91916456af
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml
@@ -0,0 +1,139 @@
+---
+# file: tasks/main.yaml
+
+- name: "Load Kernel Modules On Startup (vfio-pci)"
+ ansible.builtin.lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "vfio-pci"
+ tags:
+ - load-kernel-modules
+
+- name: "Disable IPv6 Router Advertisement"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.accept_ra"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: "Disable IPv6 MLDv1 interval"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.mldv1_unsolicited_report_interval"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: "Disable IPv6 MLDv2 interval"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.mldv2_unsolicited_report_interval"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: "Disable IPv6 Autoconf"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.autoconf"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: "Disable IPv6 MC Forwarding"
+ ansible.builtin.sysctl:
+ name: "net.ipv6.conf.default.mc_forwarding"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: "Disable IPv4 IGMPv2 interval"
+ ansible.builtin.sysctl:
+ name: "net.ipv4.conf.default.igmpv2_unsolicited_report_interval"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: "Disable IPv4 IGMPv3 interval"
+ ansible.builtin.sysctl:
+ name: "net.ipv4.conf.default.igmpv3_unsolicited_report_interval"
+ value: "0"
+ state: "present"
+ sysctl_file: "/etc/sysctl.d/90-csit.conf"
+ reload: "yes"
+ tags:
+ - set-sysctl
+
+- name: "Copy csit-initialize-vfs.sh"
+ ansible.builtin.copy:
+ src: "files/csit-initialize-vfs.sh"
+ dest: "/usr/local/bin/"
+ owner: "root"
+ group: "root"
+ mode: 0744
+ tags:
+ - copy-vf-script
+
+- name: "Copy csit-initialize-vfs-data.sh"
+ ansible.builtin.copy:
+ src: "files/{{ vfs_data_file }}"
+ dest: "/usr/local/bin/csit-initialize-vfs-data.sh"
+ owner: "root"
+ group: "root"
+ mode: 0744
+ tags: copy-vf-data-script
+ when:
+ - vfs_data_file is defined
+
+- name: "Copy Default csit-initialize-vfs-data.sh"
+ ansible.builtin.copy:
+ src: "files/csit-initialize-vfs-default.sh"
+ dest: "/usr/local/bin/csit-initialize-vfs-data.sh"
+ owner: "root"
+ group: "root"
+ mode: 0744
+ tags: copy-vf-data-script
+ when:
+ - vfs_data_file is not defined
+
+- name: "Start csit-initialize-vfs.service"
+ ansible.builtin.copy:
+ src: "files/csit-initialize-vfs.service"
+ dest: "/etc/systemd/system/"
+ owner: "root"
+ group: "root"
+ mode: 0644
+ notify:
+ - "Start csit-initialize-vfs.service"
+ tags:
+ - start-vf-service
+
+- ansible.builtin.meta: "flush_handlers"
+
+- name: "Set Hugepages In GRUB"
+ ansible.builtin.lineinfile:
+ path: "/etc/default/grub"
+ state: "present"
+ regexp: "^GRUB_CMDLINE_LINUX="
+ line: "GRUB_CMDLINE_LINUX=\"{% for key, value in grub.items() %}{% if value %}{{key}}={{value}} {% else %}{{key}} {% endif %}{% endfor %}\""
+ notify:
+ - "Update GRUB"
+ tags:
+ - set-grub
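For illustration, a hypothetical grub dict such as {isolcpus: "1-27", intel_iommu: "on", iommu: "pt"} (the real dict comes from inventory variables) would make the task above render the line shown below; a key with an empty or false value is emitted bare, without "=value". The notified handlers then apply it, roughly equivalent to the last command:

    GRUB_CMDLINE_LINUX="isolcpus=1-27 intel_iommu=on iommu=pt "
    sudo update-grub && sudo reboot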
diff --git a/fdio.infra.ansible/site.yaml b/fdio.infra.ansible/site.yaml
new file mode 100644
index 0000000000..45a090344e
--- /dev/null
+++ b/fdio.infra.ansible/site.yaml
@@ -0,0 +1,32 @@
+---
+# file: site.yaml
+
+- import_playbook: tg.yaml
+ tags:
+ - tg
+ - tg_aws
+ - tg_azure
+ - tg_openstack
+
+- import_playbook: sut.yaml
+ tags:
+ - sut
+ - sut_aws
+ - sut_azure
+    - sut_openstack
+
+- import_playbook: vpp_device.yaml
+ tags:
+ - vpp-device
+
+- import_playbook: nomad.yaml
+ tags:
+ - nomad
+
+- import_playbook: dev.yaml
+ tags:
+ - dev
+
+- import_playbook: vagrant.yaml
+ tags:
+ - vagrant
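The imported playbooks are intended to be run selectively by tag; the inventory path below is an assumption based on the repository layout, and the host limit is a placeholder:

    ansible-playbook -i inventories/lf_inventory/hosts site.yaml \
        --tags vpp-device --limit <testbed-host>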
diff --git a/fdio.infra.ansible/sut.yaml b/fdio.infra.ansible/sut.yaml
new file mode 100644
index 0000000000..57be961ee1
--- /dev/null
+++ b/fdio.infra.ansible/sut.yaml
@@ -0,0 +1,134 @@
+---
+# file: sut.yaml
+
+- hosts: sut
+ remote_user: testuser
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: baremetal
+ tags: baremetal
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: kernel
+ tags: kernel
+ - role: mellanox
+ tags: mellanox
+ - role: intel
+ tags: intel
+ - role: docker
+ tags: docker
+ - role: vpp
+ tags: vpp
+ - role: dpdk
+ tags: dpdk
+ - role: kernel_vm
+ tags: kernel_vm
+ - role: docker_images
+ tags: docker_images
+ - role: performance_tuning
+ tags: performance_tuning
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration
+
+- hosts: sut_aws
+ remote_user: testuser
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: vpp
+ tags: vpp
+ - role: iperf
+ tags: iperf
+ - role: docker
+ tags: docker
+# - role: dpdk
+# tags: dpdk
+ - role: aws
+ tags: aws
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration
+
+- hosts: sut_azure
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: docker
+ tags: docker
+ - role: vpp
+ tags: vpp
+ - role: iperf
+ tags: iperf
+ - role: dpdk
+ tags: dpdk
+ - role: azure
+ tags: azure
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration
+
+- hosts: sut_openstack
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: docker
+ tags: docker
+ - role: vpp
+ tags: vpp
+ - role: iperf
+ tags: iperf
+ - role: dpdk
+ tags: dpdk
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration \ No newline at end of file
diff --git a/fdio.infra.ansible/tg.yaml b/fdio.infra.ansible/tg.yaml
new file mode 100644
index 0000000000..de8706ffd1
--- /dev/null
+++ b/fdio.infra.ansible/tg.yaml
@@ -0,0 +1,136 @@
+---
+# file: tg.yaml
+
+- hosts: tg
+ remote_user: testuser
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: baremetal
+ tags: baremetal
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: kernel
+ tags: kernel
+ - role: mellanox
+ tags: mellanox
+ - role: intel
+ tags: intel
+ - role: docker
+ tags: docker
+ - role: docker_images
+ tags: docker_images
+ - role: iperf
+ tags: iperf
+ - role: trex
+ tags: trex
+ - role: ab
+ tags: ab
+ - role: performance_tuning
+ tags: performance_tuning
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration
+
+- hosts: tg_aws
+ remote_user: testuser
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: docker
+ tags: docker
+ - role: iperf
+ tags: iperf
+# - role: dpdk
+# tags: dpdk
+ - role: aws
+ tags: aws
+ - role: trex
+ tags: trex
+ - role: ab
+ tags: ab
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration
+
+- hosts: tg_azure
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: docker
+ tags: docker
+ - role: iperf
+ tags: iperf
+ - role: trex
+ tags: trex
+ - role: ab
+ tags: ab
+ - role: azure
+ tags: azure
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration
+
+- hosts: tg_openstack
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: user_add
+ tags: user_add
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: docker
+ tags: docker
+ - role: iperf
+ tags: iperf
+ - role: trex
+ tags: trex
+ - role: ab
+ tags: ab
+ - role: cleanup
+ tags: cleanup
+ - role: calibration
+ tags: calibration \ No newline at end of file
diff --git a/fdio.infra.ansible/vagrant.yaml b/fdio.infra.ansible/vagrant.yaml
new file mode 100644
index 0000000000..ad655c3926
--- /dev/null
+++ b/fdio.infra.ansible/vagrant.yaml
@@ -0,0 +1,24 @@
+---
+# file: vagrant.yaml
+
+- hosts: vagrant
+ remote_user: vagrant
+ become: true
+ become_user: root
+ gather_facts: false
+ pre_tasks:
+ - name: Gathering Facts
+ gather_facts:
+ tags:
+ - always
+ roles:
+ - role: common
+ tags: common
+ - role: python_env
+ tags: python_env
+ - role: docker
+ tags: docker
+ - role: vpp
+ tags: vpp
+ - role: vagrant
+ tags: vagrant
diff --git a/fdio.infra.ansible/vault.yml b/fdio.infra.ansible/vault.yml
new file mode 100644
index 0000000000..2c10624d07
--- /dev/null
+++ b/fdio.infra.ansible/vault.yml
@@ -0,0 +1,706 @@
+$ANSIBLE_VAULT;1.1;AES256
+39626435383866646264643632343836383132616162323536323732396265373539616464373932
+6237636263373339366163363739613139383330626437310a343862363835633663633164393464
+34653061656331363864323363663932306637613937383534343963316364636635313663343664
+3539373536313461350a626264363866306630656464646439303338383535323863393537346262
+61346533323162653766343763363934313937653237663437393133666463363962346331343836
+33393533313737626636356466623566393232303037636266353565653130646434323338376136
+32306537653134633062313732623830393166336135666263343933323138616436313632623533
+38646539623837626132346631393964663062386631313236353563393131376130316666343562
+35346632316461336564393264323632643232663136323334336336626339633365643565306436
+35333365356637316631666661356431396635636431383032643566373666363936363462616337
+62316234363963306338366638663064396365373264326635376134356333626130303834383534
+63356363373035623031666330626463613563313066303365303430643262346561633633666466
+63666661616139366431313832376665383638393835376261326465353938333963333264653236
+35613664656363643438316232306630353361343030353963386666656166373061666236326664
+63383033633733633361383061316232306566353062616163623563663032393734353963303361
+34323330336561323734373430323435393731323931343335613935396330663533323137323234
+39333132326364646336613362363365323331366430396535383138386261356534613832393863
+61363133356435303964353637346436613633366336386465326336313935323538643636313331
+39663832373337643865386164343764613764346331633261653039613136386636366431376565
+36373431303631323730343831363837303665353461303130326430636161623431613730356134
+66396632306139373732636165313834366339366363666566306461366266356338346566316363
+38386564313665306239646436386635396664643333383736396131353535336334383765653961
+31643833663361343036623634326662333935343332333066353732396132373561396631633265
+37643733393433336364353630666136616235656638343032333632373366616634343130313536
+64643834643236363731353664396636613266656263363664376539306539346639313838663738
+36623634396638313861636438306136613433313062306537653738316563613631363566623134
+62323161646464636131326661646535313436396534356335366564373165653662613536663561
+32366666356339326264326539393239666234633663366261623933366636636161343233383932
+34336638363864346436613164613161326363316431303963656233306663376332373731623437
+66353636336462663730316161313236393264633633363965653433306639643938383933373062
+30323331323735613139323963643335373335633265383235343835376363666132643431366665
+32396465313434343534343433633432396131323635356265663939616436343137393561633734
+36383961363964306431343939313837663166376133616365663939633161326338306561356666
+34663938613865383739376534306333653135383431316464613432366566623362393065306635
+64313737363065336234393463316138313864386163633966336662313366396431353632363365
+62313036656434616136626331333139343235333930363166313833306438616364393065333963
+36396137656634393066646330613365316562306164326133663365313938333337386137383864
+61376333346336396537613933323261663536353161643431383636316235393035313861326230
+31623566363138663866326231376561336534366435626134306463323032613630646361343165
+38663662653334316139313866346163623461656335616232353636636431636339313239346661
+33396133306132643732366232353166613263356165663866626465343936326461626336376462
+34376431643730616461376261356633356166313131623837343839323935343531356263336230
+62323236633339303031316165326362306535653064363862663330393034303964353437363565
+61616565656164313664306664663932333261333162383832326666393366353730306531343639
+64323530333466653534306130636161383463376632616232353361656661353831643966633639
+66336631396164663864653933623662343061316539306634393334653630393462376636346165
+33646635646633353263333838343534373838333836656536373639646636303033626537313464
+38323462396231653233333463313263313233353932356633366630623138376432383566363538
+38346336343765353830656139333564633533323538616166333865323439343361386135343130
+63343363303332313834313764356637643230386536613338623937393436613732383833366235
+64343862663234633232656333666438633464663463663737363431626361643532396664326661
+33383261643231313836326162373532626234383362653162656435366535643863316235656536
+62306334316566623732303834313864663636636334663130653230383365623031316164373161
+64326638636339303065386435303332333230666634323163393134613032326335313961306236
+37343666376235653637636136636133373966636136626137356331336234646366316364623134
+37633832386237346266376533663839343938623664653030636265303465643835666234383763
+36373439346566623739343361386562623962633539383134663237656662653939313938663938
+30653438306666613731326462633334313730363763666538666538356638633261353363393930
+37626261316565633162366636343539626238323861656565393162333662353563393139626261
+64663436336233636438326633303164333939633435373765333731666135366637653532396238
+61346538626662626463303965363061646261343232356639336366333065623765646335353638
+66616430343562366262333235323234383334343538633037643661623661306330343839333034
+62626533353238323064633862613736663666303666373262633533643539643962383333346264
+38346134363236633031393138313538666533643561373766613836636461393166306138333231
+63613735313636376538346230303662646633346363353535333232663033373036336564623766
+62316464363930366665663530626564346130316231313033323130323531396366303030616363
+36356431653435346561656132616632373834393833633865323762343037656439373331623664
+39633635306334646334386635356362346431366430326364303931336562333735613033346437
+37613636656261383034663633666138653537326364326430666632323633386636376635343232
+37643965663564376337616432623336326238626530333736313530386537383633653437356436
+66613230366666373734396538363362626531666331656362356364393230343132363033333061
+39396237316432663832343261393366306232363166313034383033313565373265353436666263
+38356564656331393733393333346164313233313937343330363662316431353233633063646438
+63386162363034363039663864636365666638343665386334383436353336313333653166343937
+35626138326333623362623639336461343737633730306630303963613465396464386534663763
+63383733643761613136386430646361363731343433656533653938366138373162313335373038
+63626639643634663962643130343566323730646432366136393335623134336238383761333733
+31633330343962316134333536376464663166316365386265393061316162363161376333393561
+31646361386332613837663634303134626133376630643730636466643961343462366566363737
+65373236323861343238343434316637663333346161633835306663313537383631303136323832
+30353862663462386262663461623432373034333631633133633563636336343831653934303033
+36303162323164663632393937643665643961313464623033383130663565333034636164643136
+62316639333662316133393533623561633435653036376637323739326339346632653837633637
+37663332386663396363653237353031343065366334333836343830363562383733653661323664
+61333633306531323639343931663061383031303937323261616165633163636133323130666133
+31353366386530323465646133343766663966663561616337313330353436303234346136326365
+64343561656532366538336662656261396337623763343966366136646638643739373037336361
+65336538373161396533613262326633343233653036656364636536636338623930316365353365
+61663435663333633435376535353963646566303130616364396539643366306635646538363464
+36623433663437363761313038666665306536353332633361316339666263306665636566393531
+66356565363839666130663866643134343632623837323630376638313131383036376464613331
+65366262663936623238303863303761313466656538343962326662646139363166616230323938
+35303265343132643938616330343833386261666432376666356532363933656461396633343831
+66623735623233363833656137626161643265633335396162626261633334613232363435613266
+39356663333532306662353538396565663666393063313630663133646365626461376363633735
+66343030306232396332313731313531333235636363383130373532633137383536316639626537
+33306330393737343563626234633531643334313363313466326564343530383337316238383362
+31306430313637613639356365343034393766313362613430646566643131646530313861383037
+31353636323963646236343866383166646633613539386333316164316532653036663236663039
+36306364613033616338643366303264343462393532393638316235303565333634396537353334
+36666361646237356430396636663032666264346561316533343438303238313537326263333766
+64313231303563626364663232363462643735346639373463376464383466636638663036353437
+64356338653862393637366233613366393635623637663332613961623733653362353234656130
+39383533626133313066646331343361303261376364356463383261356462333138643636306432
+34326135333137366339366561646337323134633033333365376666313763666630306261666263
+38386530356361633062663964333631393665343539366532316161616331383831396537323539
+66343138373463343434356465316662356233633861396337343762396366663361373161363838
+30363633313634623333383135623738343738396230343839316137633663396635643865373735
+64306238393833353065333731626131666134303264346662363433316161626632626430363861
+36656263313032643132383162386439613763333033396434613631656233653034353263386138
+35396465316664646339333630666432353064646461323463633237616338316362626538323234
+34343531633766396662306234356361646432393663303764323633333831636631323938396261
+65623064303234353861363261346139646633363965316337363962643430633864633061663462
+33393531613032386134303765373132306662633161383133376231636631643464393536636430
+32373238653166313231333733313865663862653537313736353338336634653032326465616565
+35626536386132363139303939623864613463633432643538383036616530323765376435643237
+65656463306533336661326462386636326630346664333838636332376530333432393235666332
+35663836313765363934313236333765386130623064653463323039666536646531646333633334
+32356439303333353536663638333138343965653663623465653938326565353535666662376364
+65613933323566313363366266343630396331366165306164663065343361666366616237316134
+31323236316163663030666265356531363266343337623561616163386336386266626634346436
+30303738626335313935303463363166313438663430363035336466346164373738306565653133
+30313134303563613835633739303137626562646533636263623336396233626266353130633262
+30396637613934616636643333316539653431323563616338343637303139373334386435626333
+37616265663435303130376137346361653463313934323837313331653662623261613962663833
+64386165343661663463643136643736333232346231396666383239313830386132356238343233
+65376535356238653663613336343632646234663633373238316363663162633535623962376135
+63396661373130383331623733343664653434336264343763303266303339626434316331336433
+32313066626262313438373631303337663232636538643863613939353266666463313733653830
+38666334663864373830313762346339653230393036653336663030346631333365613062633361
+36616538636336613930303065663637373433643937356461373030613733333434663736323864
+33376365323235333938353261653766663134313733393666623038646362666239373338336464
+38633433383938623463366335373439336331386336373631353232663662616261383831663339
+33663830653737306364633935393334306466383634326430326361613461316639393136373065
+34663234613434376430613365316438306162396239326531306638303132666266616238336538
+65393965646139383263366263376338396238393730643231616664323432653535613833363465
+38626166356463333938626131343638333635643661343162663931303937653835646364306164
+63663232653234383465353464616364656336653438313664313339313231633739303930316536
+32633037643561626362633231626634306335376333616562663039326466353165313962343434
+62613638313439663032366161666665663562623535643037646331323934616562623432323565
+35383939393134393931383030326237336535333862633637373730336339636239366439643863
+31626364306365303063346265343465623735346639386239616262393931643835393562326535
+62376536393965613035626664613034393533363961353864626666373639386133303634363034
+35323939323465366638386631656561636564303430643533323864333734386531376337353632
+38323161333865333663363239376133323063643931313464653464316161633637353165346233
+65373431663462656365326136346130663830313566313038303265366233626261356535633731
+65353666393935616634366335306363383831663462623034303434646431666138636231373336
+64366137336231376562313762343136646136353233326139346131336238343262623738336231
+35353031666632393237303834633536636161636561643463346339343164623732313537353433
+33343439373736336330383336303038393831386130323438633865623765623734316365313333
+33303265373562663734353938343762633230366564336137303134383661393638626465326561
+32633862393539316633616165666262623563333966353736363536656536343235303366633964
+61623434386335356435386464363035663162333761336361313263373138343965613538376435
+62646266346162363334363963653937353164653563343834346162323166386335383565353434
+30396434343465303538616463663532343938636135313739366632656332333733613737343536
+33323336356533663962386237333566323030306133666334366365373535333265333132353437
+35626435356234613539336232613335653662303365376163626535343163343036663631323031
+62393263396233316332373663613435356465616265363336393732666235373466306632383635
+64613962633435303834343231616264326666316366653433313232323835626238656439306361
+38326435363066366261363931333336613164353166353236373834316437303735623931613536
+31666335303330313664333566363063313337653163396531353137613562643039336338656164
+66663763313432386638323766323032386135643666313765313132393632323035643538313736
+38656439626261663636393034366466363437326561396261303636636265616237633537316139
+34353165303564343039656631313130316535336361393166656439623538313563343033373366
+63643862386464353330643537326331393133323765353436313136313435633261626534303736
+31346435343865396665663333323737333635343335346134343061613033393061343532623131
+66633864333838363937626135336231366434363634376635633566363536353461313866373836
+35356463313437313134323339366531633439353961346164626663613232656635666237363461
+62633832356433313934613763663433396666396435653433303733346639613762303534613939
+38343338663162346637633231626435333963376262353661313436623233616534373333313631
+34336433313964326339633466616533303031343935336266326663343937366331656539623535
+66383261303462346634626261363737386232633231616239633335303238383766643665663639
+35333731666432346665663736313435356231343132376532616439653963393934633037363366
+37363632333362333438646339336462396665373361616536306466306663623733626238646135
+37663863343339376162373065393130616538313939376137386566353361623937666330303538
+64653531653634636133353433353463323738316664666636643933646661353339326238316666
+64316137663236396234396130646533316438366337633437326539363130343765656639363334
+30663362393937393863633262323931336336343362646264656331343733386332356237653134
+62383966636130636231333539336465303838343365383135356464336139656637623233353236
+31643731326430313563613130626431353562383036373461353663363031663232353366393765
+64636261376666346132373161653430633934366165656139323232363463646461346336613964
+66313130636132323931316438613137356162313062343431393035303339306237343461326336
+38313261653231633836623739306438616439613730303332353434313934356431396565316238
+32306132646239336466663232386266656134633563646431613332393062633439646562303665
+65306665373532376362653737343061383036323535333330373831393635333663376237346565
+34646666633864363836626666623838333263633036656337656431616635353662613262336336
+62643965666364633865666432326137653864366435323332623536303465386661363162653064
+65393362336362313438366536613038316461376139376662356437343631636239633066306162
+38373638363233643531366338313434373034656635373731666230623663633430656266343933
+35663030316335646532393766306638346365396533326433646530313630636239356231646337
+62336163633162613862356138613863653432303064356638656135646264306363323664336263
+62323963333432376237626134323062626563623165386335313533356366333437353838363363
+64626230353262313138373535306538313765323435363732396437616134643931323232316463
+63396161653032653837613366346138366166313730656665623563333834653836656162393466
+65666436363465393934383732393963616236616263393366623130646134623730376462346532
+37373662306262366331323539323365356232323739333466383865653461313635356339633338
+61666330666338363533313462613739393863666439623033376336396364666365303432643137
+32663365376231333230663665326536333638303234663365303935626331626665663239323166
+64303966653566616630366432336264333639646236613038383134363336376363353961366435
+34313133346339306133303839313631663831353465376664656138616131653437626337343539
+33623834626263633939366238373232323165653236616361346533316463613063353064383633
+31366534303736316532306563366663363035636462323737633436326138306464313134323066
+33626538616536333338636664666334353832613135333832303862313061343363303362343461
+39316438396564613662356432346363336266356239303632323664393864343233633664636261
+35323831623133363937343639656637663133383637386635383137343165353932333665316434
+34393839633233396237643062623536393231373864396236613162323364663732666265633632
+32666134366137363463656131376630353130363538356334313239323939663263613863333161
+61386434306532313263326334623362623465336461326564313735333732333539653162343165
+34363861643532633137356466336430323961666133613031626631636631373464633832313035
+33343935643531383339326330303538333133666265313230373734303562366336363863353936
+33386566656633626239356334323765323435663233363265666231616363343537363438633766
+33613336366163653134333039333535303962366162613337356166393335366131643163316339
+31383132663331363762633662366462343663393062653866663766623830376135616263393839
+66326638316339633435306438663534356662336232303032376330383065336432623266383739
+64303866393235333561633434313763366333626631346563323662326537653339353834363961
+38393236333462343234663761336562383564613530323762626465643933386239396266313139
+31363138383138383236363464346163633166393232626634376533373862633039373964303331
+34323132636535663732646463656632333039333063356533663263633035666664343165623061
+64653637333637363632366562326431323932303233343433356431396564626632353334656631
+37623866633330313337653466353139393831373531656333313932376232363961663630353265
+31323864653664643030323531393736336165616632343766653032376665666263616438363064
+65646630613136343033343466643765656236393061613238356435383264383866363632333332
+61616437623065636135656330376262376130316336643263373362666336343331303534353965
+61316263633534396435373336363638326536363366383165353565373736353032633632646234
+31623233663333303232643163646630366432303933393131613461306264303932663038343435
+64376533363134343338643364626135386334316337373636653735373330386631336362623030
+32633562376164316637373639333230643065336436363163353263393630623065656263306231
+38366235366335326563323733303539383330383630613937326331303731396361653537373436
+62366333336131626138393839343463626436613664653137393731653332646661333136666232
+39383433643331383736356137663830396265336264316336393731343461356239393534313432
+37303838366264623034373538316234333536646436643661343363393161656633313530373465
+65313361656637623432393233326332373537383632316361333735663935363835646634396431
+36323335356637333530353366363661323762393131623737656238623036353937393730636333
+30613337386564383632333063396430306166396333346533333834343538356130353436323765
+38363664633330323163316233633864363262343732306363656237303534656466323231383130
+34636264343735316362643961386639353733336135613261303736313537333165323739343032
+65353363363161616531633462303539363734646661366464383334613734313137623731323939
+63323065366132623362333335663735306361393630326538613037383632663830356431373261
+33393063626436663933353864346264633535336532643136666363663237353030663761653933
+39623331366663373363356333373263643338653336343463333032646630323132636265333130
+64623765336336613161626336313361643265633735636436363037303432643265316465386335
+63316331363836626433393165356131396461333931646665613363313737613337333638306432
+33623063323065313865323732353362363333333331373766376465613638656465653035326531
+32366333363935666230666334653962623835303435336466663032623531616361643339666130
+31376265656165303864316239356339396665616461626362333862616364633432363135373332
+39313430303363623433623161643432643261613364393630633334303431366235393765653730
+38323639306161633434666161653564393436353031363131626336643664613232313463636431
+35633935393666376462656639363431333031363534383064333934663265396133373433616363
+39366461333034643631356264646362373439386161346337366165633639613939373930353562
+33643634646331376433373530623438373734353661623766623263616232376365656335636534
+35626665393636643830613466393061666338323464333230336666366135633661316537643764
+30653130356232646430653365303966303266336366373132636237623332363133323632646230
+33626361353464306566393438616465326266386262633566646134613166396635343733326635
+37363266386363633030356531643166333530313561303638346436376235623033363834316266
+36323363303636383334653533346335653939386237646436336437313161653932656331343064
+32646161623563303462386433356334306239646332383137613162363237613062323265616230
+38636334333762373138393833323231613062616535356664376239356433313264336163313138
+36353432613033333237353531656162616363353835376138666131306361623365323237353333
+64316364383866356338656237313539303434383064323831333832633063653264666261333032
+31336135613032366666613566303539626163313364303662356165643931353938356663636366
+38303063323337323661333464336339653833316163643235666133336438303930393766623931
+35386331323430333630616131663831653265396165323430333866626263356138323161303763
+37373233386562326462613364633138326535653238353662613864343835313031303362633131
+37373166363261313930316639646663396163646334623931313466663632393835633161393038
+39316363316330393266396564356338353037636632376133373231653864636365373638653438
+35326334386465623536343038336336643162616633343565383334373830656435363138636435
+36303764306235653534353161363162313764336138373332313338396134653134306338343561
+33346339646662636562333834313535316439303265306534353366353662623066363139663933
+39663165313666376362303838343765666162636337326565353761353132613737626539306163
+35383065336430396132663635663631393466663236653564366139663031326136383437383838
+32353239366232396235636132646531663563323661393332336361313738626437626335623463
+38316133646566653830633963613161303637333533363338663130663661656631646263323262
+64623032633866636237623665356436356165366165653666656161393865643931643730613664
+35663832363037653931313635313638343764316635633031616439626230323337303335623234
+32646330326632373738663465666236666565396162613361326464313965393830643237643865
+61653538333330613835623461353366633433336138653535336239343933643563633363656638
+38343966336630643030303665333563353661373064363934633566353363663334653939326263
+66623034633564383833653366323532386664633730376131656366613637363433376131356430
+64376337313930633030343230323662633762363538623331373035373166393432653836663836
+38303834313663383465376432656661343432373139653066323637643061323231383064356131
+32653065646636383534653462353534643931353035346432663266623431363066303064623435
+37613264616430386438616538643035623632336237656535343936376363616431633264303933
+34663465393337376539303431623666626339353237346338376637666331623762373132613866
+39353065653630373635383535316539303530353433376433653932636331623739633862616265
+63636163363765613730343061323930653735336364343239353633383461636133333065616632
+63396562333733306538353533646332353966396330616334636639373163323539353231623766
+35396261393066326232346330616133626634313964626531633234663637376238326666323561
+65383366373963323734366332633865623536623064643239366561623262633162353461326137
+38633862303932316362356366393861303366363335356134363638313533343434306161386261
+34323733666662313962613835613537393432623836633730306535366361336265646534353834
+64346461386630646130616663313035346232383533663863613364653461656564313834303961
+32346337346165313464326332393435356434653138343130363263396238343034646635343937
+30353361316435313634613930316237626162623562653036353966393362666438303637303333
+32383130323630336461323835373863396137343231306536613038393437623937636566376639
+36626634393035356136643831316664306662653061616464633237616566323437376634303634
+32633462383332663635623334353263313464613535323861313863343036653338343033303238
+39356239623163663330663131393334633961313066663266646631333464643366663637383434
+65323264376166666162346336383736666133666163356131326633336464613961653562663462
+63313935366166303061663066396532313830373936663865383132343466353233343165343961
+61343330616666323939643238636462353531343664343938366135323961366661323066663466
+35313635616637646336613830346165646664316464643266393665656465666263306662623062
+34306465636337633733643434373536393335363862316166373062353432626161353830336133
+37663531386463313334366634623533376131316138323337303738643238656661396633323838
+66663633306134353864643163363735303532373866323534333132373438383738366535353136
+66366262626636316435623436356263313037626431626133326339396164656535326563373932
+39373232306661356338333139313335323634636565303631356163383935663432303133346465
+37356234633362376633393265363736333132386432653961653536383136616236333263333263
+33356131396439393164633466303366343563643735343333336165636335616639663862656437
+37396262343131356665376638623236383634366532623064636636376638643133306666623733
+62343936653937663639646661353933306562643530393938313835313563343635333738303461
+38623534616363313862313366623762646531373262326666313736616461616232336537333037
+39643030333737643831333866656435663430313864656261333233336530326363653532376336
+63653532396539643966663230333435353533363239656561336531343231636362376538313037
+36633666316464376139643563313664343738373064393562633262393439393366393564666231
+34323731303839393266303465323766613864396461386465653739366634383461656537653732
+33313136366439623636636438363566333939303263346637353163613834396162303331663561
+35343237613031373065333636366336613732326662303463323461623839353439613132666563
+36646631393632613237663464373835636333366434356265363537653265383163653833663461
+31396665383334343966653166393762323837303735323366393335356230303033666232313361
+64383962636636643630326533393331313064346165313833333937303538343062653266303334
+66303063376265666538633565343166636562653639363533376637666631393764613438363333
+38636537363664643863613333633236663435323537303934613433386437666366333334363964
+34383738323339643836626562666566376336666262623736346535343639323737353163623439
+62633732353236373032646533633665313361653538396232616234623663663365396536633237
+61626366353062313665613836346233346631633131363061656662363864333065363234316230
+30613364343131366532356263313863333130653266623130636235303961353630376663396131
+32613633346434373630653663653536383933356133643666343966383532326236333537336135
+61633636653936303662646362333463653139366138373734356134326534656633316533393535
+66306337323164363334346663626663346535656137353534333731396537393835366261323233
+36633033613938386437386665366462323235656531656461313064333064393264366239346462
+38313737323962363465386435636539656432303162323665346531643139643438363630653538
+35636430336366353263396232316333396434616438613463313634633138616336306633643061
+63386335306261633739303532626261323566653762636262363430386134643735383937313136
+34643938356136623133623665363963623530363535353139653733393232333736383337383662
+35316237343935616538633861646238343438396131623061396232393331333038373432643465
+35643835396235323735636332383261303530653733613935646466626330333731323065303930
+33653061373765363439306464323761303464393136613864616665323837336664383238666634
+30396336303538313232633236326431313065663234323161653062363836323633363735366135
+35643562303534343832373632633962636636653562333666333563626166326435383732306332
+30383362393135646337616131633330393632613237333037353531323830363237643330643161
+66626563636635623464363533346466646133383538313730363538336637626538333830393164
+36343063346433633439313733303865383530336663636663643733396230333837353237313062
+30626330616234303039633736393161303863343234396262623436306136316366306432663930
+61346530633865396365396139613639326530353639333036313437383063633235366537626235
+62616365653761663566616133366536313338376162356662656432643532636633363838633637
+37643364643061656136323436626564363135636534383862613765616335323931643233393863
+61396463316564656136626365313065353038343936366134616136396461616265623331333633
+31613261656639333930653132633933336630663066613331386535373335333339313230616361
+33386535623363646631646262386463343031643138616464633961616137636633356238333864
+66633338343166323034333936396162663366373765353233393762373335656465656261343663
+63346330393161343236376665313639386136353265393431383563646665393462323336646263
+32616234376239386263613034626661383962656637363236323831633531313933386666313435
+39336539656530373137616138303361653331643637393066323665373132326162613461346434
+32616130386131663631396633353135323164333931393939386637356637303763663638376466
+35616230343862323037646139353838623031313361616265396136626561636338383063336238
+30393536373261396233373439633132303238323636396131386137306237643936636330353133
+38626135356238663536353733623337393061333465626531626232376430636231373162333463
+31666163316637303462663262313039663666383431373264323163663134636430663233346664
+36326636633038616531386334613762613736643038626335623835343864356366316266343131
+66383939636332613766363565346336386134306566616365336234383331383466356539306433
+64346138653536316336343931343538353235303565343663383866653139333132363035623465
+62363835303765643132643239376233386330383530373530386461663565613030303665396339
+39656262643563313064633832646565373236353235393032616532353733353630623566336265
+65653664303465393139303232636439663231326430393435336438633931303332633731393639
+30623430656465326136653361613734373835376661313135623032356562363830303139366337
+65636231323866366235313933643733313630376533343438643863386166656239336635393736
+34376232613362323839653139336261623034613334396234623432326563383737393562373939
+31653764393061656535663862333936643264613865366565386166663866666232366538373838
+65376534653866653864623237653337346431373730626335386630616137323164353464613463
+30336532386265376234663562306334313432623539336366653361653565643032643531633939
+30626165666134653264616639373830333130653263313534326337366466333032333939653263
+38636331653937353531646635653937376132313732313836353131343632353034663832383639
+36636338343563366566396166343734396636313866393938373266633832343832396664616266
+64613339616539303333656635636465313964383239376138643834393232323666386563303265
+64386437663539333234353461303763353930643861613461393865386638386633623633623937
+63663734306332313033646536616561316638643765623566616139653031336563383365636138
+65303930343461623535663834323132353533653333366630356431653733636566376336353464
+38376162636565323335343737376633653165396632323235663463323730326162316635346366
+39353736623262666462396338363765306264663232366463353966623239313666613839373530
+66613062633436333734383139323964326663323634373635636365663832303230636366333636
+65323737383633636338363066356135373166363936646262303236356166316533326437393735
+32363639303137623335643632323566373032656233363063313264396436626633313433636133
+35646464373665356136316536356530653966313935333931643639376537373735373331386537
+38646336356631623731653439613164303835643039346430353364653561626337333666616137
+33636434323938316661643939383937396533323661363365643164356538393765396134396433
+33343031633764643239643531373663633734646232656466643362383838393037363636323466
+31613433393132306364326430666639666561616664333035613863393335383034353039343265
+31633730363161646439373637643938356462343639383666636437623639653066323536653463
+39626239363130346539363961326331613764386531386436316564386135626335333439333539
+38353638653531353132323866663665663831353063623764346438323935643431326538336534
+30643934626236663762333062646363316635323735633336386339336366323861323438313137
+30356665663032653261356663373033326634623639396666396664363430303437303163376637
+61363866316434663433316565393363343938363131363635373934623233626463303731633662
+62663437616333363438366239323934656138663362376363353063633461333532356265336663
+66633366316433653038353734373566633330653737366363303164393536353232326465623063
+65333136306264326430393935306431646134383036626466663032643931633862323066363863
+32353231326334633031656562333266363436386535303465343437666431393234623662346137
+32626636656336653738333934383339646335316137326630373062663831636139383232343437
+66356138373465316431643937623432643965616130373239346661396330363433373834303932
+37363932623564303661323234376336323939646361636665386663653761353033666339303439
+66303261376263306566323135643835373365626463376262376666643331626236656139363134
+39373833393930363630663833633832613063333139326564633161346366326239623735376461
+34663063663232313138376336663836333136363639646539623661373635366264653965396431
+30633430333136656139613033353939653433393038323438313363346264333462653464346437
+37666532336136373762306533326633323135376538393932633165653932396334343530653166
+32323337623731366261376534616164613836333237323466323830313830396662633837313533
+32373238626339353031303136306139613639343437336566303936656435636531626464613366
+30636235356363306663366334396631613832613062363134313934646366303766636438653762
+64313931616563313939326365643635643730633562323834326433343238646362333631656630
+61643639316631303861633130326261613061353635333065656363653862366562663830366637
+33613938363538663736663934626464323538623831336664643935633866383562316637323132
+39393934656263633466636565316161376537343962383334323336643730323634326461303666
+33396364666339646135313039393538356436656338366536613934306431306664383532326331
+64623231363366313637613161316234336136613862633466363837653133343339376238393437
+36363638613635663564316138303365373165336239363935313631336533303562363165306366
+30363138666435653364633439303561626637333037663134633837303131376638613437643363
+39666139656636353630373631653631613664313965303335303138316634663139656337343064
+34653061333934326234336132303965303338383566613032396433393838333439656565633130
+63306564623635633835616264316138326630656532373235316538356230663966323730386164
+34623137626665613935646330383530326163316637306334323933643462623133363463626434
+38633730626432323732303361336462383361306433336335396162353838666233393364333932
+36626437613131303136633739386263363331646130346264333838646330326532646437353035
+32376231663466373439653531386230383565626465623938366637393866343566346135383935
+36623431636138383063333265636332633265646463353565636335363830616563356236343030
+64623365396237313138613163336239663765313831383765313538326539646437663732333031
+66343464646538633830326235643837366437313161666635643539393830656139356138343431
+61303734306161333235383532653935653133613333333531353265303136356362373932376135
+61623738336566363761316164353563376431623864623465343065663966616533333130623032
+30613661616232323430646564383466616630356461653866626666303165326364613861336639
+38333865663237343530643432613439626166633232303164663263323961643739363164343932
+39343330336530636364656336333539623732353431616334616333653665336361306431653535
+66393162653865383365313833373462356135343238623661333735393466396563316462306165
+31656136616539373834663430663266313564336639656137636538646538653735633761373534
+39303365303364326461653763333164353563613961396331653032363165316437356566646335
+30313262666230633533383966313437633235383232656133366165373434663264363266373466
+36333766366434343633656361326530383065616531363435313165343037353736653830373235
+36633534663738616436363330393136333066653935346466663234393563656266666362346564
+63396435303332626461643161646632623561313530396634336264633334313133636666323835
+66323634613864646566396562306661303438316135366138386662616361373432316237363166
+33656638316338373064656130613635653865636433383664663431393731376332656330613466
+64663636316166383663313861383136376234373863363135353733663166333638373364613435
+61366438346265643230623663303538623732623761376666383038656533343632303162633433
+37613238346237613666626430623262653764393664313633336536316535383765333562393362
+62326162343161623866386466633933613436383564666664346439333937623036656530666661
+37323331373464626637643065666230653165353735303634643966636630613335376162373231
+32306531333664653230613737363334656261326631633339383662636530316264353139343663
+63623662333336373563656236393034316263323438363361633835396238316362313561623236
+62353533383630333962323966663236316463633461366166333230356664646466363061633263
+61646666643735353230346433633765303931353466633837316161396336363161613664623861
+32663930373664376638386663343466663033626535643661303561353836346136333166613737
+39383164346463656137353237383662646263396366323838326165623037363736366664636462
+35663764333137656139376330353463363965626237366530663263656536336661613062646232
+32396131383436386539663935653061663263363161303765663966383035366137653265363463
+38383530383738306465396232336337646366666664666338663164363562626463326539393539
+30623838656238666639356239353535333637343439393233643136366337616431343165396563
+30633632653962623033306162633439643565333332626237663032383338396435633832383933
+39663336363934666638333839666463623763313638313735653137353734663432303963616232
+37386238353337356565633933383733373631356466616666613132633934626435643163346165
+31306234353866656430333566623761353531333930363431343233366362313032353365636163
+37623330356632626262373838376365626465373566666231663537666332356232313536613532
+66343265643536313436636563623933626232323431356164636265343464373536363837333837
+66663839393431653061396331303734353962373565653636366564396539303265393136356435
+65623138363463653330376431316330303334656538366461666364323137396265383663303262
+32393439343139386633643031666535623531316365653735323336353464383434643739356363
+63623364336364666535333134356535346136353461373839613639313964616131313235393736
+35613337326263633464383665333062356639626263306238306664396162343636316564616539
+65303730356266303361633939336637363930303865663632346535396238633934643332306434
+39383961653933376533383632653566343730613939393738393435663565633536363836376161
+61616466653662643963613765656336626535326166303962326133313562306134656266613863
+62356166353336333961313731363166396437663734353535663035663764356366396438326133
+39326264626235653132326363616165653835303731626630613166313361643364306466666463
+63363039343164636166626633326330656339623136653432333464376131663363636238343638
+63333433633861376131303136346335653033383361383963343839346137393538656364336134
+30386337306432313332626132383236333963366431333736363836326565336564353834373361
+35626535643266613334633736343061636365316539346330366431353134653264306162613332
+62323131316531333366303163373265653634396537613935326436343961633735323835356362
+64646137333065643830343131343565386566396238626461343165383363363430333737643635
+32666439633330303663653933633261333030626166643932613634653636323034386665613465
+65326461636265333563333730393730303238633666646463343531363131396234653134306531
+63633837313639336137663437353730356132313932666337643463383862376261356266656462
+31353936666331346463653363393036363032306566366562306138636234316265323538313364
+35303638663262613164313765306638306434333338626266663765306562366363376238666162
+31313964326431633736666466313066393736313662353436353665353136353136373038393162
+37333537336664616231323865666330336162383535373135613536656534666664663734336434
+34386365326133326165646563386366343264343435623461336434393962626330303065383732
+61323664336431303962313531303366616539616131363766656564313063303234386466633165
+62323337373664356138343236396361376137336564653930616263666236666233393531633563
+39653264626238636430373963643831643663363337373161376437653630323562313765373933
+35303165396132666134333832366264303164376536383065666431363039643336396332326233
+64373833303830313734616132666564653464656330323866346538623231303335363839613334
+34333236363866363039373238336431333666313536653433626435623337306364663939366261
+65383865653634316339643330626236376435623366633331343839643339333166353036653432
+31613161303339613531313536313539643430653666303432623933663637373734313366633930
+38393031313863343136326261316236373966373966356130643164313361396139343739356230
+36323465633262626430373531386164313365663233396332343965393934653135643832383230
+33653232366632373366366363636331316338386462333634666630393463333165653336323335
+65613361303663306430346630663762383134653130373134306338303038333365643064383765
+36636438663033643431616630346631393532626437626235306430306234326432353064663139
+64636662356630383138333935326165333235313764326436323864383938356638303331386666
+39326338343339633636623432343537633636626362616339656539326431306266636232613436
+34336534323363373331393962343730303161323538346335636463666635636461633234323465
+63343661343464336137343138343564616135333035636161646263326363363034383535303131
+64343062666635363133336135326565633738396332653663623432383134633265323739626438
+66313565333736306335653166306432326435346536323461313539393733653432366264666437
+31646438353361346166663733613835363338333032653261623830626532633665303631356134
+39613135373562663435303666363339346439386330636463313164383666313264656233313139
+63643639323964396130363637316230613734303035353165386661343737636363366137646539
+65306133393763323566343933663731363763313730376664326334613234666634643038636665
+36623636343566333666623963383365353231353137626236306636356631663433396238386364
+32316533343563616635393930396463353938346633633565356630353838303064346239346533
+31613231643039346339373032396163323161313133653237333966316665613132646662313935
+35316436616162373937383465363763663239366565323432393563326534646363646466316266
+30623166343636393331393232616335663862363264373038663035316261313938626334663964
+33326139613738346133663231356138653232633263643936393835396436383534326663303335
+62303537313730373536633334666130633765633339626631623966316661663538363437643430
+65363338653739623335313336626330666164613636366166356332363633343961643065646561
+65383466356463363566356230333939366535333335326333393838313331333862353030366364
+37383236663932323361653230373038366135383533633038613664353763373363393031616334
+30396138373163393262326238393363396364356533306166623432373165643938653561336664
+66383832383537356238653664323864616666623931396564656237333637376662346435663032
+32323261313561393662636139353438313036643135626634323465336139336162383066343765
+35633234623432366637323334616361333931306139323162343064643030393162303165633163
+64356235643037323365303836353634356336333635383031663438663536656233626465393361
+66633566336363333666613465613630623539316263613836386433303138333331396462333264
+65313362353736666234333563383039653832636165306264633966343266356239393761343934
+62363035373037396361303336356461303563323966663764643336393539623564373434383732
+65386631636137643636386430343165633837366333613038376135646637323031383533313937
+31376336313930376531643438323636323934303065643161653233616564376464313466643931
+31653433363233373731366261393066316531373365316166313531623230393062313832343438
+34643031613830653037643464393437636538663062376139386534343566393130643338346663
+32613134356134656333643434626163343938616234333861666234623233343732656165646161
+37316530323065356231376635656437346138666436313334303638653731643932643661656465
+63646263633035363066633561353134653336636464643231666233386339343232653239333864
+64663162653035353364656538336664356136333737373761333462376365626634333736306135
+36306337396131333564633438383963663036333935356262653533663031323066306164326366
+33666432353932393038316133636238633433303461323361666633386530326465633630383131
+62623035376232616639303864393566646630393063326463356666653535656438663538613066
+30343632303835336262396665356439343362626431303134313562663165323934303034386663
+30623530383936353965306563623161666535316231336232323031396239363764663635613231
+30306232646166393562626139363139356366383065626337663365633134386137343132636232
+64633139646130396364363037346262316635613461323763663163656435633165353131623436
+36363734383335353736346232326436623434626263323736356365653966333135663836623261
+38313461663838353165343563653361613735643435653638383265386536376266343534313636
+62663130613833306533386637333230363332386665633831643037393461386535323261396231
+31303733333938666634356633663734353432333764353136653730353565366638336538343938
+39326639346435313863663265633166393638343665393333353832353234316534356435363233
+38613035323861333662623662313462323762326533643632613866643633626632643039633234
+38346233333564343737363633366365383666633039323033373261303135613035373239343133
+38343961326661393838616165303438353832316334353966346666393233386631666163373662
+37373637613335616665623963323863343532666266383331393562383233336436373234396265
+66353766363631393366373563343034336266323164616262343863306136363766646430643263
+30323838393665333361636632313366323064376637626261616263616139613565656333353661
+63343263363930366565663537343538613130323537343462313365633763346635343935313964
+38356636643335666663373461306435636539396638336331653761323864373432303961396562
+31376131386363653366336331333232373833346331633536356632306638613036643834373833
+32333737353435633430663364383331383737623263303361356462303831623130343138643731
+65653439643839653263343632363837333337643732333165656438616563633030366334353965
+38386364393837363163306232323434646330323933323639313666636233326366313530653739
+37313964396161326336613238636133303136663434393336363963616265386436313835643234
+65343364663464356439363833383966383133613538333638303762366261353036323564623863
+65313030363138363362653538336235643832646262373163306634346164393634366261633964
+32353564396265303336323432353438656431643261663432323662376136326239376439393536
+39653833336662386364353161396539386534636464316433656662373836366534313136333430
+35303730353865333439316462366136663834626563323866663234313734666434393262333633
+61393636663062303337333432363931653761626632626336346462313863613931386536656435
+34666366616332366130343539383135613538366664343130333634316461636337633938383033
+65346234613863663561343234663535393930346634353530643231316536663834646266346338
+61353233356364336333646362336664366465646466363537396338343934383031666265666535
+38623163636135656562613666663166393866636364346366343733356339316465613330646538
+33646337633034343665656630316334366339626538373662616432323337313362333634643466
+63666235646432366535303438636662373830313864646639306362633463626437613932303161
+66393463346464666638333166326233613934303465386666633634633863363836616265383130
+65633334666330613363303039613562303739343961333863383763313938623337386634386238
+62656335663366303530346563353530656662303030356230383630386236393536346339643332
+35393766616534303934373136356461613461666332393632376537333864656666633835356463
+38616330383636313433653666396134343963663465393732363532613062656433663339353634
+65666530316434386165363535336637376266616662336431396533323838356534616536343732
+30326261323964636339356639356232383331666466323134333531346262363661663735656533
+34626162643834353862613731356130376132613063633830633135663238666231326338323365
+62363865616535623131383066653164336565333733336233613333613030313936363465393962
+30336331336336386564613564656639653037313238346137666234393431346563303434333638
+30366232386530386434613932333836316166653238313134356535393035663633393033613266
+64326566303565656438386532393665656338303832346133333031613761663838613864336565
+34353737363737366638653938346166356630373539353238316366363861313637623938326638
+30313432633532323539303936313462616635656232323832366561373331353331366630376663
+61343138363862613336303866373030333334306235626363616565623039373364656137666332
+34333735623964353464656131303533653031636339393335663739363234313134383865616365
+32313331323534383532663135353232643038373532626564373563383434303436636433366165
+34326465353737633336613038336534373166363636303165323035366533323939376238306235
+36386233356562356166303565366535376137636661313666623766393861666132363332333435
+33316234363137656564643639393739323736376434663132633864656161323261316534653063
+39666462343766623537336137373730636562313161666263613532333132316238643836323661
+39363562643039363562366663383737373737653439663930643466383938376664336436623337
+30376431633561656439393634633635636334363366383739613238636262316266313334373766
+39376133316331333937323131656230313734636532653437356634373935653365356339396236
+37613036666139623532656538393066313163303135396535356536303565616236646236366264
+62323233376235356462633034323639356465346136303138393234663164373834393565323062
+39383030613634396332643733313834366365313762636235306161313435313262616430633236
+34386332366262336231623732656431616630623235396538376663646262323761306663303539
+33366161303066653932333137326366363130373534306630626135313830326138343764373365
+66633462666666393461303265643239653531656631643930343730626336643037393232336163
+39376634326165623531633765666636383264613532323235663236353532336638313138666566
+63333632343161303630663431383037666265343564613636623238336564376239316665326461
+62613665353735633361383730663132356463336461653932313565306630323863663431336562
+34306537613530653934373434613463346636613465663761643065633235646365356266383761
+31303036613261376562333233623537333064313639636136306530393337373639663862356638
+30323365653136323535386134376335356531653133316530353061326666626536326239363366
+66623165623734386563316361313535393462633230646664626631396234303030376262616566
+33366234643065636633623338656331373761613432396530653839643836643537363863653139
+61643233393564393537356364613334633038633036343463383338653461623136383436353665
+34663136623332356434313664613434663032393737623039336631643133306661323432313663
+63316263353730613437393230633738346334653530663531306134316535663334316566623261
+66663539386363366335353265623939316636303938393131346434343565623266616637636332
+33616233636137613339343231386631636235346631386138356234386266326630346164343164
+37393063316633663863343830346466383636366234376466326366663239316438646366643635
+63326566616131333532353932643434386632616332333364613634376162323239633963303234
+33616132643031393039313933646434376232343862313736326639373436383466343336616535
+30323161343335646235663935636563336663306566316563356361386464373334313063656134
+35343866386234626565363866373534376364633431383062353439373566323266326662616263
+37376362366438353636636530353965646365316138303936373631623064666236616438373463
+65303431333738333236636336323037383739633839653663353463653638646635626265613430
+37616261386637643230636336303861616336646532306535326537623434383532623637666536
+37366566646466356432316461356538633834306364396338333931306231373530303462393465
+61326264616530636464343335623132376139653737636561313039353264376131636161633830
+39333737613433643962633862653434303165343232633765626335626338666232396264316530
+33373135633639366531366632643439306339393064343530666431336333643132623061306162
+30613438633232626163656134353739636463326538376338633164343166363631383336316266
+62653732643030353032353936646162626163636233616531646231636334323366626534636536
+65373033616139376466366662306566663363393164356638383431653437663366643866333263
+39306533306661363138303566656161633365386436623565376235313433356236636638633134
+63353233353030393833653363626663336561326437383161316531373364663666306330343063
+31323663636136396566643831633666323639303230636664393162366638626437343562643464
+39316239373338316136613839326663626462356138313839656239613835396638663037656232
+33393138363364353166663430663031383330356234376632333938353965376337346563393531
+63633065663332306535306466383165646663313938623434373831616431656237386363643637
+62313861643636353734626136366330376230636235663234303764656662373539333830653535
+31663030313334376531366631636636306265643235313635363736333334393239353262323238
+38326662363538636634303730303437336237666335666661366332333639663662383334313661
+30323836646630343062353562336334396235326235616536643835373163306235653562626532
+39373337333939666336333736366337653736633934343866393836316565373965613865386339
+35313762313436666162316534383462333961653932643666663866633961613961353962653962
+39326434646236393164383832613336386239663836396334653534616334346238343363393364
+66656330623839323134623939653761323561643632663234306333643662346364643834646362
+36346338656564633231303337333832663061303961396631653432346439623130623866356561
+34666465336362383833336262303637303964383636643231656565616361306666613732333734
+39303439336163383037663762316531343533343562383664626463623632626334303931626231
+35643665333161303564373662313462393732623663343530666430393636356166383636343761
+30613633383438613262646430623562363864363935353764613064636530616537613339616538
+65386262663236356237353834393331353633653462353034306164643334646364383533646632
+31636538376462303532633762373164383431613538663234396236313761323666613238646132
+62313832663335633430666632616466313930383236653231343035623233373538616562373834
+32323566353235613430643630313161353830623263313931323135633134393833376661353037
+36306234376532653232316133336434396463616536393738633439313036646364326133396633
+61393538636137376436636431356162366435626665326239646664363233383030353635333865
+31396161613062626561376337653262643861353061633863616633623737323134653334636663
+66376465386662656362313466356264323062366130336632323333373935626535303533633766
+65336636646338623039363764306634366438656137303238646361666132636464313338363262
+35613936386534656134633266356437663733346533653831386336343061336365373330363164
+39313038356163656233343834346632643435393764326166376639383563633637626464353131
+33653865326262626461653664616532343436376466666566313961343462666633653736626235
+30336135613338333166323339316363623339346566353132313165353436353465643438353238
+65646262666135616638323065316630646538383038663635646561343235366264643739373038
+33376361653438306137373632306130303733363535386664613332666662663565626266346239
+65333461653632343364323039613461336130383831376437363066656461386566363366316236
+31326630313339333732356634363162316261336666633766636665643463613061313263386261
+37376434616431383132633161633130306638643833366434323339353838303933363234663137
+62663666306534623731366333383963656638653836323864326431303334646535313532656137
+30636266353332626461636639353235303638656431653361366633356361623330393136643539
+33366635646438313432396235386630386537336235383637326632303463333664656634613661
+64323361666430343735623565623532353161653763313965666338636462313463346233666462
+65656161646531353134323832633936636533363761376131636536636661316433383837663065
+34653766636466636336643231636463623638373131633838343538383961383334613631303936
+37353734363132306534633531346261623239616338613034656335363033333164393938633430
+61663239316233643462333739626634303664353531656165303432353061306136333764373832
+63343438633063386464663734643064666330393036303333663764663736303664653239336237
+65353534633731366232333232613133633661653764303036363063323966353965343365366639
+39396161383966343264646362636632303235376165663137343436346134333930656161623632
+34636565306666376433353532396663383339333534343766333363303232343530663431386635
+30346637656665396163613232316365353537633062316532656661326362336230323331313230
+61633563353064333965636437643162613665316230633438666133663465663566373266353639
+36376264303666663431643963323731356531343234313337363533663862386366636565646231
+31346165663236336335623661356432373431366332633933313536633238353435376631396433
+31383835323934323434383965383531626366653437316135643036336339316635323534393137
+65366465346666313838383137346238646666383033346237386333626562313238333730323130
+65316530326639663661346362663862303862316161623463376536356265643462306466653138
+66306361353536386662343162353930383865666433623030313830623735353731363562623162
+39643463646237336638653136383663346131626264653861326164646336656363343639383064
+64313965636434663762646261386336626533643739343063323339343333313739343634613937
+34383839336338383736386561616166386634333533616530383635346262643239386161643136
+38323437643839323832623162636532376338346636666266663538653863633735613632386431
+65363462313538396335633033633966396339626632396430353461343864323463653162393666
+39643566613836616135323439636165626338333439373738313130666138343430386431353636
+33646134623134363462383765643365626435366433363830653836653731333731343161643166
+39653838373632636131656333306162633233313765316138613136336235343137343037656435
+62363065326330343663343766666165356662613133373634633036316435626562366165643035
+65636463356430666266343264396531636266363065303137636332626435616264353232383633
+61666635356531336632623338633230336166626261623635613439366162383035653437383130
+62386635653864306334613861383433353562363562306633653866633531663266356561363930
+35323163616162353163323330636330623865633636343264353939356137336531323964346261
+32363133613232656661653863306138393730386265353366393633373231323835303736306332
+62353039313539663936393530643063633936643162373933303665346337643133343866323831
+31386131646632323433613931623331316236353866363062343632613163363366383633393736
+35633830656366373030623431336138346237663336333733336136363735643962333261323135
+37356633653331343166323534393465316465383731633536343836653362663839376439343861
+39373661326336323535373534306362326533663464306462303533306137623434
diff --git a/fdio.infra.ansible/vault_pass b/fdio.infra.ansible/vault_pass
new file mode 100644
index 0000000000..e1d46efc1f
--- /dev/null
+++ b/fdio.infra.ansible/vault_pass
@@ -0,0 +1 @@
+Csit1234
diff --git a/fdio.infra.ansible/vpp_device.yaml b/fdio.infra.ansible/vpp_device.yaml
new file mode 100644
index 0000000000..21676811c7
--- /dev/null
+++ b/fdio.infra.ansible/vpp_device.yaml
@@ -0,0 +1,42 @@
+---
+# file: vpp_device.yaml
+
+- hosts: vpp_device
+  remote_user: localadmin
+  become: true
+  become_user: root
+  gather_facts: false
+  pre_tasks:
+    - name: Gathering Facts
+      gather_facts:
+      tags:
+        - always
+  roles:
+    - role: user_add
+      tags: user_add
+    - role: baremetal
+      tags: baremetal
+    - role: common
+      tags: common
+#    - role: kernel
+#      tags: kernel
+    - role: intel
+      tags: intel
+    - role: docker
+      tags: docker
+    - role: docker_images
+      tags: docker_images
+    - role: nomad
+      tags: nomad
+    - role: consul
+      tags: consul
+    - role: prometheus_exporter
+      tags: prometheus_exporter
+    - role: jenkins_job_health_exporter
+      tags: jenkins_job_health_exporter
+    - role: vpp_device
+      tags: vpp_device
+    - role: kernel_vm
+      tags: kernel_vm
+    - role: cleanup
+      tags: cleanup