author     pmikus <peter.mikus@protonmail.ch>  2023-09-05 08:26:26 +0000
committer  Peter Mikus <peter.mikus@protonmail.ch>  2023-09-06 08:37:48 +0000
commit     4f69b1cbe4e3edb2ca5d6f10d6dae2a0a1fadcb2 (patch)
tree       9b09a2e3d2cb9e820ebe0b517b46498978ac4af0 /fdio.infra.ansible
parent     9e28d41817b4f1b3a77a4c65d76da9d292fd9d8a (diff)
feat(infra): Add new SPR servers
Signed-off-by: pmikus <peter.mikus@protonmail.ch>
Change-Id: Id90ca8323e6f49b51f19526e1089bf711e4e8182
Diffstat (limited to 'fdio.infra.ansible')
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml  87
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml  87
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml  96
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml  96
-rw-r--r--  fdio.infra.ansible/inventories/lf_inventory/hosts  4
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/defaults/main.yaml  2
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh  26
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/handlers/main.yaml  18
-rw-r--r--  fdio.infra.ansible/roles/vpp_device/tasks/main.yaml  52
9 files changed, 432 insertions, 36 deletions
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
new file mode 100644
index 0000000000..fbc2071c3f
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
@@ -0,0 +1,87 @@
+---
+# file: host_vars/10.30.51.21.yaml
+
+hostname: "s21-nomad"
+inventory_ipmi_hostname: "10.30.50.21"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: true
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.26"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.26:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.26"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
new file mode 100644
index 0000000000..8d8f807c55
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
@@ -0,0 +1,87 @@
+---
+# file: host_vars/10.30.51.22.yaml
+
+hostname: "s22-nomad"
+inventory_ipmi_hostname: "10.30.50.22"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_server_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_server_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "builder"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: true
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+ fingerprint.network.disallow_link_local: true
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.26"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.26:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_1_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_1_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.26"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker daemon settings.
+docker_daemon:
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ansible_hostname}}"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml
new file mode 100644
index 0000000000..8c2e764f11
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.30.yaml
@@ -0,0 +1,96 @@
+---
+# file: host_vars/10.30.51.30.yaml
+
+hostname: "s30-t15-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu: "on"
+ vfio.enable_unsafe_noiommu_mode: 1
+inventory_ipmi_hostname: "10.30.50.30"
+vfs_data_file: "csit-initialize-vfs-spr.sh"
+cpu_microarchitecture: "sapphirerapids"
+
+intel_800_matrix: "dpdk23.07"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csit"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_agent_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
+docker_daemon:
+ default-shm-size: "1073741824"
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ ansible_hostname }}"]
+ host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml
new file mode 100644
index 0000000000..51d7236d81
--- /dev/null
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.31.yaml
@@ -0,0 +1,96 @@
+---
+# file: host_vars/10.30.51.31.yaml
+
+hostname: "s31-t16-sut1"
+grub:
+ hugepagesz: "2M"
+ hugepages: 32768
+ iommu: "on"
+ vfio.enable_unsafe_noiommu_mode: 1
+inventory_ipmi_hostname: "10.30.50.31"
+vfs_data_file: "csit-initialize-vfs-spr.sh"
+cpu_microarchitecture: "sapphirerapids"
+
+intel_800_matrix: "dpdk23.07"
+
+# User management.
+users:
+ - username: localadmin
+ groups: [adm, sudo]
+ password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+ ssh_key:
+ - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKM1D1kkGX1l7fSma1MOgw2yLI7zJHwTCcfVROQ4hh7r peter.mikus@protonmail.ch"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCG7Shfr7ASXXwpazYDGFzChGl7i4HgY9n81GTwc17B/O19IiJdrxFcBQH19HMuTFtWU4d9bQ6xgzz2pojBN13L3pEsStCHovDlEYbvfRxI2l2hcAAop1j1E4izHAS9IvCoy1AgWqBg6tsfP5mzOwGbSbcdI9ADRKIHgDTVbHg9SqKed27bNOLU0u3/5ra2Oar/tVIW37geEqFV/nHIBZ03Y/mszvXP/t/hP5bgJIGJKkzTjLd1aqEcuGmubW+wTQnVnrhFB87dw91gPj6BVyV0+7Vt1wrvPKqP2sGJhojAMSrBQBySnlrYgEg00bwDgNGFevatfG9+nTId+nhoKBkXya3MjSp4HwrGqGcij3/h7ovlau3/iRhkqlSeqenaNm4zKTAXRTnb60j2WKa6im0zdqJX98anp4mhjE8xHhmmfZV3vRT8mtY4hF/lg79miXFHpWH97bZV6r/D9qj1HWI/laJfOC5MOJdRcLETwtsUNMHeHtVnY3yu0XFYNcl2Xwajtnz3waF7vn1QHvAM9p878+JrO/IEUqEc0yZWVvlhZ7Krx1pS+APoMy8097MYDGFzFTkYSstKLGbgm/S7dEiWuSVxmMUxW7JYO3gHrQ3f1EvAYh2UFdWy76Dzr5II9UpVwOwF+HL/Oy8Sk77bPaK+tn7Kh4Tx7WWE0+EOAgElQ== ayourtch@ayourtch-lnx"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD0GXoSGDZ95TE/TT3kf4ZS3Tiso0UO3MVhqAqZ/F6LOvLyqnMPfhxPz1XpHsDikxvKgwhZvdBm1dWbKkPsD7jtw0PGphQO8QuEwBd2ZMvxZ4Qg6lNDUl5x3zRO2nkbKpcqnOugGLTtXP+yfw/wfQ2HNFLDP9gE90xegsPT83PmRUntQlhbS3ByHcCSUScC+Y1heZXuoKNyrmUY46lxkKsNfhx8sQKo0YhB21atV/mcAQbAaO2LggmaQYGtWizqPNGWIRsi9W8ZYnKva67c3Pbv/TTfaqmrNCwOXJ8G9oL+/3MlKbl3b5mYlTs2a/e9yVgMNwUZVX7aiHpgPgaVjL6j swdev@BradyBunch-MacMini.local"
+ - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmo2YP4t/f58AAYH72rOe5VjYjk3wb/GY3aJEd5s9WspLhnsY0xBL67C+4kMq6VmQQvg0cUB8RJSFX1tUXMHCorVWnXNHkYomx0MCPcPUpVHuRyEqczYJ2pzgZsPzoEfw9E5hTrAiGzYFNAS/NOSavapVMDZxa1zsX7+sWQvJfmVtJWpcTQb0TkoWXRsy0YM3PYfUbYvK7lR3lGwyhwCcJn0WwWGreFB7bIok0poqqX5BgJ/okZuvC8II+UfuGoBHNhg49oqST1JlNi9gRqDNmLWkHRaneWZiF+Y2hdN3PRCdkt1x3eU0R+cdi5kPKslb6P0lsjOEA7fDLlq1+T2z1"
+sshd_disable_password_login: true
+
+# Nomad settings.
+nomad_version: "1.6.1"
+nomad_certificates:
+ - src: "{{ file_nomad_ca_pem }}"
+ dest: "{{ nomad_tls_ca_file }}"
+ - src: "{{ file_nomad_client_pem }}"
+ dest: "{{ nomad_tls_cert_file }}"
+ - src: "{{ file_nomad_client_key_pem }}"
+ dest: "{{ nomad_tls_key_file }}"
+nomad_datacenter: "yul1"
+nomad_node_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csit"
+nomad_options:
+ driver.raw_exec.enable: 1
+ docker.cleanup.image: false
+ docker.privileged.enabled: true
+ docker.volumes.enabled: true
+ driver.whitelist: "docker,raw_exec,exec"
+nomad_service_mgr: "systemd"
+nomad_consul_use_ssl: false
+nomad_use_tls: false
+nomad_tls_http: false
+nomad_tls_rpc: false
+nomad_use_vault: false
+nomad_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+nomad_servers:
+ - "10.30.51.23:4647"
+ - "10.30.51.24:4647"
+ - "10.30.51.25:4647"
+
+# Consul settings.
+nomad_use_consul: true
+consul_certificates:
+ - src: "{{ file_consul_agent_ca_pem }}"
+ dest: "{{ consul_ca_file }}"
+ - src: "{{ file_consul_server_0_pem }}"
+ dest: "{{ consul_cert_file }}"
+ - src: "{{ file_consul_server_0_key_pem }}"
+ dest: "{{ consul_key_file }}"
+consul_verify_incoming: false
+consul_verify_outgoing: false
+consul_vefify_server_hostname: false
+consul_allow_tls: false
+consul_datacenter: "yul1"
+consul_node_role: "client"
+consul_encrypt: "Y4T+5JGx1C3l2NFBBvkTWQ=="
+consul_node_name: "{{ ansible_hostname }}"
+consul_retry_join: true
+consul_retry_servers:
+ - "10.30.51.23"
+ - "10.30.51.24"
+ - "10.30.51.25"
+consul_service_mgr: "systemd"
+
+# Vault settings.
+vault_version: "1.13.1"
+
+# Docker settings.
+docker_daemon:
+ default-shm-size: "1073741824"
+ dns: ["172.17.0.1"]
+ dns-opts: []
+ dns-search: ["{{ ansible_hostname }}"]
+ host: ["172.17.0.1:/var/run/docker.sock"]
diff --git a/fdio.infra.ansible/inventories/lf_inventory/hosts b/fdio.infra.ansible/inventories/lf_inventory/hosts
index b43fdad227..f7141f9747 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/hosts
+++ b/fdio.infra.ansible/inventories/lf_inventory/hosts
@@ -46,12 +46,16 @@ all:
10.30.51.69: #s27-t211-sut1 - thunderx2 9975
vpp_device:
hosts:
+ 10.30.51.30: #s30-t15-sut1 - sapphirerapids
+ 10.30.51.31: #s31-t16-sut1 - sapphirerapids
10.30.51.50: #s1-t11-sut1 - skylake
10.30.51.51: #s2-t12-sut1 - skylake
10.30.51.70: #s55-t13-sut1 - thunderx2 9980
10.30.51.71: #s56-t14-sut1 - thunderx2 9980
nomad:
hosts:
+ 10.30.51.21: #s21-nomad - sapphirerapids
+ 10.30.51.22: #s22-nomad - sapphirerapids
10.30.51.23: #s23-nomad - skylake
10.30.51.24: #s24-nomad - skylake
10.30.51.25: #s25-nomad - skylake
diff --git a/fdio.infra.ansible/roles/vpp_device/defaults/main.yaml b/fdio.infra.ansible/roles/vpp_device/defaults/main.yaml
index 9816d7087c..377b910a42 100644
--- a/fdio.infra.ansible/roles/vpp_device/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/vpp_device/defaults/main.yaml
@@ -1,5 +1,5 @@
---
-# file: roles/vpp_device/defaults/main.yaml
+# file: defaults/main.yaml
iavf_target_dir: "/opt"
iavf_version: "4.1.1"
diff --git a/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh
new file mode 100644
index 0000000000..25a9f5d9d9
--- /dev/null
+++ b/fdio.infra.ansible/roles/vpp_device/files/csit-initialize-vfs-spr.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Add Intel Corporation Ethernet Controller 10G X550T to blacklist.
+PCI_BLACKLIST=($(lspci -Dmmd ':1563:0200' | cut -f1 -d' '))
+# Add Intel Corporation Ethernet Controller E810-C for 100GbE QSFP to whitelist.
+PCI_WHITELIST+=($(lspci -Dmmd ':1592:0200' | cut -f1 -d' '))
+
+# See http://pci-ids.ucw.cz/v2.2/pci.ids for more info.
+
+declare -A PF_INDICES
+# Intel NICs
+PF_INDICES["0000:86:00.0"]=0
+PF_INDICES["0000:af:00.0"]=0
diff --git a/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml b/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml
index 29342ae43c..3ac80cc16e 100644
--- a/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml
+++ b/fdio.infra.ansible/roles/vpp_device/handlers/main.yaml
@@ -1,21 +1,21 @@
---
-# file: roles/vpp_device/handlers/main.yaml
+# file: handlers/main.yaml
-- name: Start csit-initialize-vfs.service
- systemd:
+- name: "Start csit-initialize-vfs.service"
+ ansible.builtin.systemd:
enabled: true
- state: started
- name: csit-initialize-vfs.service
+ state: "started"
+ name: "csit-initialize-vfs.service"
tags:
- start-vf-service
-- name: Update GRUB
- command: update-grub
+- name: "Update GRUB"
+ ansible.builtin.command: "update-grub"
tags:
- update-grub
-- name: Reboot server
- reboot:
+- name: "Reboot server"
+ ansible.builtin.reboot:
reboot_timeout: 3600
tags:
- reboot-server
diff --git a/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml b/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml
index ec20d2a3cd..88d4ddb1a0 100644
--- a/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/vpp_device/tasks/main.yaml
@@ -1,15 +1,15 @@
---
-# file: roles/vpp_device/tasks/main.yaml
+# file: tasks/main.yaml
-- name: ThunderX2 Kernel Modules Config
- import_tasks: thunderx2.yaml
+- name: "ThunderX2 Kernel Modules Config"
+ import_tasks: "thunderx2.yaml"
when:
- cpu_microarchitecture == "thunderx2"
tags:
- conf-kernel-modules
-- name: Disable IPv6 Router Advertisement
- sysctl:
+- name: "Disable IPv6 Router Advertisement"
+ ansible.builtin.sysctl:
name: "net.ipv6.conf.default.accept_ra"
value: "0"
state: "present"
@@ -18,8 +18,8 @@
tags:
- set-sysctl
-- name: Disable IPv6 Router Advertisement
- sysctl:
+- name: "Disable IPv6 Router Advertisement"
+ ansible.builtin.sysctl:
name: "net.ipv6.conf.all.accept_ra"
value: "0"
state: "present"
@@ -28,8 +28,8 @@
tags:
- set-sysctl
-- name: Disable IPv6 MLDv1 interval
- sysctl:
+- name: "Disable IPv6 MLDv1 interval"
+ ansible.builtin.sysctl:
name: "net.ipv6.conf.all.mldv1_unsolicited_report_interval"
value: "0"
state: "present"
@@ -38,8 +38,8 @@
tags:
- set-sysctl
-- name: Disable IPv6 MLDv2 interval
- sysctl:
+- name: "Disable IPv6 MLDv2 interval"
+ ansible.builtin.sysctl:
name: "net.ipv6.conf.all.mldv2_unsolicited_report_interval"
value: "0"
state: "present"
@@ -48,8 +48,8 @@
tags:
- set-sysctl
-- name: Disable IPv6 Autoconf
- sysctl:
+- name: "Disable IPv6 Autoconf"
+ ansible.builtin.sysctl:
name: "net.ipv6.conf.all.autoconf"
value: "0"
state: "present"
@@ -58,8 +58,8 @@
tags:
- set-sysctl
-- name: Disable IPv6 MC Forwarding
- sysctl:
+- name: "Disable IPv6 MC Forwarding"
+ ansible.builtin.sysctl:
name: "net.ipv6.conf.all.mc_forwarding"
value: "0"
state: "present"
@@ -68,8 +68,8 @@
tags:
- set-sysctl
-- name: Copy csit-initialize-vfs.sh
- copy:
+- name: "Copy csit-initialize-vfs.sh"
+ ansible.builtin.copy:
src: "files/csit-initialize-vfs.sh"
dest: "/usr/local/bin/"
owner: "root"
@@ -78,8 +78,8 @@
tags:
- copy-vf-script
-- name: Copy csit-initialize-vfs-data.sh
- copy:
+- name: "Copy csit-initialize-vfs-data.sh"
+ ansible.builtin.copy:
src: "files/{{ vfs_data_file }}"
dest: "/usr/local/bin/csit-initialize-vfs-data.sh"
owner: "root"
@@ -89,8 +89,8 @@
when:
- vfs_data_file is defined
-- name: Copy Default csit-initialize-vfs-data.sh
- copy:
+- name: "Copy Default csit-initialize-vfs-data.sh"
+ ansible.builtin.copy:
src: "files/csit-initialize-vfs-default.sh"
dest: "/usr/local/bin/csit-initialize-vfs-data.sh"
owner: "root"
@@ -100,8 +100,8 @@
when:
- vfs_data_file is not defined
-- name: Start csit-initialize-vfs.service
- copy:
+- name: "Start csit-initialize-vfs.service"
+ ansible.builtin.copy:
src: "files/csit-initialize-vfs.service"
dest: "/etc/systemd/system/"
owner: "root"
@@ -112,10 +112,10 @@
tags:
- start-vf-service
-- meta: flush_handlers
+- ansible.builtin.meta: "flush_handlers"
-- name: Set Hugepages In GRUB
- lineinfile:
+- name: "Set Hugepages In GRUB"
+ ansible.builtin.lineinfile:
path: "/etc/default/grub"
state: "present"
regexp: "^GRUB_CMDLINE_LINUX="
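For reference, on the new SPR SUTs (s30-t15-sut1 and s31-t16-sut1) the grub dict from the host_vars above would presumably reach the kernel command line once this "Set Hugepages In GRUB" task rewrites GRUB_CMDLINE_LINUX and the update-grub and reboot handlers run. A hedged illustration only, assuming the role joins each key=value pair verbatim; the exact rendered line is not shown in this diff.

# Hypothetical resulting entry in /etc/default/grub:
GRUB_CMDLINE_LINUX="hugepagesz=2M hugepages=32768 iommu=on vfio.enable_unsafe_noiommu_mode=1"
# After reboot, the applied values can be checked with: cat /proc/cmdline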