aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorpmikus <peter.mikus@protonmail.ch>2023-08-04 10:27:17 +0000
committerPeter Mikus <peter.mikus@protonmail.ch>2023-08-04 11:01:25 +0000
commit4b3893704a47cbfbe9986ade3745ad3d9ebcc304 (patch)
treea75f56928364983cf7bd2c7cb15a8177fcfa7ecb
parent4c138c4a6dc8fc55f6b64cbfc7daaaf0f05f258c (diff)
fix(ansible): QAT v1.x installoper-230807
Signed-off-by: pmikus <peter.mikus@protonmail.ch> Change-Id: I2e2047f3d02a1c4f90298cb15f2574e5ea6a7ad4
-rw-r--r--fdio.infra.ansible/roles/intel/defaults/main.yaml6
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/main.yaml32
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/qat1.yaml54
-rw-r--r--fdio.infra.ansible/roles/intel/tasks/qat2.yaml (renamed from fdio.infra.ansible/roles/intel/tasks/qat.yaml)22
4 files changed, 98 insertions, 16 deletions
diff --git a/fdio.infra.ansible/roles/intel/defaults/main.yaml b/fdio.infra.ansible/roles/intel/defaults/main.yaml
index 56696a5467..4589ec2354 100644
--- a/fdio.infra.ansible/roles/intel/defaults/main.yaml
+++ b/fdio.infra.ansible/roles/intel/defaults/main.yaml
@@ -88,7 +88,8 @@ intel_dsa_compatibility_matrix:
dsa: "4.0"
intel_qat_compatibility_matrix:
- qat: "1.0.20-00008"
+ qat2: "1.0.20-00008"
+ qat1: "4.22.0-00001"
intel_i40e_url:
"2.17.15": "i40e%20stable/2.17.15/i40e-2.17.15.tar.gz/download"
@@ -113,4 +114,5 @@ intel_dsa_url:
"4.0": "https://github.com/intel/idxd-config/archive/refs/tags/accel-config-v4.0.tar.gz"
intel_qat_url:
- "1.0.20-00008": "777529/QAT20.L.1.0.20-00008.tar.gz" \ No newline at end of file
+ "1.0.20-00008": "777529/QAT20.L.1.0.20-00008.tar.gz"
+ "4.22.0-00001": "780675/QAT.L.4.22.0-00001.tar.gz" \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/intel/tasks/main.yaml b/fdio.infra.ansible/roles/intel/tasks/main.yaml
index 80c8d38998..81b97d87c8 100644
--- a/fdio.infra.ansible/roles/intel/tasks/main.yaml
+++ b/fdio.infra.ansible/roles/intel/tasks/main.yaml
@@ -41,6 +41,22 @@
tags:
- intel-inst-drivers
+- name: Check Presence of Intel C4XXX
+ ansible.builtin.shell: "lspci -d 8086:18a0"
+ register: intel_qat1_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
+- name: Check Presence of Intel 4XXX
+ ansible.builtin.shell: "lspci -d 8086:4942"
+ register: intel_qat2_pcis
+ failed_when: false
+ changed_when: false
+ tags:
+ - intel-inst-drivers
+
- name: Get Intel Ethernet 700 Series driver versions
ansible.builtin.set_fact:
i40e: "{{ intel_700_compatibility_matrix[intel_700_matrix]['i40e'] }}"
@@ -72,7 +88,8 @@
- name: Get Intel QAT driver versions
ansible.builtin.set_fact:
- qat: "{{ intel_qat_compatibility_matrix['qat'] }}"
+ qat1: "{{ intel_qat_compatibility_matrix['qat1'] }}"
+ qat2: "{{ intel_qat_compatibility_matrix['qat2'] }}"
when: >
intel_qat_matrix is defined
tags:
@@ -112,9 +129,18 @@
tags:
- intel-inst-drivers
-- name: Driver Intel QAT
- import_tasks: qat.yaml
+- name: Driver Intel QAT 1.x
+ import_tasks: qat1.yaml
+ when: >
+ intel_qat1_pcis.stdout_lines | length > 0 and
+ intel_qat_matrix is defined
+ tags:
+ - intel-inst-drivers
+
+- name: Driver Intel QAT 2.x
+ import_tasks: qat2.yaml
when: >
+ intel_qat2_pcis.stdout_lines | length > 0 and
intel_qat_matrix is defined
tags:
- intel-inst-drivers \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/intel/tasks/qat1.yaml b/fdio.infra.ansible/roles/intel/tasks/qat1.yaml
new file mode 100644
index 0000000000..701c0c1bf1
--- /dev/null
+++ b/fdio.infra.ansible/roles/intel/tasks/qat1.yaml
@@ -0,0 +1,54 @@
+---
+# file: tasks/qat1.yaml
+
+- name: Get QAT 1.x Driver
+ ansible.builtin.uri:
+ url: "{{ intel_download_url }}/{{ intel_qat_url[qat1] }}"
+ follow_redirects: "all"
+ force: true
+ dest: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}.tar.gz"
+ mode: "0644"
+ failed_when: false
+ tags:
+ - intel-inst
+
+- name: Create a Directory For QAT 1.x Driver
+ ansible.builtin.file:
+ path: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}/"
+ state: "directory"
+ mode: "0755"
+ tags:
+ - intel-inst
+
+- name: Extract QAT 1.x Driver
+ ansible.builtin.unarchive:
+ remote_src: true
+ src: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}/"
+ register: intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Install QAT1.x Driver
+ ansible.builtin.command: "{{ item }}"
+ args:
+ chdir: "{{ intel_extract_dir }}/QAT.L.{{ qat1 }}"
+ become: true
+ with_items:
+ - "./configure --enable-icp-sriov=host --enable-icp-sym-only"
+ - "make"
+ - "make install"
+ when:
+ - intel_driver_extracted
+ tags:
+ - intel-inst
+
+- name: Load Kernel Modules By Default
+ ansible.builtin.lineinfile:
+ path: "/etc/modules"
+ state: "present"
+ line: "{{ item }}"
+ with_items:
+ - "qat_c4xxx"
+ tags:
+ - intel-inst \ No newline at end of file
diff --git a/fdio.infra.ansible/roles/intel/tasks/qat.yaml b/fdio.infra.ansible/roles/intel/tasks/qat2.yaml
index 1040e45056..a560f16b2c 100644
--- a/fdio.infra.ansible/roles/intel/tasks/qat.yaml
+++ b/fdio.infra.ansible/roles/intel/tasks/qat2.yaml
@@ -1,38 +1,38 @@
---
-# file: tasks/qat.yaml
+# file: tasks/qat2.yaml
-- name: Get QAT Driver
+- name: Get QAT 2.x Driver
ansible.builtin.uri:
- url: "{{ intel_download_url }}/{{ intel_qat_url[qat] }}"
+ url: "{{ intel_download_url }}/{{ intel_qat_url[qat2] }}"
follow_redirects: "all"
force: true
- dest: "{{ intel_extract_dir }}/QAT20.L.{{ qat }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}.tar.gz"
mode: "0644"
failed_when: false
tags:
- intel-inst
-- name: Create a Directory For QAT Driver
+- name: Create a Directory For QAT 2.x Driver
ansible.builtin.file:
- path: "{{ intel_extract_dir }}/QAT20.L.{{ qat }}/"
+ path: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}/"
state: "directory"
mode: "0755"
tags:
- intel-inst
-- name: Extract QAT Driver
+- name: Extract QAT 2.x Driver
ansible.builtin.unarchive:
remote_src: true
- src: "{{ intel_extract_dir }}/QAT20.L.{{ qat }}.tar.gz"
- dest: "{{ intel_extract_dir }}/QAT20.L.{{ qat }}/"
+ src: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}.tar.gz"
+ dest: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}/"
register: intel_driver_extracted
tags:
- intel-inst
-- name: Install QAT Driver
+- name: Install QAT 2.x Driver
ansible.builtin.command: "{{ item }}"
args:
- chdir: "{{ intel_extract_dir }}/QAT20.L.{{ qat }}"
+ chdir: "{{ intel_extract_dir }}/QAT20.L.{{ qat2 }}"
become: true
with_items:
- "wget http://security.ubuntu.com/ubuntu/pool/main/s/systemd/libudev-dev_249.11-0ubuntu3.7_amd64.deb"
# ETL job: collect CSIT coverage "soak" result JSONs from the logs S3 bucket,
# flatten them into a single Spark DataFrame, and publish it as partitioned
# parquet to the docs S3 bucket.
#
# NOTE(review): the original import header was truncated/garbled by HTML
# extraction residue ("span class=\"nn\">boto3 import session ...").  The
# imports below are reconstructed from the names the script actually uses
# (environ, datetime/timedelta, utc, load, wr, GlueContext, EmptyDataFrame,
# session, SparkContext, col/lit/regexp_replace, StructType) -- confirm
# against the original repository.

from datetime import datetime, timedelta
from json import load
from os import environ
from pytz import utc

import awswrangler as wr
from awswrangler.exceptions import EmptyDataFrame
from awsglue.context import GlueContext
from boto3 import session
from pyspark.context import SparkContext
from pyspark.sql.functions import col, lit, regexp_replace
from pyspark.sql.types import StructType

# Source (Jenkins logs) and destination (docs) buckets; overridable via env.
S3_LOGS_BUCKET = environ.get("S3_LOGS_BUCKET", "fdio-logs-s3-cloudfront-index")
S3_DOCS_BUCKET = environ.get("S3_DOCS_BUCKET", "fdio-docs-s3-cloudfront-index")
PATH = f"s3://{S3_LOGS_BUCKET}/vex-yul-rot-jenkins-1/csit-*-perf-*"
SUFFIX = "info.json.gz"
# Non-test artifacts (suite/setup/teardown records) to exclude from listing.
IGNORE_SUFFIX = [
    "suite.info.json.gz",
    "setup.info.json.gz",
    "teardown.info.json.gz",
    "suite.output.info.json.gz",
    "setup.output.info.json.gz",
    "teardown.output.info.json.gz"
]
# Process objects modified within the last day (midnight UTC today, back 1 day).
LAST_MODIFIED_END = utc.localize(
    datetime.strptime(
        f"{datetime.now().year}-{datetime.now().month}-{datetime.now().day}",
        "%Y-%m-%d"
    )
)
LAST_MODIFIED_BEGIN = LAST_MODIFIED_END - timedelta(1)


def flatten_frame(nested_sdf):
    """Unnest Spark DataFrame in case there nested structered columns.

    :param nested_sdf: Spark DataFrame.
    :type nested_sdf: DataFrame
    :returns: Unnest DataFrame.
    :rtype: DataFrame
    """
    # Iteratively walk struct columns, collecting fully-qualified leaf columns
    # aliased with "_" joined path names (e.g. a.b.c -> a_b_c).
    stack = [((), nested_sdf)]
    columns = []
    while len(stack) > 0:
        parents, sdf = stack.pop()
        for column_name, column_type in sdf.dtypes:
            if column_type[:6] == "struct":
                projected_sdf = sdf.select(column_name + ".*")
                stack.append((parents + (column_name,), projected_sdf))
            else:
                columns.append(
                    col(".".join(parents + (column_name,))) \
                    .alias("_".join(parents + (column_name,)))
                )
    return nested_sdf.select(columns)


def process_json_to_dataframe(schema_name, paths):
    """Processes JSON to Spark DataFrame.

    :param schema_name: Schema name.
    :type schema_name: string
    :param paths: S3 paths to process.
    :type paths: list
    :returns: Spark DataFrame.
    :rtype: DataFrame
    """
    # Rows missing any of these fields are dropped as unusable test records.
    drop_subset = [
        "dut_type", "dut_version",
        "passed",
        "test_name_long", "test_name_short",
        "test_type",
        "version"
    ]

    # load schemas
    with open(f"coverage_{schema_name}.json", "r", encoding="UTF-8") as f_schema:
        schema = StructType.fromJson(load(f_schema))

    # create empty DF out of schemas
    sdf = spark.createDataFrame([], schema)

    # filter list
    filtered = [path for path in paths if schema_name in path]

    # select
    for path in filtered:
        print(path)
        # Path layout: s3://bucket/executor/job/build/... -> index 4 is the
        # job name, index 5 the build number (assumes the PATH glob above).
        sdf_loaded = spark \
            .read \
            .option("multiline", "true") \
            .schema(schema) \
            .json(path) \
            .withColumn("job", lit(path.split("/")[4])) \
            .withColumn("build", lit(path.split("/")[5]))
        sdf = sdf.unionByName(sdf_loaded, allowMissingColumns=True)

    # drop rows with all nulls and drop rows with null in critical frames
    sdf = sdf.na.drop(how="all")
    sdf = sdf.na.drop(how="any", thresh=None, subset=drop_subset)

    # flatten frame
    sdf = flatten_frame(sdf)

    return sdf


# create SparkContext and GlueContext
spark_context = SparkContext.getOrCreate()
spark_context.setLogLevel("WARN")
glue_context = GlueContext(spark_context)
spark = glue_context.spark_session

# files of interest
paths = wr.s3.list_objects(
    path=PATH,
    suffix=SUFFIX,
    last_modified_begin=LAST_MODIFIED_BEGIN,
    last_modified_end=LAST_MODIFIED_END,
    ignore_suffix=IGNORE_SUFFIX,
    ignore_empty=True
)

filtered_paths = [path for path in paths if "report-coverage-2406" in path]

out_sdf = process_json_to_dataframe("soak", filtered_paths)
out_sdf.printSchema()
out_sdf = out_sdf \
    .withColumn("year", lit(datetime.now().year)) \
    .withColumn("month", lit(datetime.now().month)) \
    .withColumn("day", lit(datetime.now().day)) \
    .repartition(1)

# Prefer explicit output credentials from the environment; fall back to the
# ambient (instance/role) credentials when they are not set.
try:
    boto3_session = session.Session(
        aws_access_key_id=environ["OUT_AWS_ACCESS_KEY_ID"],
        aws_secret_access_key=environ["OUT_AWS_SECRET_ACCESS_KEY"],
        region_name=environ["OUT_AWS_DEFAULT_REGION"]
    )
except KeyError:
    boto3_session = session.Session()

try:
    wr.s3.to_parquet(
        df=out_sdf.toPandas(),
        path=f"s3://{S3_DOCS_BUCKET}/csit/parquet/coverage_rls2406",
        dataset=True,
        partition_cols=["test_type", "year", "month", "day"],
        compression="snappy",
        use_threads=True,
        mode="overwrite_partitions",
        boto3_session=boto3_session
    )
except EmptyDataFrame:
    # No rows matched the window/filters; an empty upload is not an error.
    pass