author     Dave Wallace <dwallacelf@gmail.com>    2021-09-14 18:27:26 +0000
committer  Gerrit Code Review <gerrit@fd.io>      2021-09-14 18:27:26 +0000
commit     511bb965c5d7b3d54dfcf983753fe5a1bf883028 (patch)
tree       7d85523914033c1e6b725a9c75a9e29928f02a19 /jjb
parent     a67b26f44b7beca8ac4be57272b362b9652e51b8 (diff)
parent     9b69c8fd2589680391ff010d849277b3cf4c898c (diff)
Merge "Docs: Cleanup"
Diffstat (limited to 'jjb')
-rw-r--r--  jjb/global-macros.yaml               40
-rw-r--r--  jjb/scripts/publish_backup_logs.sh   39
-rw-r--r--  jjb/scripts/publish_library_py.sh   139
-rw-r--r--  jjb/scripts/publish_logs.sh          13
4 files changed, 131 insertions, 100 deletions
diff --git a/jjb/global-macros.yaml b/jjb/global-macros.yaml
index ea0e367df..e00d42df8 100644
--- a/jjb/global-macros.yaml
+++ b/jjb/global-macros.yaml
@@ -812,60 +812,44 @@
       $WORKSPACE/scripts/check-unicode.sh jjb/
 
 - builder:
-    name: fdio-infra-ship-docs
+    name: fdio-infra-ship-backup-logs
     builders:
       - config-file-provider:
           files:
-            - file-id: "jenkins-log-archives-settings"
-              variable: "SETTINGS_FILE"
-      - config-file-provider:
-          files:
-            - file-id: "jenkins-s3-docs-ship"
+            - file-id: "jenkins-s3-log-ship"
               target: $HOME/.aws/credentials
       - shell: !include-raw:
           - scripts/publish_library_py.sh
       - shell: !include-raw:
-          - scripts/publish_docs.sh
+          - scripts/publish_backup_logs.sh
       - shell: !include-raw:
           - ../global-jjb/shell/logs-clear-credentials.sh
 
 - builder:
-    name: fdio-infra-ship-backup-logs
+    name: fdio-infra-ship-docs
     builders:
       - config-file-provider:
           files:
-            - file-id: "jenkins-s3-log-ship"
-              variable: $HOME/.aws/credentials
+            - file-id: "jenkins-s3-docs-ship"
+              target: $HOME/.aws/credentials
       - shell: !include-raw:
           - scripts/publish_library_py.sh
       - shell: !include-raw:
-          - scripts/publish_logs.sh
+          - scripts/publish_docs.sh
       - shell: !include-raw:
           - ../global-jjb/shell/logs-clear-credentials.sh
 
 - builder:
     name: fdio-infra-ship-logs
     builders:
-      # Ensure no pre-existing .netrc files are overriding logs config
-      - lf-provide-maven-settings-cleanup
       - config-file-provider:
           files:
-            - file-id: "jenkins-log-archives-settings"
-              variable: "SETTINGS_FILE"
-      - conditional-step:
-          condition-kind: regex-match
-          regex: "^.*logs-s3.*"
-          label: $S3_BUCKET
-          on-evaluation-failure: dont-run
-          steps:
-            - config-file-provider:
-                files:
-                  - file-id: "jenkins-s3-log-ship"
-                    target: $HOME/.aws/credentials
-      - lf-infra-create-netrc:
-          server-id: logs
+            - file-id: "jenkins-s3-log-ship"
+              target: $HOME/.aws/credentials
+      - shell: !include-raw:
+          - scripts/publish_library_py.sh
       - shell: !include-raw:
-          - ../global-jjb/shell/logs-deploy.sh
+          - scripts/publish_logs.sh
       - shell: !include-raw:
           - ../global-jjb/shell/logs-clear-credentials.sh
       - description-setter:
diff --git a/jjb/scripts/publish_backup_logs.sh b/jjb/scripts/publish_backup_logs.sh
new file mode 100644
index 000000000..002b08308
--- /dev/null
+++ b/jjb/scripts/publish_backup_logs.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> publish_backup_logs.sh"
+
+S3_BUCKET="logs.fd.io"
+CDN_URL="logs.nginx.service.consul"
+export AWS_ENDPOINT_URL="http://storage.service.consul:9000"
+PYTHON_SCRIPT="/w/workspace/publish_library.py"
+
+# FIXME: s3 config (until migrated to config provider, then pwd will be reset)
+mkdir -p ${HOME}/.aws
+echo "[default]
+aws_access_key_id = storage
+aws_secret_access_key = Storage1234" > "$HOME/.aws/credentials"
+
+mkdir -p "$WORKSPACE/archives"
+
+s3_path="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER/"
+
+echo "INFO: S3 path $s3_path"
+
+echo "INFO: archiving backup logs to S3"
+python3 $PYTHON_SCRIPT deploy_s3 "$S3_BUCKET" "$s3_path" \
+    "$BUILD_URL" "$WORKSPACE"
+
+echo "S3 build backup logs: <a href=\"https://$CDN_URL/$s3_path\">https://$CDN_URL/$s3_path</a>"
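Each rewritten builder now follows the same four-step shape: install S3 credentials with config-file-provider, generate /w/workspace/publish_library.py from the publish_library_py.sh heredoc, run one purpose-specific publish script, and clear credentials. Every publish script drives the generated library the same way: python3 publish_library.py deploy_s3 <bucket> <s3_path> <build_url> <workspace>. A minimal Python sketch of that calling convention (the real entry point lives inside the heredoc; the dispatcher and the deploy_s3 stub below are illustrative assumptions, not the committed code):

# Hypothetical dispatcher mirroring the observed invocation:
#   python3 publish_library.py deploy_s3 <bucket> <s3_path> <build_url> <workspace>
# deploy_s3() is a stub standing in for the real function shown in the
# publish_library_py.sh diff below.
import sys


def deploy_s3(s3_bucket, s3_path, build_url, workspace):
    print(f"would archive {workspace} to s3://{s3_bucket}/{s3_path} ({build_url})")


COMMANDS = {"deploy_s3": deploy_s3}

if __name__ == "__main__":
    command, *args = sys.argv[1:]
    COMMANDS[command](*args)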
diff --git a/jjb/scripts/publish_library_py.sh b/jjb/scripts/publish_library_py.sh
index f8430ff0b..f9b6a378c 100644
--- a/jjb/scripts/publish_library_py.sh
+++ b/jjb/scripts/publish_library_py.sh
@@ -27,6 +27,7 @@ cat >$PYTHON_SCRIPT <<'END_OF_PYTHON_SCRIPT'
 
 """S3 publish library."""
 
+import glob
 import gzip
 import logging
 import os
@@ -49,23 +50,37 @@ logging.basicConfig(
 )
 logging.getLogger(u"botocore").setLevel(logging.INFO)
 
-COMPRESS_MIME = (
-    u"text/html",
-    u"text/xml",
-    u"text/plain",
-    u"application/octet-stream"
-)
-
 
-def compress(src_fpath):
-    """Compress a single file.
+def compress_text(src_dpath):
+    """Compress all text files in directory.
 
-    :param src_fpath: Input file path.
-    :type src_fpath: str
+    :param src_dpath: Input dir path.
+    :type src_dpath: str
     """
-    with open(src_fpath, u"rb") as orig_file:
-        with gzip.open(src_fpath + ".gz", u"wb") as zipped_file:
-            zipped_file.writelines(orig_file)
+    save_dir = os.getcwd()
+    os.chdir(src_dpath)
+
+    compress_types = [
+        "**/*.html",
+        "**/*.log",
+        "**/*.txt",
+        "**/*.xml",
+        "**/*.json"
+    ]
+    paths = []
+    for _type in compress_types:
+        search = os.path.join(src_dpath, _type)
+        paths.extend(glob.glob(search, recursive=True))
+
+    for _file in paths:
+        # glob may follow symlink paths that open can't find
+        if os.path.exists(_file):
+            gz_file = u"{}.gz".format(_file)
+            with open(_file, "rb") as src, gzip.open(gz_file, "wb") as dest:
+                shutil.copyfileobj(src, dest)
+            os.remove(_file)
+
+    os.chdir(save_dir)
 
 
 def copy_archives(workspace):
@@ -111,22 +126,37 @@ def upload(s3_resource, s3_bucket, src_fpath, s3_path):
     :type src_fpath: str
     :type s3_path: str
     """
-    mime_guess = MimeTypes().guess_type(src_fpath)
-    mime = mime_guess[0]
-    encoding = mime_guess[1]
-    if not mime:
-        mime = u"application/octet-stream"
-
-    if u"logs" in s3_bucket:
-        if mime in COMPRESS_MIME and encoding != u"gzip":
-            compress(src_fpath)
-            src_fpath = src_fpath + u".gz"
-            s3_path = s3_path + u".gz"
-
-    extra_args = {u"ContentType": mime}
+    extra_args = {
+        u"ContentType": u"text/plain"
+    }
+    text_html_extra_args = {
+        u"ContentType": u"text/html",
+        u"ContentEncoding": MimeTypes().guess_type(src_fpath)[1]
+    }
+    text_plain_extra_args = {
+        u"ContentType": u"text/plain",
+        u"ContentEncoding": MimeTypes().guess_type(src_fpath)[1]
+    }
+    app_xml_extra_args = {
+        u"ContentType": u"application/xml",
+        u"ContentEncoding": MimeTypes().guess_type(src_fpath)[1]
+    }
+
+    mime = MimeTypes().guess_type(src_fpath)[0]
+    encoding = MimeTypes().guess_type(src_fpath)[1]
+
+    if mime is None and encoding is None:
+        extra_args = extra_args
+    elif mime is None or mime in u"text/plain":
+        extra_args = text_plain_extra_args
+    elif mime in u"text/html":
+        extra_args = text_html_extra_args
+    elif mime in u"application/xml":
+        extra_args = app_xml_extra_args
+    else:
+        extra_args = extra_args
 
     try:
-        logging.info(u"Attempting to upload file " + src_fpath)
         s3_resource.Bucket(s3_bucket).upload_file(
             src_fpath, s3_path, ExtraArgs=extra_args
         )
@@ -176,7 +206,15 @@ def deploy_docs(s3_bucket, s3_path, docs_dir):
     :type s3_path: str
     :type docs_dir: str
     """
-    s3_resource = boto3.resource(u"s3")
+    try:
+        s3_resource = boto3.resource(
+            u"s3",
+            endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
+        )
+    except KeyError:
+        s3_resource = boto3.resource(
+            u"s3"
+        )
 
     upload_recursive(
         s3_resource=s3_resource,
@@ -204,10 +242,15 @@ def deploy_s3(s3_bucket, s3_path, build_url, workspace):
     :type build_url: str
     :type workspace: str
     """
-    s3_resource = boto3.resource(
-        u"s3",
-        endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
-    )
+    try:
+        s3_resource = boto3.resource(
+            u"s3",
+            endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
+        )
+    except KeyError:
+        s3_resource = boto3.resource(
+            u"s3"
+        )
 
     previous_dir = os.getcwd()
     work_dir = tempfile.mkdtemp(prefix="backup-s3.")
@@ -220,34 +263,6 @@ def deploy_s3(s3_bucket, s3_path, build_url, workspace):
     with open(u"_build-details.log", u"w+") as f:
         f.write(u"build-url: " + build_url)
 
-    with open(u"_sys-info.log", u"w+") as f:
-        sys_cmds = []
-
-        logging.debug(u"Platform: " + sys.platform)
-        if sys.platform == u"linux" or sys.platform == u"linux2":
-            sys_cmds = [
-                [u"uname", u"-a"],
-                [u"lscpu"],
-                [u"nproc"],
-                [u"df", u"-h"],
-                [u"free", u"-m"],
-                [u"ip", u"addr"],
-                [u"sar", u"-b", u"-r", u"-n", u"DEV"],
-                [u"sar", u"-P", u"ALL"],
-            ]
-
-        for c in sys_cmds:
-            try:
-                output = subprocess.check_output(c).decode(u"utf-8")
-            except FileNotFoundError:
-                logging.debug(u"Command not found: " + c)
-                continue
-
-            cmd = u" ".join(c)
-            output = u"---> " + cmd + "\n" + output + "\n"
-            f.write(output)
-            logging.info(output)
-
     # Magic string used to trim console logs at the appropriate level during
     # wget.
     MAGIC_STRING = u"-----END_OF_BUILD-----"
@@ -266,6 +281,8 @@
             six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
         )
 
+    compress_text(work_dir)
+
     upload_recursive(
         s3_resource=s3_resource,
         s3_bucket=s3_bucket,
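In the library, compress_text() replaces the old single-file compress() helper and the COMPRESS_MIME allow-list: deploy_s3() now gzips every matched text artifact under the work directory in place, deleting the original, and upload() tags known text types with ContentType plus ContentEncoding so browsers can decompress the served logs transparently. A standalone sketch of the same in-place gzip pattern (the directory argument and suffix list here are illustrative, not the job's real inputs):

# In-place gzip pattern used by compress_text(), as a standalone sketch.
import glob
import gzip
import os
import shutil


def gzip_in_place(root, patterns=("**/*.log", "**/*.txt")):
    for pattern in patterns:
        for path in glob.glob(os.path.join(root, pattern), recursive=True):
            # glob can return symlinks whose targets no longer exist
            if not os.path.isfile(path):
                continue
            with open(path, "rb") as src, gzip.open(path + ".gz", "wb") as dst:
                shutil.copyfileobj(src, dst)  # stream in chunks, not one read
            os.remove(path)

shutil.copyfileobj streams each file through gzip in fixed-size chunks, which keeps memory use flat even for very large console logs.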
diff --git a/jjb/scripts/publish_logs.sh b/jjb/scripts/publish_logs.sh
index bc1e24c15..a567106ad 100644
--- a/jjb/scripts/publish_logs.sh
+++ b/jjb/scripts/publish_logs.sh
@@ -15,25 +15,16 @@
 
 echo "---> publish_logs.sh"
 
-S3_BUCKET="logs.fd.io"
-CDN_URL="logs.nginx.service.consul"
-export AWS_ENDPOINT_URL="http://storage.service.consul:9000"
 PYTHON_SCRIPT="/w/workspace/publish_library.py"
 
-# FIXME: s3 config (until migrated to config provider, then pwd will be reset)
-mkdir -p ${HOME}/.aws
-echo "[default]
-aws_access_key_id = storage
-aws_secret_access_key = Storage1234" > "$HOME/.aws/credentials"
-
 mkdir -p "$WORKSPACE/archives"
 
 s3_path="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER/"
 
 echo "INFO: S3 path $s3_path"
 
-echo "INFO: archiving backup logs to S3"
+echo "INFO: archiving logs to S3"
 python3 $PYTHON_SCRIPT deploy_s3 "$S3_BUCKET" "$s3_path" \
     "$BUILD_URL" "$WORKSPACE"
 
-echo "S3 build backup logs: <a href=\"https://$CDN_URL/$s3_path\">https://$CDN_URL/$s3_path</a>"
+echo "S3 build logs: <a href=\"https://$CDN_URL/$s3_path\">https://$CDN_URL/$s3_path</a>"
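publish_logs.sh keeps only the deploy_s3 invocation: S3_BUCKET, CDN_URL, AWS_ENDPOINT_URL, and the credentials file are now supplied by the job environment (the config-file-provider entries in the macros above) instead of being hardcoded next to plaintext keys. The try/except KeyError around boto3.resource() in the library lets the same code publish to the Consul-served MinIO endpoint when AWS_ENDPOINT_URL is set, and to plain AWS S3 when it is not. Since boto3 treats endpoint_url=None as "use the default endpoint", that fallback can also be written without the exception handler; a sketch:

# Equivalent of the library's try/except KeyError fallback, as a sketch:
# passing endpoint_url=None makes boto3 construct the default AWS endpoint,
# so a single environ.get() covers both the MinIO and the plain-AWS case.
import os

import boto3


def make_s3_resource():
    return boto3.resource("s3", endpoint_url=os.environ.get("AWS_ENDPOINT_URL"))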