Diffstat (limited to 'jjb')
-rw-r--r--  jjb/global-macros.yaml                  2
-rw-r--r--  jjb/scripts/publish_docs.sh            26
-rw-r--r--  jjb/scripts/terraform_s3_docs_ship.sh  91
3 files changed, 107 insertions, 12 deletions
diff --git a/jjb/global-macros.yaml b/jjb/global-macros.yaml
index e00d42df8..0d3e9344e 100644
--- a/jjb/global-macros.yaml
+++ b/jjb/global-macros.yaml
@@ -833,7 +833,7 @@
- file-id: "jenkins-s3-docs-ship"
target: $HOME/.aws/credentials
- shell: !include-raw:
- - scripts/publish_library_py.sh
+ - scripts/terraform_s3_docs_ship.sh
- shell: !include-raw:
- scripts/publish_docs.sh
- shell: !include-raw:
diff --git a/jjb/scripts/publish_docs.sh b/jjb/scripts/publish_docs.sh
index 7246ffc75..86963b68a 100644
--- a/jjb/scripts/publish_docs.sh
+++ b/jjb/scripts/publish_docs.sh
@@ -17,32 +17,36 @@ echo "---> publish_docs.sh"
set -exuo pipefail
-S3_BUCKET="fdio-docs-s3-cloudfront-index"
CDN_URL="s3-docs.fd.io"
-PYTHON_SCRIPT="/w/workspace/publish_library.py"
if [[ ${JOB_NAME} == *merge* ]]; then
case "${JOB_NAME}" in
*"csit-trending"*)
- SITE_DIR="${WORKSPACE}/resources/tools/presentation/_build"
- s3_path="csit/${GERRIT_BRANCH}/trending"
+ workspace_dir="${WORKSPACE}/resources/tools/presentation/_build"
+ bucket_path="/csit/${GERRIT_BRANCH}/trending/"
;;
*"csit-report"*)
- SITE_DIR="${WORKSPACE}/resources/tools/presentation/_build"
- s3_path="csit/${GERRIT_BRANCH}/report"
+ workspace_dir="${WORKSPACE}/resources/tools/presentation/_build"
+ bucket_path="/csit/${GERRIT_BRANCH}/report/"
;;
*"csit-docs"*)
- SITE_DIR="${WORKSPACE}/resources/tools/doc_gen/_build"
- s3_path="csit/${GERRIT_BRANCH}/docs"
+ workspace_dir="${WORKSPACE}/resources/tools/doc_gen/_build"
+ bucket_path="/csit/${GERRIT_BRANCH}/docs/"
;;
*)
die "Unknown job: ${JOB_NAME}"
esac
- echo "INFO: S3 path $s3_path"
+ export TF_VAR_workspace_dir=$workspace_dir
+ export TF_VAR_bucket_path=$bucket_path
+ export AWS_SHARED_CREDENTIALS_FILE=$HOME/.aws/credentials
+ export AWS_DEFAULT_REGION="us-east-1"
echo "INFO: archiving docs to S3"
- python3 $PYTHON_SCRIPT deploy_docs "$S3_BUCKET" "$s3_path" "$SITE_DIR"
+ pushd ..
+ terraform init -no-color
+ terraform apply -no-color -auto-approve
+ popd
- echo "S3 docs: <a href=\"https://$CDN_URL/$s3_path\">https://$CDN_URL/$s3_path</a>"
+ echo "S3 docs: <a href=\"https://$CDN_URL/$bucket_path\">https://$CDN_URL/$bucket_path</a>"
fi
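
The builder ordering set in global-macros.yaml matters here: terraform_s3_docs_ship.sh writes /w/workspace/main.tf first, and publish_docs.sh then runs Terraform from the directory above ${WORKSPACE}, where that file is expected to live, relying on Terraform picking up any exported TF_VAR_<name> as the input variable <name>. A rough local dry run of the same flow could look like the sketch below; the branch, paths, and use of plan instead of apply are illustrative assumptions, not values taken from a real job:

    # write /w/workspace/main.tf the same way the Jenkins builder does
    # (assumes /w/workspace exists and is writable on this machine)
    bash jjb/scripts/terraform_s3_docs_ship.sh

    # supply the root-module variables and AWS settings via the environment;
    # the docs tree under TF_VAR_workspace_dir must already exist locally
    export TF_VAR_workspace_dir="${WORKSPACE}/resources/tools/doc_gen/_build"
    export TF_VAR_bucket_path="/csit/master/docs/"
    export AWS_SHARED_CREDENTIALS_FILE="$HOME/.aws/credentials"
    export AWS_DEFAULT_REGION="us-east-1"

    # plan instead of apply so nothing is uploaded to the bucket
    (cd /w/workspace && terraform init -no-color && terraform plan -no-color)
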
diff --git a/jjb/scripts/terraform_s3_docs_ship.sh b/jjb/scripts/terraform_s3_docs_ship.sh
new file mode 100644
index 000000000..5989e3e9a
--- /dev/null
+++ b/jjb/scripts/terraform_s3_docs_ship.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> terraform_s3_docs_ship.sh"
+
+set -exuo pipefail
+
+cat >"/w/workspace/main.tf" <<'END_OF_TERRAFORM_SCRIPT'
+provider "aws" {
+ region = "us-east-1"
+ profile = "default"
+ s3_force_path_style = false
+ skip_credentials_validation = true
+ skip_metadata_api_check = true
+ skip_requesting_account_id = true
+}
+
+locals {
+ mime_types = {
+ xml = "application/xml",
+ html = "text/html",
+ txt = "text/plain",
+ log = "text/plain",
+ css = "text/css",
+ md = "text/markdown",
+ rst = "text/x-rst",
+ csv = "text/csv",
+ svg = "image/svg+xml",
+ jpg = "image/jpeg",
+ png = "image/png",
+ gif = "image/gif",
+ js = "application/javascript",
+ pdf = "application/pdf",
+ json = "application/json",
+ otf = "font/otf",
+ ttf = "font/ttf",
+ woff = "font/woff",
+ woff2 = "font/woff2"
+ }
+}
+
+variable "workspace_dir" {
+ description = "Workspace base directory"
+ type = string
+}
+
+variable "file_match_pattern" {
+ description = "File matching pattern"
+ type = string
+ default = "**/*"
+}
+
+variable "bucket" {
+ description = "S3 bucket name"
+ type = string
+ default = "fdio-docs-s3-cloudfront-index"
+}
+
+variable "bucket_path" {
+ description = "S3 bucket path to key"
+ type = string
+}
+
+resource "aws_s3_bucket_object" "object" {
+ for_each = fileset(var.workspace_dir, var.file_match_pattern)
+
+ bucket = var.bucket
+ key = "${var.bucket_path}${each.value}"
+ source = "${var.workspace_dir}/${each.value}"
+
+ cache_control = "no-store,max-age=0,s-maxage=0"
+ etag = filemd5("${var.workspace_dir}/${each.value}")
+ content_type = lookup(
+ local.mime_types,
+ regex("\\.(?P<extension>[A-Za-z0-9]+)$", each.value).extension,
+ "application/octet-stream"
+ )
+}
+END_OF_TERRAFORM_SCRIPT
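
The content_type argument maps each uploaded key's file extension to an entry in the mime_types map and falls back to application/octet-stream for anything unlisted. The extension-to-MIME expression can be sanity-checked with terraform console; the snippet below is only an illustration, run from an empty scratch directory with a made-up file name:

    # evaluate the same lookup/regex pair used in the resource above
    echo 'lookup({html = "text/html"}, regex("\\.(?P<extension>[A-Za-z0-9]+)$", "index.html").extension, "application/octet-stream")' \
        | terraform console
    # expected output: "text/html"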