release.sh
#!/usr/bin/env bash
set -e
set -o pipefail

usage() {
    cat >&2 <<'EOF'
To publish the Docker documentation you need to set your access_key and secret_key in the docs/awsconfig file
(with the keys in a [profile $AWS_S3_BUCKET] section - so you can have more than one set of keys in your file)
and set the AWS_S3_BUCKET env var to the name of your bucket.

If you're publishing the current release's documentation, also set `BUILD_ROOT=yes`.

    make AWS_S3_BUCKET=docs-stage.docker.com docs-release

will then push the documentation site to your s3 bucket.

Note: you can add `OPTIONS=--dryrun` to see what will be done without sending to the server.
EOF
    exit 1
}
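
# Typical invocation, per the usage text above (the bucket name is just the
# staging example; running the script directly instead of via the make
# target is assumed here):
#   AWS_S3_BUCKET=docs-stage.docker.com BUILD_ROOT=yes OPTIONS=--dryrun ./release.sh
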
[ "$AWS_S3_BUCKET" ] || usage

VERSION=$(cat VERSION)

if [ "$AWS_S3_BUCKET" == "docs.docker.com" ]; then
    if [ "${VERSION%-dev}" != "$VERSION" ]; then
        echo "Please do not push '-dev' documentation to docs.docker.com ($VERSION)"
        exit 1
    fi
    # Production bucket: let search engines index the site.
    cat > ./sources/robots.txt <<'EOF'
User-agent: *
Allow: /
EOF
else
    # Any other bucket is staging/test: keep crawlers out.
    cat > ./sources/robots.txt <<'EOF'
User-agent: *
Disallow: /
EOF
fi

# Strip the patch level from the version: 1.0.2-dev -> v1.0
MAJOR_MINOR="v${VERSION%.*}"
export MAJOR_MINOR

export BUCKET=$AWS_S3_BUCKET
export AWS_CONFIG_FILE=$(pwd)/awsconfig
[ -e "$AWS_CONFIG_FILE" ] || usage
export AWS_DEFAULT_PROFILE=$BUCKET

echo "cfg file: $AWS_CONFIG_FILE ; profile: $AWS_DEFAULT_PROFILE"
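
# The awsconfig file is a standard AWS CLI config file whose profile name
# must match the bucket. A sketch with placeholder keys:
#   [profile docs-stage.docker.com]
#   aws_access_key_id = AKIAXXXXXXXXXXXXXXXX
#   aws_secret_access_key = XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
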
setup_s3() {
    echo "Create $BUCKET"
    # Try creating the bucket. Ignore errors (it might already exist).
    aws s3 mb --profile $BUCKET s3://$BUCKET 2>/dev/null || true
    # Check access to the bucket.
    echo "test $BUCKET exists"
    aws s3 --profile $BUCKET ls s3://$BUCKET
    # Make the bucket accessible through website endpoints.
    echo "make $BUCKET accessible as a website"
    #aws s3 website s3://$BUCKET --index-document index.html --error-document jsearch/index.html
    s3conf=$(envsubst < s3_website.json)
    echo
    echo "$s3conf"
    echo
    aws s3api --profile $BUCKET put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"
}
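
# s3_website.json is an envsubst template for the put-bucket-website call.
# Going by the commented-out "aws s3 website" line above, it presumably
# expands to something like:
#   {"IndexDocument": {"Suffix": "index.html"},
#    "ErrorDocument": {"Key": "jsearch/index.html"}}
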
build_current_documentation() {
    mkdocs build
}
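
# "mkdocs build" renders the sources into site/ (mkdocs' default output
# directory, matching src= below), using whatever site_url is currently set
# in mkdocs.yml - the end of this script rewrites it per version.
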
upload_current_documentation() {
    src=site/
    dst=s3://$BUCKET$1

    cache=max-age=3600
    if [ "$NOCACHE" ]; then
        cache=no-cache
    fi

    echo
    echo "Uploading $src"
    echo "       to $dst"
    echo

    #s3cmd --recursive --follow-symlinks --preserve --acl-public sync "$src" "$dst"
    #aws s3 cp --profile $BUCKET --cache-control "max-age=3600" --acl public-read "site/search_content.json" "$dst"

    # Use a recursive "aws s3 cp" rather than "aws s3 sync": with a large set
    # of files, sync seems to fall over with a couple of files to go.
    include="--recursive"
    echo "uploading $src"
    run="aws s3 cp $src $dst $OPTIONS --profile $BUCKET --cache-control $cache --acl public-read $include"
    echo "======================="
    echo "$run"
    echo "======================="
    $run
}
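
# For example, with AWS_S3_BUCKET=docs-stage.docker.com the versioned upload
# at the bottom of this script runs roughly (a sketch; OPTIONS omitted):
#   aws s3 cp site/ s3://docs-stage.docker.com/v1.0/ --profile docs-stage.docker.com \
#       --cache-control max-age=3600 --acl public-read --recursive
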
invalidate_cache() {
    if [ -z "$DISTRIBUTION_ID" ]; then
        echo "Skipping Cloudfront cache invalidation"
        return
    fi

    dst=$1

    #aws cloudfront create-invalidation --profile docs.docker.com --distribution-id $DISTRIBUTION_ID --invalidation-batch '{"Paths":{"Quantity":1, "Items":["'+$file+'"]},"CallerReference":"19dec2014sventest1"}'
    # CloudFront support was still a preview feature of the AWS CLI.
    aws configure set preview.cloudfront true

    # Map the changed markdown sources to the site paths CloudFront serves.
    files=($(grep 'sources/.*$' changed-files | sed -E 's#.*docs/sources##' | sed -E 's#index\.md#index.html#' | sed -E 's#\.md#/index.html#'))
    files+=("/index.html")
    files+=("/versions.html_fragment")

    len=${#files[@]}

    # Build the create-invalidation command in a batchfile, then run it.
    echo "aws cloudfront create-invalidation --profile $AWS_S3_BUCKET --distribution-id $DISTRIBUTION_ID --invalidation-batch '" > batchfile
    echo "{\"Paths\":{\"Quantity\":$len," >> batchfile
    echo "\"Items\": [" >> batchfile
    #for file in $(cat changed-files | grep 'sources/.*$' | sed -E 's#.*docs/sources##' | sed -E 's#index\.md#index.html#' | sed -E 's#\.md#/index.html#')
    for file in "${files[@]}"
    do
        # The last item must not get a trailing comma.
        if [ "$file" == "${files[${#files[@]}-1]}" ]; then
            comma=""
        else
            comma=","
        fi
        echo "\"$dst$file\"$comma" >> batchfile
    done
    echo "]}, \"CallerReference\":" >> batchfile
    echo "\"$(date)\"}'" >> batchfile

    echo "-----"
    cat batchfile
    echo "-----"
    sh batchfile
    echo "-----"
}
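
# The generated batchfile holds one multi-line shell command; with a single
# changed page it looks roughly like (hypothetical path, ID, and date):
#   aws cloudfront create-invalidation --profile docs.docker.com --distribution-id E123EXAMPLE --invalidation-batch '
#   {"Paths":{"Quantity":3,
#   "Items": [
#   "/v1.0/installation/index.html",
#   "/index.html",
#   "/versions.html_fragment"
#   ]}, "CallerReference":
#   "Mon Jan  5 12:00:00 UTC 2015"}'
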
if [ "$OPTIONS" != "--dryrun" ]; then
    setup_s3
fi

# Default to only building the version-specific docs so we don't clobber the
# latest by accident with old versions.
if [ "$BUILD_ROOT" == "yes" ]; then
    echo "Building root documentation"
    build_current_documentation
    upload_current_documentation
    [ "$NOCACHE" ] || invalidate_cache
fi

# Build again with the version prefix (e.g. /v1.0/) as the site_url.
sed -i "s/^site_url:.*/site_url: \/$MAJOR_MINOR\//" mkdocs.yml
echo "Building the /$MAJOR_MINOR/ documentation"
build_current_documentation
upload_current_documentation "/$MAJOR_MINOR/"
[ "$NOCACHE" ] || invalidate_cache "/$MAJOR_MINOR"