  1. #!/usr/bin/env bash
  2. set -e
  3. set -o pipefail
  4. usage() {
  5. cat >&2 <<'EOF'
  6. To publish the Docker documentation you need to set your access_key and secret_key in the docs/awsconfig file
  7. (with the keys in a [profile $AWS_S3_BUCKET] section - so you can have more than one set of keys in your file)
  8. and set the AWS_S3_BUCKET env var to the name of your bucket.
  9. If you're publishing the current release's documentation, also set `BUILD_ROOT=yes`
  10. make AWS_S3_BUCKET=docs-stage.docker.com docs-release
  11. will then push the documentation site to your s3 bucket.
  12. Note: you can add `OPTIONS=--dryrun` to see what will be done without sending to the server
  13. EOF
  14. exit 1
  15. }
  16. [ "$AWS_S3_BUCKET" ] || usage
  17. VERSION=$(cat VERSION)
  18. if [ "$AWS_S3_BUCKET" == "docs.docker.com" ]; then
  19. if [ "${VERSION%-dev}" != "$VERSION" ]; then
  20. echo "Please do not push '-dev' documentation to docs.docker.com ($VERSION)"
  21. exit 1
  22. fi
  23. cat > ./sources/robots.txt <<'EOF'
  24. User-agent: *
  25. Allow: /
  26. EOF
  27. else
  28. cat > ./sources/robots.txt <<'EOF'
  29. User-agent: *
  30. Disallow: /
  31. EOF
  32. fi
  33. # Remove the last version - 1.0.2-dev -> 1.0
  34. MAJOR_MINOR="v${VERSION%.*}"
  35. export MAJOR_MINOR
  36. export BUCKET=$AWS_S3_BUCKET
  37. export AWS_CONFIG_FILE=$(pwd)/awsconfig
  38. [ -e "$AWS_CONFIG_FILE" ] || usage
  39. export AWS_DEFAULT_PROFILE=$BUCKET
  40. echo "cfg file: $AWS_CONFIG_FILE ; profile: $AWS_DEFAULT_PROFILE"
  41. setup_s3() {
  42. echo "Create $BUCKET"
  43. # Try creating the bucket. Ignore errors (it might already exist).
  44. aws s3 mb --profile $BUCKET s3://$BUCKET 2>/dev/null || true
  45. # Check access to the bucket.
  46. echo "test $BUCKET exists"
  47. aws s3 --profile $BUCKET ls s3://$BUCKET
  48. # Make the bucket accessible through website endpoints.
  49. echo "make $BUCKET accessible as a website"
  50. #aws s3 website s3://$BUCKET --index-document index.html --error-document jsearch/index.html
  51. s3conf=$(cat s3_website.json | envsubst)
  52. echo
  53. echo $s3conf
  54. echo
  55. aws s3api --profile $BUCKET put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"
  56. }
# Build the documentation site with mkdocs (per mkdocs.yml in the cwd);
# the output is consumed from site/ by upload_current_documentation.
build_current_documentation() {
mkdocs build
}
  60. upload_current_documentation() {
  61. src=site/
  62. dst=s3://$BUCKET$1
  63. echo
  64. echo "Uploading $src"
  65. echo " to $dst"
  66. echo
  67. #s3cmd --recursive --follow-symlinks --preserve --acl-public sync "$src" "$dst"
  68. #aws s3 cp --profile $BUCKET --cache-control "max-age=3600" --acl public-read "site/search_content.json" "$dst"
  69. # a really complicated way to send only the files we want
  70. # if there are too many in any one set, aws s3 sync seems to fall over with 2 files to go
  71. # versions.html_fragment
  72. endings=( json txt html xml css js gif png JPG ttf svg woff html_fragment )
  73. for i in ${endings[@]}; do
  74. include=""
  75. for j in ${endings[@]}; do
  76. if [ "$i" != "$j" ];then
  77. include="$include --exclude *.$j"
  78. fi
  79. done
  80. include="--include *.$i $include"
  81. echo "uploading *.$i"
  82. run="aws s3 sync $OPTIONS --profile $BUCKET --cache-control \"max-age=3600\" --acl public-read \
  83. $include \
  84. --exclude *.text* \
  85. --exclude *.*~ \
  86. --exclude *Dockerfile \
  87. --exclude *.DS_Store \
  88. --exclude *.psd \
  89. --exclude *.ai \
  90. --exclude *.eot \
  91. --exclude *.otf \
  92. --exclude *.rej \
  93. --exclude *.rst \
  94. --exclude *.orig \
  95. --exclude *.py \
  96. $src $dst"
  97. echo "======================="
  98. #echo "$run"
  99. #echo "======================="
  100. $run
  101. done
  102. }
  103. if [ "$OPTIONS" != "--dryrun" ]; then
  104. setup_s3
  105. fi
  106. # Default to only building the version specific docs so we don't clober the latest by accident with old versions
  107. if [ "$BUILD_ROOT" == "yes" ]; then
  108. echo "Building root documentation"
  109. build_current_documentation
  110. upload_current_documentation
  111. fi
  112. #build again with /v1.0/ prefix
  113. sed -i "s/^site_url:.*/site_url: \/$MAJOR_MINOR\//" mkdocs.yml
  114. echo "Building the /$MAJOR_MINOR/ documentation"
  115. build_current_documentation
  116. upload_current_documentation "/$MAJOR_MINOR/"