#!/usr/bin/env bash
set -e
set -o pipefail
usage() {
	cat >&2 <<'EOF'
To publish the Grafana documentation, set your access_key and secret_key in the docs/awsconfig file
(with the keys in a [profile $AWS_S3_BUCKET] section, so you can keep more than one set of keys in the file)
and set the AWS_S3_BUCKET env var to the name of your bucket.
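For example, awsconfig might look like this (placeholder values, not real keys):

    [profile docs.grafana.org]
    aws_access_key_id = AKIAIOSFODNN7EXAMPLE
    aws_secret_access_key = wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY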
If you're publishing the current release's documentation, also set `BUILD_ROOT=yes`.
Running
    make AWS_S3_BUCKET=docs-stage.docker.com docs-release
will then push the documentation site to your S3 bucket.
Note: you can add `OPTIONS=--dryrun` to see what would be done without sending anything to the server.
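For example:
    make AWS_S3_BUCKET=docs-stage.docker.com OPTIONS=--dryrun docs-release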
EOF
	exit 1
}

[ "$AWS_S3_BUCKET" ] || usage
VERSION=$(cat VERSION)
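
# Only the production site (docs.grafana.org) should be crawlable; any other
# bucket gets a robots.txt that blocks all crawlers.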
if [ "$AWS_S3_BUCKET" == "docs.grafana.org" ]; then
if [ "${VERSION%-dev}" != "$VERSION" ]; then
echo "Please do not push '-dev' documentation to docs.grafana.org ($VERSION)"
exit 1
fi
cat > ./sources/robots.txt <<'EOF'
User-agent: *
Allow: /
EOF
else
cat > ./sources/robots.txt <<'EOF'
User-agent: *
Disallow: /
EOF
fi
# Strip the patch component and add a "v" prefix: 1.0.2-dev -> v1.0
MAJOR_MINOR="v${VERSION%.*}"
export MAJOR_MINOR
export BUCKET=$AWS_S3_BUCKET
export AWS_CONFIG_FILE=$(pwd)/awsconfig
[ -e "$AWS_CONFIG_FILE" ] || usage
export AWS_DEFAULT_PROFILE=$BUCKET
echo "cfg file: $AWS_CONFIG_FILE ; profile: $AWS_DEFAULT_PROFILE"

setup_s3() {
	echo "Create $BUCKET"
	# Try creating the bucket. Ignore errors (it might already exist).
	aws s3 mb --profile "$BUCKET" "s3://$BUCKET" 2>/dev/null || true
	# Check access to the bucket.
	echo "test $BUCKET exists"
	aws s3 --profile "$BUCKET" ls "s3://$BUCKET"
	# Make the bucket accessible through website endpoints.
	echo "make $BUCKET accessible as a website"
	#aws s3 website s3://$BUCKET --index-document index.html --error-document jsearch/index.html
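	# s3_website.json holds the put-bucket-website payload; envsubst expands any
	# environment variables (e.g. $BUCKET) referenced inside it. A rough sketch
	# of its likely shape, inferred from the commented-out command above (an
	# assumption, not the file's verified contents):
	#   {
	#     "IndexDocument": { "Suffix": "index.html" },
	#     "ErrorDocument": { "Key": "jsearch/index.html" }
	#   }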
	s3conf=$(envsubst < s3_website.json)
	echo
	echo "$s3conf"
	echo
	aws s3api --profile "$BUCKET" put-bucket-website --bucket "$BUCKET" --website-configuration "$s3conf"
}

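# Build the site with mkdocs and gzip the search index so it can be served
# with Content-Encoding: gzip (see the upload step below).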
build_current_documentation() {
	mkdocs build
	cd site/
	gzip -9k -f search_content.json
	cd ..
}

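# Upload the built site to s3://$BUCKET$1; $1 is an optional path prefix such
# as "/v1.0/" (empty means the bucket root).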
upload_current_documentation() {
	src=site/
	dst=s3://$BUCKET$1

	cache=max-age=3600
	if [ "$NOCACHE" ]; then
		cache=no-cache
	fi

	echo
	echo "Uploading $src"
	echo "       to $dst"
	echo

	# A really complicated way to send only the files we want:
	# if there are too many in any one set, aws s3 sync seems to fall over with 2 files to go.
	#  versions.html_fragment
	# (The extension list is an assumption; adjust it to match the files mkdocs emits.)
	for i in html html_fragment css js json png gif svg eot ttf woff txt xml; do
		echo "uploading *.$i"
		# $OPTIONS is intentionally unquoted so an empty value disappears.
		run=(aws s3 cp "$src" "$dst" $OPTIONS --profile "$BUCKET" --cache-control "$cache"
			--acl public-read --recursive --exclude '*' --include "*.$i")
		echo "======================="
		echo "${run[*]}"
		echo "======================="
		"${run[@]}"
	done

	# Make sure the search_content.json.gz file has the right content-encoding
	aws s3 cp --profile "$BUCKET" --cache-control "$cache" --content-encoding="gzip" --acl public-read "site/search_content.json.gz" "$dst"
}

if [ "$OPTIONS" != "--dryrun" ]; then
setup_s3
fi
sed -i "s/^site_url:.*/site_url: http:\/\/docs.grafana.org\//" mkdocs.yml
cat mkdocs.yml

# Default to building only the version-specific docs so we don't clobber the latest docs by accident with an old version.
if [ "$BUILD_ROOT" == "yes" ]; then
	echo "Building root documentation"
	build_current_documentation
	upload_current_documentation
fi

# Build again under the /$MAJOR_MINOR/ prefix (e.g. /v1.0/)
echo "Building the /$MAJOR_MINOR/ documentation"
build_current_documentation
upload_current_documentation "/$MAJOR_MINOR/"