Drone: Add release pipelines (#28083)

* Drone: Implement release pipeline

Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>

* Drone: Implement test-release pipeline

Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>
Arve Knudsen authored on 2020-10-07 14:22:15 +02:00, committed by GitHub
parent 61749dee23
commit d2b957b726
8 changed files with 2119 additions and 345 deletions

View File

@@ -2,11 +2,6 @@ version: 2.1
aliases:
# Workflow filters
- &filter-only-release
branches:
only: chore/test-release-pipeline
tags:
only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
- &filter-only-master
branches:
only: master
@@ -110,13 +105,6 @@ jobs:
resource_class: large
executor: grafana-build
steps:
- run:
name: Exit if enterprise and forked PR
command: |
if [[ "<< parameters.edition >>" == "enterprise" && -n "$CIRCLE_PR_NUMBER" ]]; then
echo "Nothing to do for forked PRs, so marking this step successful"
circleci step halt
fi
- checkout
- restore_cache:
keys:
@@ -188,13 +176,8 @@ jobs:
- run:
name: Build internal Grafana plug-ins
command: |
if [[ -n "$CIRCLE_PR_NUMBER" ]]; then
# This is a forked PR, so don't sign as it requires an API secret
/tmp/grabpl build-plugins --jobs 2 --edition << parameters.edition >>
else
export GRAFANA_API_KEY=$GRAFANA_COM_API_KEY
/tmp/grabpl build-plugins --jobs 2 --edition << parameters.edition >> --sign --signing-admin
fi
export GRAFANA_API_KEY=$GRAFANA_COM_API_KEY
/tmp/grabpl build-plugins --jobs 2 --edition << parameters.edition >> --sign --signing-admin
- run:
name: Move artifacts
command: |
@@ -341,38 +324,6 @@ jobs:
command: "./scripts/ci-job-succeeded.sh"
when: on_success
release-next-packages:
executor: grafana-build
steps:
- run:
name: Exit if forked PR
command: |
if [[ -n "$CIRCLE_PR_NUMBER" ]]; then
echo "Nothing to do for forked PRs, so marking this step successful"
circleci step halt
fi
- checkout
- run:
name: CI job started
command: ./scripts/ci-job-started.sh
- run:
name: Bootstrap lerna
command: npx lerna bootstrap
- run:
name: npm - Prepare auth token
command: "echo //registry.npmjs.org/:_authToken=$NPM_TOKEN >> ~/.npmrc"
- run:
name: Release next packages
command: ./scripts/circle-release-next-packages.sh
- run:
name: CI job failed
command: ./scripts/ci-job-failed.sh
when: on_fail
- run:
name: CI job succeeded
command: ./scripts/ci-job-succeeded.sh
when: on_success
package-oss:
executor: grafana-build
steps:
@@ -389,11 +340,6 @@ jobs:
- run:
name: Package Grafana
command: |
if [[ -n "$CIRCLE_PR_NUMBER" ]]; then
echo Using test GPG key pair since building a forked PR
source scripts/build/gpg-test-vars.sh
fi
# Necessary for signing bundled plugins
export GRAFANA_API_KEY=$GRAFANA_COM_API_KEY
if [[ -n $CIRCLE_TAG ]]; then
@@ -528,11 +474,6 @@ jobs:
yarn install --frozen-lockfile --no-progress
yarn storybook:build
if [[ -n "$CIRCLE_PR_NUMBER" ]]; then
echo "Nothing to do for forked PRs, so marking this step successful"
circleci step halt
fi
if [[ $CIRCLE_BRANCH == "chore/test-release-pipeline" ]]; then
# We're testing the release pipeline
echo Testing release
@@ -830,9 +771,6 @@ jobs:
elif [[ $CIRCLE_BRANCH == "chore/test-release-pipeline" ]]; then
# We're testing the release pipeline
./scripts/circle-test-frontend.sh --edition << parameters.edition >> v7.0.0-test
else
# A master build
./scripts/circle-test-frontend.sh --edition << parameters.edition >> --build-id $CIRCLE_WORKFLOW_ID
fi
- store_test_results:
path: reports/junit
@@ -867,10 +805,6 @@ jobs:
# We're testing the release pipeline
/tmp/grabpl test-backend --github-token "${GITHUB_GRAFANABOT_TOKEN}" --edition << parameters.edition >> \
v7.0.0-test
else
# A master build
/tmp/grabpl test-backend --github-token "${GITHUB_GRAFANABOT_TOKEN}" --edition << parameters.edition >> \
--build-id $CIRCLE_WORKFLOW_ID
fi
- run:
name: CI job failed
@@ -881,45 +815,9 @@ jobs:
command: "./scripts/ci-job-succeeded.sh"
when: on_success
build-docs-website:
executor: base
steps:
- checkout
- setup_remote_docker
- run:
name: CI job started
command: "./scripts/ci-job-started.sh"
- run:
name: Build Grafana docs website
command: |
# Use latest revision here, since we want to catch if it breaks
IMAGE=grafana/docs-base:latest
# In order to copy sources into the remote container, we employ a trick: create a container with a
# volume, copy the sources into it, and then launch the build container with the volume from the
# other container.
docker create -v /hugo/content/docs/grafana --name docs alpine:3.11 /bin/true
docker cp ${PWD}/docs/sources docs:/hugo/content/docs/grafana/latest
docker run --volumes-from docs $IMAGE /bin/bash -c 'make prod'
- run:
name: CI job failed
command: "./scripts/ci-job-failed.sh"
when: on_fail
- run:
name: CI job succeeded
command: "./scripts/ci-job-succeeded.sh"
when: on_success
release-packages:
executor: node
steps:
- run:
name: Exit if forked PR
command: |
if [[ -n "$CIRCLE_PR_NUMBER" ]]; then
echo "Nothing to do for forked PRs, so marking this step successful"
circleci step halt
fi
- checkout
- run:
name: CI job started
@@ -1223,7 +1121,7 @@ workflows:
requires:
- package-enterprise
- release-packages:
filters: *filter-only-release
filters: *filter-release
requires:
- end-to-end-tests
- mysql-integration-test

View File

@@ -1,5 +1,8 @@
load('scripts/lib.star', 'pr_pipelines', 'master_pipelines')
load('scripts/pr.star', 'pr_pipelines')
load('scripts/master.star', 'master_pipelines')
load('scripts/release.star', 'release_pipelines', 'test_release_pipelines')
def main(ctx):
edition = 'oss'
return pr_pipelines(edition=edition) + master_pipelines(edition=edition)
return pr_pipelines(edition=edition) + master_pipelines(edition=edition) + release_pipelines() + \
test_release_pipelines()
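
The .drone.yml entry below is the generated configuration: it is presumably rendered from .drone.star and the Starlark modules it loads rather than edited by hand. A minimal sketch of regenerating and checking it with drone-cli, assuming the default .drone.star/.drone.yml paths; the exact subcommand flags are an assumption and may differ between drone-cli versions:

    # Render the Starlark sources into the Drone YAML configuration.
    drone starlark --source .drone.star --target .drone.yml
    # Sanity-check the rendered file before committing it.
    drone lint .drone.yml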

.drone.yml (1535 changed lines)

File diff suppressed because it is too large

View File

@@ -7,7 +7,7 @@ PACKAGES=(ui toolkit data runtime e2e e2e-selectors)
source "$(dirname "$0")/helpers/exit-if-fail.sh"
function prepare_version_commit () {
echo $'\nCommitting version changes. This commit will not be checked-in!'
echo $'\nCommitting version changes. This commit will not be pushed!'
git config --global user.email "drone@grafana.com"
git config --global user.name "Drone"
git commit -am "Version commit"
@@ -48,9 +48,9 @@ else
# repository. We will not push this commit to the origin repository.
prepare_version_commit
# Frontend packages have already been versioned and built by the
# build-frontend step in drone. We will only unpublish the previous
# canary version and publish the current built version as the new
# latest canary build.
for PACKAGE in "${PACKAGES[@]}"
do
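
A minimal sketch of the canary flow described in the comment above, for illustration only; the @grafana package scope, directory layout, and exact npm invocations are assumptions rather than what this script actually runs:

    # For each frontend package: drop the previous canary version, then
    # publish the freshly built one under the canary dist-tag.
    for PACKAGE in ui toolkit data runtime e2e e2e-selectors; do
      PREV=$(npm view "@grafana/${PACKAGE}" dist-tags.canary 2>/dev/null || true)
      if [ -n "$PREV" ]; then
        npm unpublish "@grafana/${PACKAGE}@${PREV}"
      fi
      npm publish "packages/grafana-${PACKAGE}" --tag canary
    done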

View File

@@ -1,194 +1,16 @@
grabpl_version = '0.5.17'
build_image = 'grafana/build-container:1.2.27'
publish_image = 'grafana/grafana-ci-deploy:1.2.6'
grafana_docker_image = 'grafana/drone-grafana-docker:0.3.2'
alpine_image = 'alpine:3.12'
windows_image = 'mcr.microsoft.com/windows:1809'
grabpl_version = '0.5.16'
git_image = 'alpine/git:v2.26.2'
dockerize_version = '0.6.1'
wix_image = 'grafana/ci-wix:0.1.1'
def pr_pipelines(edition):
version_mode = 'pr'
services = [
{
'name': 'postgres',
'image': 'postgres:12.3-alpine',
'environment': {
'POSTGRES_USER': 'grafanatest',
'POSTGRES_PASSWORD': 'grafanatest',
'POSTGRES_DB': 'grafanatest',
},
},
{
'name': 'mysql',
'image': 'mysql:5.6.48',
'environment': {
'MYSQL_ROOT_PASSWORD': 'rootpass',
'MYSQL_DATABASE': 'grafana_tests',
'MYSQL_USER': 'grafana',
'MYSQL_PASSWORD': 'password',
},
},
]
variants = ['linux-x64', 'linux-x64-musl', 'osx64', 'win64',]
steps = [
lint_backend_step(edition),
codespell_step(),
shellcheck_step(),
test_backend_step(),
test_frontend_step(),
build_backend_step(edition=edition, variants=variants),
build_frontend_step(edition=edition),
build_plugins_step(edition=edition),
package_step(edition=edition, variants=variants),
e2e_tests_server_step(),
e2e_tests_step(),
build_storybook_step(edition),
build_docs_website_step(),
copy_packages_for_docker_step(),
build_docker_images_step(edition=edition, archs=['amd64',]),
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
windows_steps = get_windows_steps(edition=edition, version_mode=version_mode)
if edition == 'enterprise':
steps.append(benchmark_ldap_step())
services.append(ldap_service())
trigger = {
'event': ['pull_request',],
}
return [
pipeline(
name='test-pr', edition=edition, trigger=trigger, services=services, steps=steps,
version_mode=version_mode,
),
pipeline(
name='windows-pr', edition=edition, trigger=trigger, steps=windows_steps, platform='windows',
version_mode=version_mode,
),
]
def master_steps(edition, is_downstream=False):
publish = edition != 'enterprise' or is_downstream
steps = [
enterprise_downstream_step(edition),
lint_backend_step(edition),
codespell_step(),
shellcheck_step(),
test_backend_step(),
test_frontend_step(),
frontend_metrics_step(edition=edition),
build_backend_step(edition=edition, is_downstream=is_downstream),
build_frontend_step(edition=edition, is_downstream=is_downstream),
build_plugins_step(edition=edition, sign=True),
package_step(edition=edition, sign=True, is_downstream=is_downstream),
e2e_tests_server_step(),
e2e_tests_step(),
build_storybook_step(edition=edition),
publish_storybook_step(edition=edition),
build_docs_website_step(),
copy_packages_for_docker_step(),
build_docker_images_step(edition=edition, publish=publish),
build_docker_images_step(edition=edition, ubuntu=True, publish=publish),
postgres_integration_tests_step(),
mysql_integration_tests_step(),
release_next_npm_packages_step(edition),
upload_packages_step(edition, is_downstream),
deploy_to_kubernetes_step(edition, is_downstream),
]
windows_steps = get_windows_steps(edition=edition, version_mode='master', is_downstream=is_downstream)
publish_steps = [
publish_packages_step(edition, is_downstream),
]
return steps, windows_steps, publish_steps
def master_pipelines(edition):
version_mode = 'master'
services = [
{
'name': 'postgres',
'image': 'postgres:12.3-alpine',
'environment': {
'POSTGRES_USER': 'grafanatest',
'POSTGRES_PASSWORD': 'grafanatest',
'POSTGRES_DB': 'grafanatest',
},
},
{
'name': 'mysql',
'image': 'mysql:5.6.48',
'environment': {
'MYSQL_ROOT_PASSWORD': 'rootpass',
'MYSQL_DATABASE': 'grafana_tests',
'MYSQL_USER': 'grafana',
'MYSQL_PASSWORD': 'password',
},
},
]
trigger = {
'event': ['push',],
'branch': 'master',
}
steps, windows_steps, publish_steps = master_steps(edition=edition)
if edition == 'enterprise':
steps.append(benchmark_ldap_step())
services.append(ldap_service())
pipelines = [
pipeline(
name='build-master', edition=edition, trigger=trigger, services=services, steps=steps,
version_mode=version_mode,
),
pipeline(
name='windows-master', edition=edition, trigger=trigger, steps=windows_steps, platform='windows',
depends_on=['build-master'], version_mode=version_mode,
),
]
if edition != 'enterprise':
pipelines.append(pipeline(
name='publish-master', edition=edition, trigger=trigger, steps=publish_steps,
depends_on=['build-master', 'windows-master',], install_deps=False, version_mode=version_mode,
))
notify_trigger = dict(trigger, status = ['failure'])
pipelines.append(notify_pipeline(
name='notify-master', slack_channel='grafana-ci-notifications', trigger=notify_trigger,
depends_on=['build-master', 'windows-master', 'publish-master'],
))
if edition == 'enterprise':
# Add downstream enterprise pipelines triggerable from OSS builds
trigger = {
'event': ['custom',],
}
steps, windows_steps, publish_steps = master_steps(edition=edition, is_downstream=True)
pipelines.append(pipeline(
name='build-master-downstream', edition=edition, trigger=trigger, services=services, steps=steps,
is_downstream=True, version_mode=version_mode,
))
pipelines.append(pipeline(
name='windows-master-downstream', edition=edition, trigger=trigger, steps=windows_steps,
platform='windows', depends_on=['build-master-downstream'], is_downstream=True, version_mode=version_mode,
))
pipelines.append(pipeline(
name='publish-master-downstream', edition=edition, trigger=trigger, steps=publish_steps,
depends_on=['build-master-downstream', 'windows-master-downstream'], is_downstream=True, install_deps=False,
version_mode=version_mode,
))
notify_trigger = dict(trigger, status = ['failure'])
pipelines.append(notify_pipeline(
name='notify-master-downstream', slack_channel='grafana-enterprise-ci-notifications', trigger=notify_trigger,
depends_on=['build-master-downstream', 'windows-master-downstream', 'publish-master-downstream'],
))
return pipelines
test_release_ver = 'v7.3.0-test'
def pipeline(
name, edition, trigger, steps, version_mode, services=[], platform='linux', depends_on=[],
name, edition, trigger, steps, ver_mode, services=[], platform='linux', depends_on=[],
is_downstream=False, install_deps=True,
):
if platform != 'windows':
@@ -211,7 +33,7 @@ def pipeline(
'trigger': trigger,
'services': services,
'steps': init_steps(
edition, platform, is_downstream=is_downstream, install_deps=install_deps, version_mode=version_mode,
edition, platform, is_downstream=is_downstream, install_deps=install_deps, ver_mode=ver_mode,
) + steps,
'depends_on': depends_on,
}
@@ -225,6 +47,7 @@ def pipeline(
return pipeline
def notify_pipeline(name, slack_channel, trigger, depends_on=[]):
trigger = dict(trigger, status = ['failure'])
return {
'kind': 'pipeline',
'type': 'docker',
@@ -253,7 +76,7 @@ def slack_step(channel):
},
}
def init_steps(edition, platform, version_mode, is_downstream=False, install_deps=True):
def init_steps(edition, platform, ver_mode, is_downstream=False, install_deps=True):
if platform == 'windows':
return [
{
@@ -265,6 +88,13 @@ def init_steps(edition, platform, version_mode, is_downstream=False, install_deps=True):
},
]
common_cmds = []
if ver_mode == 'release':
common_cmds.append('./bin/grabpl verify-version ${DRONE_TAG}')
elif ver_mode == 'test-release':
common_cmds.append('./bin/grabpl verify-version {}'.format(test_release_ver))
identify_runner_step = {
'name': 'identify-runner',
'image': alpine_image,
@@ -274,14 +104,12 @@ def init_steps(edition, platform, version_mode, is_downstream=False, install_deps=True):
}
if install_deps:
common_cmds = [
common_cmds.extend([
'curl -fLO https://github.com/jwilder/dockerize/releases/download/v$${DOCKERIZE_VERSION}/dockerize-linux-amd64-v$${DOCKERIZE_VERSION}.tar.gz',
'tar -C bin -xzvf dockerize-linux-amd64-v$${DOCKERIZE_VERSION}.tar.gz',
'rm dockerize-linux-amd64-v$${DOCKERIZE_VERSION}.tar.gz',
'yarn install --frozen-lockfile --no-progress',
]
else:
common_cmds = []
])
if edition == 'enterprise':
if is_downstream:
source_commit = ' $${SOURCE_COMMIT}'
@@ -418,7 +246,10 @@ def ldap_service():
},
}
def build_storybook_step(edition):
def build_storybook_step(edition, ver_mode):
if edition == 'enterprise' and ver_mode in ('release', 'test-release'):
return None
return {
'name': 'build-storybook',
'image': build_image,
@@ -431,10 +262,27 @@ def build_storybook_step(edition):
],
}
def publish_storybook_step(edition):
def publish_storybook_step(edition, ver_mode):
if edition == 'enterprise':
return None
if ver_mode == 'test-release':
commands = [
'echo Testing release',
]
else:
if ver_mode == 'release':
channels = ['latest', '${DRONE_TAG}',]
else:
channels = ['canary',]
commands = [
'printenv GCP_KEY | base64 -d > /tmp/gcpkey.json',
'gcloud auth activate-service-account --key-file=/tmp/gcpkey.json',
] + [
'gsutil -m rsync -d -r ./packages/grafana-ui/dist/storybook gs://grafana-storybook/{}'.format(c)
for c in channels
]
return {
'name': 'publish-storybook',
'image': publish_image,
@@ -446,22 +294,44 @@ def publish_storybook_step(edition):
'from_secret': 'gcp_key',
},
},
'commands': [
'printenv GCP_KEY | base64 -d > /tmp/gcpkey.json',
'gcloud auth activate-service-account --key-file=/tmp/gcpkey.json',
'gsutil -m rsync -d -r ./packages/grafana-ui/dist/storybook gs://grafana-storybook/canary',
],
'commands': commands,
}
def build_backend_step(edition, variants=None, is_downstream=False):
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
def build_backend_step(edition, ver_mode, variants=None, is_downstream=False):
if variants:
variants_str = ' --variants {}'.format(','.join(variants))
else:
variants_str = ''
# TODO: Convert number of jobs to percentage
if ver_mode == 'release':
env = {
'GITHUB_TOKEN': {
'from_secret': 'github_token',
},
}
cmd = './bin/grabpl build-backend --jobs 8 --edition {} --github-token $${{GITHUB_TOKEN}} --no-pull-enterprise ${{DRONE_TAG}}'.format(
edition,
)
elif ver_mode == 'test-release':
env = {
'GITHUB_TOKEN': {
'from_secret': 'github_token',
},
}
cmd = './bin/grabpl build-backend --jobs 8 --edition {} --github-token $${{GITHUB_TOKEN}} --no-pull-enterprise {}'.format(
edition, test_release_ver,
)
else:
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
env = {}
cmd = './bin/grabpl build-backend --jobs 8 --edition {} --build-id {}{} --no-pull-enterprise'.format(
edition, build_no, variants_str,
)
return {
'name': 'build-backend',
'image': build_image,
@@ -470,19 +340,29 @@ def build_backend_step(edition, variants=None, is_downstream=False):
'lint-backend',
'test-backend',
],
'environment': env,
'commands': [
# TODO: Convert number of jobs to percentage
'./bin/grabpl build-backend --jobs 8 --edition {} --build-id {}{} --no-pull-enterprise'.format(
edition, build_no, variants_str,
),
cmd,
],
}
def build_frontend_step(edition, is_downstream=False):
def build_frontend_step(edition, ver_mode, is_downstream=False):
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
# TODO: Use percentage for num jobs
if ver_mode == 'release':
cmd = './bin/grabpl build-frontend --jobs 8 --github-token $${GITHUB_TOKEN} --no-install-deps ' + \
'--edition {} --no-pull-enterprise ${{DRONE_TAG}}'.format(edition)
elif ver_mode == 'test-release':
cmd = './bin/grabpl build-frontend --jobs 8 --github-token $${GITHUB_TOKEN} --no-install-deps ' + \
'--edition {} --no-pull-enterprise {}'.format(edition, test_release_ver)
else:
cmd = './bin/grabpl build-frontend --jobs 8 --no-install-deps --edition {} '.format(edition) + \
'--build-id {} --no-pull-enterprise'.format(build_no)
return {
'name': 'build-frontend',
'image': build_image,
@@ -491,9 +371,7 @@ def build_frontend_step(edition, is_downstream=False):
'test-frontend',
],
'commands': [
# TODO: Use percentage for num jobs
'./bin/grabpl build-frontend --jobs 8 --no-install-deps --edition {} '.format(edition) +
'--build-id {} --no-pull-enterprise'.format(build_no),
cmd,
],
}
@@ -612,21 +490,20 @@ def shellcheck_step():
],
}
def package_step(edition, variants=None, sign=False, is_downstream=False):
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
def package_step(edition, ver_mode, variants=None, is_downstream=False):
if variants:
variants_str = ' --variants {}'.format(','.join(variants))
else:
variants_str = ''
if sign:
if ver_mode in ('master', 'release', 'test-release',):
sign_args = ' --sign'
env = {
'GRAFANA_API_KEY': {
'from_secret': 'grafana_api_key',
},
'GITHUB_TOKEN': {
'from_secret': 'github_token',
},
'GPG_PRIV_KEY': {
'from_secret': 'gpg_priv_key',
},
@@ -643,6 +520,25 @@ def package_step(edition, variants=None, sign=False, is_downstream=False):
env = None
test_args = '. scripts/build/gpg-test-vars.sh && '
# TODO: Use percentage for jobs
if ver_mode == 'release':
cmd = '{}./bin/grabpl package --jobs 8 --edition {} '.format(test_args, edition) + \
'--github-token $${{GITHUB_TOKEN}} --no-pull-enterprise{} ${{DRONE_TAG}}'.format(
sign_args
)
elif ver_mode == 'test-release':
cmd = '{}./bin/grabpl package --jobs 8 --edition {} '.format(test_args, edition) + \
'--github-token $${{GITHUB_TOKEN}} --no-pull-enterprise{} {}'.format(
sign_args, test_release_ver,
)
else:
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
cmd = '{}./bin/grabpl package --jobs 8 --edition {} '.format(test_args, edition) + \
'--build-id {} --no-pull-enterprise{}{}'.format(build_no, variants_str, sign_args)
return {
'name': 'package',
'image': build_image,
@@ -657,9 +553,7 @@ def package_step(edition, variants=None, sign=False, is_downstream=False):
],
'environment': env,
'commands': [
# TODO: Use percentage for jobs
'{}./bin/grabpl package --jobs 8 --edition {} '.format(test_args, edition) +
'--build-id {} --no-pull-enterprise{}{}'.format(build_no, variants_str, sign_args),
cmd,
],
}
@@ -721,7 +615,10 @@ def copy_packages_for_docker_step():
],
}
def build_docker_images_step(edition, archs=None, ubuntu=False, publish=False):
def build_docker_images_step(edition, ver_mode, archs=None, ubuntu=False, publish=False):
if ver_mode == 'test-release':
publish = False
sfx = ''
if ubuntu:
sfx = '-ubuntu'
@@ -818,7 +715,7 @@ def release_next_npm_packages_step(edition):
],
}
def deploy_to_kubernetes_step(edition, is_downstream):
def deploy_to_kubernetes_step(edition, is_downstream=False):
if edition != 'enterprise' or not is_downstream:
return None
@@ -838,10 +735,17 @@ def deploy_to_kubernetes_step(edition, is_downstream):
],
}
def upload_packages_step(edition, is_downstream):
if edition == 'enterprise' and not is_downstream:
def upload_packages_step(edition, ver_mode, is_downstream=False):
if ver_mode == 'master' and edition == 'enterprise' and not is_downstream:
return None
if ver_mode == 'test-release':
cmd = './bin/grabpl upload-packages --edition {} '.format(edition) + \
'--deb-db-bucket grafana-testing-aptly-db --deb-repo-bucket grafana-testing-repo --packages-bucket ' + \
'grafana-downloads-test --rpm-repo-bucket grafana-testing-repo --dry-run'
else:
cmd = './bin/grabpl upload-packages --edition {}'.format(edition)
return {
'name': 'upload-packages',
'image': publish_image,
@@ -898,7 +802,7 @@ def publish_packages_step(edition, is_downstream):
],
}
def get_windows_steps(edition, version_mode, is_downstream=False):
def get_windows_steps(edition, ver_mode, is_downstream=False):
if not is_downstream:
source_commit = ''
else:
@@ -907,10 +811,6 @@ def get_windows_steps(edition, version_mode, is_downstream=False):
sfx = ''
if edition == 'enterprise':
sfx = '-enterprise'
if not is_downstream:
build_no = 'DRONE_BUILD_NUMBER'
else:
build_no = 'SOURCE_BUILD_NUMBER'
steps = [
{
'name': 'initialize',
@@ -921,7 +821,20 @@ def get_windows_steps(edition, version_mode, is_downstream=False):
],
},
]
if version_mode == 'master' and (edition != 'enterprise' or is_downstream):
if (ver_mode == 'master' and (edition != 'enterprise' or is_downstream)) or ver_mode in ('release', 'test-release'):
if ver_mode == 'release':
ver_part = '$$env:DRONE_TAG'
dir = 'release'
elif ver_mode == 'test-release':
ver_part = test_release_ver
dir = 'release'
else:
dir = 'master'
if not is_downstream:
build_no = 'DRONE_BUILD_NUMBER'
else:
build_no = 'SOURCE_BUILD_NUMBER'
ver_part = '--build-id $$env:{}'.format(build_no)
installer_commands = [
'$$gcpKey = $$env:GCP_KEY',
'[System.Text.Encoding]::UTF8.GetString([System.Convert]::FromBase64String($$gcpKey)) > gcpkey.json',
@@ -930,10 +843,10 @@ def get_windows_steps(edition, version_mode, is_downstream=False):
'gcloud auth activate-service-account --key-file=gcpkey.json',
'rm gcpkey.json',
'cp C:\\App\\nssm-2.24.zip .',
'.\\grabpl.exe windows-installer --edition {} --build-id $$env:{}'.format(edition, build_no),
'.\\grabpl.exe windows-installer --edition {} {}'.format(edition, ver_part),
'$$fname = ((Get-Childitem grafana*.msi -name) -split "`n")[0]',
'gsutil cp $$fname gs://grafana-downloads/{}/{}/'.format(edition, version_mode),
'gsutil cp "$$fname.sha256" gs://grafana-downloads/{}/{}/'.format(edition, version_mode),
'gsutil cp $$fname gs://grafana-downloads/{}/{}/'.format(edition, dir),
'gsutil cp "$$fname.sha256" gs://grafana-downloads/{}/{}/'.format(edition, dir),
]
steps.append({
'name': 'build-windows-installer',
@@ -983,3 +896,26 @@ def get_windows_steps(edition, version_mode, is_downstream=False):
])
return steps
def integration_test_services():
return [
{
'name': 'postgres',
'image': 'postgres:12.3-alpine',
'environment': {
'POSTGRES_USER': 'grafanatest',
'POSTGRES_PASSWORD': 'grafanatest',
'POSTGRES_DB': 'grafanatest',
},
},
{
'name': 'mysql',
'image': 'mysql:5.6.48',
'environment': {
'MYSQL_ROOT_PASSWORD': 'rootpass',
'MYSQL_DATABASE': 'grafana_tests',
'MYSQL_USER': 'grafana',
'MYSQL_PASSWORD': 'password',
},
},
]

scripts/master.star (new file, 130 lines)
View File

@@ -0,0 +1,130 @@
load(
'scripts/lib.star',
'pipeline',
'lint_backend_step',
'codespell_step',
'shellcheck_step',
'test_backend_step',
'test_frontend_step',
'build_backend_step',
'build_frontend_step',
'build_plugins_step',
'package_step',
'e2e_tests_server_step',
'e2e_tests_step',
'build_storybook_step',
'build_docs_website_step',
'copy_packages_for_docker_step',
'build_docker_images_step',
'postgres_integration_tests_step',
'mysql_integration_tests_step',
'get_windows_steps',
'benchmark_ldap_step',
'ldap_service',
'enterprise_downstream_step',
'frontend_metrics_step',
'publish_storybook_step',
'release_next_npm_packages_step',
'upload_packages_step',
'deploy_to_kubernetes_step',
'publish_packages_step',
'notify_pipeline',
'integration_test_services',
)
ver_mode = 'master'
def get_steps(edition, is_downstream=False):
publish = edition != 'enterprise' or is_downstream
steps = [
enterprise_downstream_step(edition),
lint_backend_step(edition),
codespell_step(),
shellcheck_step(),
test_backend_step(),
test_frontend_step(),
frontend_metrics_step(edition=edition),
build_backend_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
build_frontend_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
build_plugins_step(edition=edition, sign=True),
package_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
e2e_tests_server_step(),
e2e_tests_step(),
build_storybook_step(edition=edition, ver_mode=ver_mode),
publish_storybook_step(edition=edition, ver_mode=ver_mode),
build_docs_website_step(),
copy_packages_for_docker_step(),
build_docker_images_step(edition=edition, ver_mode=ver_mode, publish=publish),
build_docker_images_step(edition=edition, ver_mode=ver_mode, ubuntu=True, publish=publish),
postgres_integration_tests_step(),
mysql_integration_tests_step(),
release_next_npm_packages_step(edition),
upload_packages_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
deploy_to_kubernetes_step(edition=edition, is_downstream=is_downstream),
]
windows_steps = get_windows_steps(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream)
publish_steps = [
publish_packages_step(edition=edition, is_downstream=is_downstream),
]
return steps, windows_steps, publish_steps
def master_pipelines(edition):
services = integration_test_services()
trigger = {
'event': ['push',],
'branch': 'master',
}
steps, windows_steps, publish_steps = get_steps(edition=edition)
if edition == 'enterprise':
steps.append(benchmark_ldap_step())
services.append(ldap_service())
pipelines = [
pipeline(
name='build-master', edition=edition, trigger=trigger, services=services, steps=steps,
ver_mode=ver_mode,
),
pipeline(
name='windows-master', edition=edition, trigger=trigger, steps=windows_steps, platform='windows',
depends_on=['build-master'], ver_mode=ver_mode,
),
]
if edition != 'enterprise':
pipelines.append(pipeline(
name='publish-master', edition=edition, trigger=trigger, steps=publish_steps,
depends_on=['build-master', 'windows-master',], install_deps=False, ver_mode=ver_mode,
))
pipelines.append(notify_pipeline(
name='notify-master', slack_channel='grafana-ci-notifications', trigger=trigger,
depends_on=['build-master', 'windows-master', 'publish-master'],
))
else:
# Add downstream enterprise pipelines triggerable from OSS builds
trigger = {
'event': ['custom',],
}
steps, windows_steps, publish_steps = get_steps(edition=edition, is_downstream=True)
pipelines.append(pipeline(
name='build-master-downstream', edition=edition, trigger=trigger, services=services, steps=steps,
is_downstream=True, ver_mode=ver_mode,
))
pipelines.append(pipeline(
name='windows-master-downstream', edition=edition, trigger=trigger, steps=windows_steps,
platform='windows', depends_on=['build-master-downstream'], is_downstream=True, ver_mode=ver_mode,
))
pipelines.append(pipeline(
name='publish-master-downstream', edition=edition, trigger=trigger, steps=publish_steps,
depends_on=['build-master-downstream', 'windows-master-downstream'], is_downstream=True, install_deps=False,
ver_mode=ver_mode,
))
pipelines.append(notify_pipeline(
name='notify-master-downstream', slack_channel='grafana-enterprise-ci-notifications', trigger=trigger,
depends_on=['build-master-downstream', 'windows-master-downstream', 'publish-master-downstream'],
))
return pipelines

scripts/pr.star (new file, 67 lines)
View File

@@ -0,0 +1,67 @@
load(
'scripts/lib.star',
'pipeline',
'lint_backend_step',
'codespell_step',
'shellcheck_step',
'test_backend_step',
'test_frontend_step',
'build_backend_step',
'build_frontend_step',
'build_plugins_step',
'package_step',
'e2e_tests_server_step',
'e2e_tests_step',
'build_storybook_step',
'build_docs_website_step',
'copy_packages_for_docker_step',
'build_docker_images_step',
'postgres_integration_tests_step',
'mysql_integration_tests_step',
'get_windows_steps',
'benchmark_ldap_step',
'ldap_service',
'integration_test_services',
)
ver_mode = 'pr'
def pr_pipelines(edition):
services = integration_test_services()
variants = ['linux-x64', 'linux-x64-musl', 'osx64', 'win64',]
steps = [
lint_backend_step(edition),
codespell_step(),
shellcheck_step(),
test_backend_step(),
test_frontend_step(),
build_backend_step(edition=edition, ver_mode=ver_mode, variants=variants),
build_frontend_step(edition=edition, ver_mode=ver_mode),
build_plugins_step(edition=edition),
package_step(edition=edition, ver_mode=ver_mode, variants=variants),
e2e_tests_server_step(),
e2e_tests_step(),
build_storybook_step(edition=edition, ver_mode=ver_mode),
build_docs_website_step(),
copy_packages_for_docker_step(),
build_docker_images_step(edition=edition, ver_mode=ver_mode, archs=['amd64',]),
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
windows_steps = get_windows_steps(edition=edition, ver_mode=ver_mode)
if edition == 'enterprise':
steps.append(benchmark_ldap_step())
services.append(ldap_service())
trigger = {
'event': ['pull_request',],
}
return [
pipeline(
name='test-pr', edition=edition, trigger=trigger, services=services, steps=steps,
ver_mode=ver_mode,
),
pipeline(
name='windows-pr', edition=edition, trigger=trigger, steps=windows_steps, platform='windows',
ver_mode=ver_mode,
),
]

scripts/release.star (new file, 215 lines)
View File

@@ -0,0 +1,215 @@
load(
'scripts/lib.star',
'test_release_ver',
'build_image',
'publish_image',
'pipeline',
'lint_backend_step',
'codespell_step',
'shellcheck_step',
'test_backend_step',
'test_frontend_step',
'build_backend_step',
'build_frontend_step',
'build_plugins_step',
'package_step',
'e2e_tests_server_step',
'e2e_tests_step',
'build_storybook_step',
'copy_packages_for_docker_step',
'build_docker_images_step',
'postgres_integration_tests_step',
'mysql_integration_tests_step',
'get_windows_steps',
'benchmark_ldap_step',
'ldap_service',
'frontend_metrics_step',
'publish_storybook_step',
'upload_packages_step',
'publish_packages_step',
'notify_pipeline',
'integration_test_services',
)
def release_npm_packages_step(edition, ver_mode):
if edition == 'enterprise':
return None
commands = [
'./node_modules/.bin/lerna bootstrap',
'echo "//registry.npmjs.org/:_authToken=$${NPM_TOKEN}" >> ~/.npmrc',
]
if ver_mode == 'release':
commands.append('./scripts/build/release-packages.sh ${DRONE_TAG}')
return {
'name': 'release-npm-packages',
'image': build_image,
'depends_on': [
'end-to-end-tests',
],
'environment': {
'NPM_TOKEN': {
'from_secret': 'npm_token',
},
},
'commands': commands,
}
def get_steps(edition, ver_mode):
steps = [
lint_backend_step(edition),
codespell_step(),
shellcheck_step(),
test_backend_step(),
test_frontend_step(),
build_backend_step(edition=edition, ver_mode=ver_mode),
build_frontend_step(edition=edition, ver_mode=ver_mode),
build_plugins_step(edition=edition, sign=True),
package_step(edition=edition, ver_mode=ver_mode),
e2e_tests_server_step(),
e2e_tests_step(),
build_storybook_step(edition=edition, ver_mode=ver_mode),
publish_storybook_step(edition=edition, ver_mode=ver_mode),
copy_packages_for_docker_step(),
build_docker_images_step(edition=edition, ver_mode=ver_mode, publish=True),
build_docker_images_step(edition=edition, ver_mode=ver_mode, ubuntu=True, publish=True),
postgres_integration_tests_step(),
mysql_integration_tests_step(),
release_npm_packages_step(edition=edition, ver_mode=ver_mode),
upload_packages_step(edition=edition, ver_mode=ver_mode),
]
windows_steps = get_windows_steps(edition=edition, ver_mode=ver_mode)
return steps, windows_steps
def get_oss_pipelines(trigger, ver_mode):
services = integration_test_services()
steps, windows_steps = get_steps(edition='oss', ver_mode=ver_mode)
return [
pipeline(
name='oss-build-{}'.format(ver_mode), edition='oss', trigger=trigger, services=services, steps=steps,
ver_mode=ver_mode,
),
pipeline(
name='oss-windows-{}'.format(ver_mode), edition='oss', trigger=trigger, steps=windows_steps,
platform='windows', depends_on=['oss-build-{}'.format(ver_mode)], ver_mode=ver_mode,
),
]
def get_enterprise_pipelines(trigger, ver_mode):
services = integration_test_services()
steps, windows_steps = get_steps(edition='enterprise', ver_mode=ver_mode)
return [
pipeline(
name='enterprise-build-{}'.format(ver_mode), edition='enterprise', trigger=trigger, services=services, steps=steps,
ver_mode=ver_mode,
),
pipeline(
name='enterprise-windows-{}'.format(ver_mode), edition='enterprise', trigger=trigger, steps=windows_steps, platform='windows',
depends_on=['enterprise-build-{}'.format(ver_mode)], ver_mode=ver_mode,
),
]
def release_pipelines():
ver_mode = 'release'
services = integration_test_services()
trigger = {
'ref': ['refs/tags/v*',],
}
# The release pipelines also include the enterprise ones, so both editions are built for a release.
# We could also solve this by triggering a downstream build for the enterprise repo, but by including enterprise
# in OSS release builds, we simplify the UX for the release engineer.
oss_pipelines = get_oss_pipelines(ver_mode=ver_mode, trigger=trigger)
enterprise_pipelines = get_enterprise_pipelines(ver_mode=ver_mode, trigger=trigger)
publish_pipeline = pipeline(
name='publish-{}'.format(ver_mode), trigger=trigger, edition='oss', steps=[
{
'name': 'publish-packages',
'image': publish_image,
'depends_on': [
'initialize',
],
'environment': {
'GRAFANA_COM_API_KEY': {
'from_secret': 'grafana_api_key',
},
},
'commands': [
'./bin/grabpl publish-packages --edition oss ${DRONE_TAG}',
'./bin/grabpl publish-packages --edition enterprise ${DRONE_TAG}',
],
},
], depends_on=[p['name'] for p in oss_pipelines + enterprise_pipelines], install_deps=False,
ver_mode=ver_mode,
)
pipelines = oss_pipelines + enterprise_pipelines + [publish_pipeline,]
pipelines.append(notify_pipeline(
name='notify-{}'.format(ver_mode), slack_channel='grafana-ci-notifications', trigger=trigger,
depends_on=[p['name'] for p in pipelines],
))
return pipelines
def test_release_pipelines():
ver_mode = 'test-release'
services = integration_test_services()
trigger = {
'event': ['custom',],
}
oss_pipelines = get_oss_pipelines(ver_mode=ver_mode, trigger=trigger)
enterprise_pipelines = get_enterprise_pipelines(ver_mode=ver_mode, trigger=trigger)
publish_cmd = './bin/grabpl publish-packages --edition {{}} --dry-run {}'.format(test_release_ver)
publish_pipeline = pipeline(
name='publish-{}'.format(ver_mode), trigger=trigger, edition='oss', steps=[
{
'name': 'publish-packages-oss',
'image': publish_image,
'depends_on': [
'initialize',
],
'environment': {
'GRAFANA_COM_API_KEY': {
'from_secret': 'grafana_api_key',
},
},
'commands': [
publish_cmd.format('oss'),
],
},
{
'name': 'publish-packages-enterprise',
'image': publish_image,
'depends_on': [
'initialize',
],
'environment': {
'GRAFANA_COM_API_KEY': {
'from_secret': 'grafana_api_key',
},
},
'commands': [
publish_cmd.format('enterprise'),
],
},
], depends_on=[p['name'] for p in oss_pipelines + enterprise_pipelines], install_deps=False,
ver_mode=ver_mode,
)
pipelines = oss_pipelines + enterprise_pipelines + [publish_pipeline,]
pipelines.append(notify_pipeline(
name='notify-{}'.format(ver_mode), slack_channel='grafana-ci-notifications', trigger=trigger,
depends_on=[p['name'] for p in pipelines],
))
return pipelines
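
The test-release pipelines above trigger on the custom event, so they are started by hand rather than by a tag push. A minimal sketch using drone-cli, where the repository slug, branch, and flag names are illustrative assumptions:

    # Create a custom-event build that runs the test-release pipelines.
    drone build create grafana/grafana --branch chore/test-release-pipeline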