chore: run deployment in a single script
This commit is contained in:
parent
1d463af70c
commit
20251ae0d0
30
bin/deploy
30
bin/deploy
@ -1,30 +0,0 @@
|
|||||||
#! /usr/bin/env bash
#
# bin/deploy - deploy the stack to Triton with docker-compose.
#
# Prelude - make bash behave sanely
# http://redsymbol.net/articles/unofficial-bash-strict-mode/
#
set -euo pipefail

# Beware of CDPATH gotchas causing cd not to work correctly when a user
# has set this in their environment
# https://bosker.wordpress.com/2012/02/12/bash-scripters-beware-of-the-cdpath/
unset CDPATH

readonly INCLUDE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# shellcheck source=bin/setup-tools
. "${INCLUDE}"/setup-tools

echo ">> running triton deploy with docker-compose"
# Fail fast on missing CLIs. docker-compose is used below but was
# previously never checked.
ensure_command triton
ensure_command docker-compose
ensure_triton_cns_is_enabled
get_triton_details
write_env_file

# _DOCKER_HOST / _DOCKER_CERT_PATH are expected to be provided by the CI
# environment; set -u aborts with a clear error if they are missing.
export DOCKER_HOST="${_DOCKER_HOST}"
export DOCKER_CERT_PATH="${_DOCKER_CERT_PATH}"
# Do not TLS verify for now, incompatibilities between circleci and joyent
export DOCKER_TLS_VERIFY=

docker-compose pull
COMPOSE_PROJECT_NAME="${CIRCLE_BRANCH}" docker-compose up -d
|
|
@ -1,10 +0,0 @@
|
|||||||
#! /usr/bin/env bash
#
# Log the Docker client into the private registry using credentials
# supplied via the environment (set by CI).
#
# Prelude - make bash behave sanely
# http://redsymbol.net/articles/unofficial-bash-strict-mode/
#
set -euo pipefail

echo ">> Logging into $_DOCKER_REGISTRY"
# The '-e' (email) flag was deprecated and then removed from
# 'docker login' (Docker 17.06+), where it makes the command fail
# outright - so it must not be passed.
docker login -u="$_DOCKER_LOGIN_USERNAME" -p="$_DOCKER_LOGIN_PASSWORD" "$_DOCKER_REGISTRY"
|
|
23
bin/setup
23
bin/setup
@ -1,23 +0,0 @@
|
|||||||
#! /usr/bin/env bash

# bin/setup - prepare the local environment for a Triton deployment.

#
# Prelude: abort on errors, unset variables and pipeline failures;
# limit word-splitting to newlines and tabs.
#
set -euo pipefail
IFS=$'\n\t'

# CDPATH can make `cd` jump somewhere unexpected when set in the caller's
# environment, so clear it defensively.
# https://bosker.wordpress.com/2012/02/12/bash-scripters-beware-of-the-cdpath/
unset CDPATH

# Resolve the directory holding this script and pull in the shared helpers.
readonly INCLUDE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# shellcheck source=bin/setup-tools
. "${INCLUDE}"/setup-tools

#
# Main
#
main() {
  ensure_prerequisites
  get_triton_details
  check_docker_config
  write_env_file
}

main "$@"
|
|
105
bin/setup-tools
105
bin/setup-tools
@ -1,105 +0,0 @@
|
|||||||
#! /usr/bin/env bash

# setup-tools - helper library: checks that all required tools are present
# and correctly configured for deploying to Triton.
#
# Adapted from https://github.com/autopilotpattern/mysql/blob/master/setup.sh

#
# Prelude: strict mode (exit on error, unset vars, pipeline failures) and
# word-splitting restricted to newlines/tabs.
#
set -euo pipefail
IFS=$'\n\t'
|
|
||||||
|
|
||||||
#
|
|
||||||
# Utilities
|
|
||||||
#
|
|
||||||
#######################################
# Abort the script, optionally showing a highlighted message first.
# Arguments: $* - message to display (may be empty)
# Outputs:   a blank line plus the message in bold red (via tput)
# Returns:   never - exits the shell with status 1
#######################################
die() {
  local message="$*"
  if [[ -n "${message}" ]]; then
    echo
    tput setaf 1 # switch to red
    tput bold
    echo "${message}"
    tput sgr0 # restore default attributes
  fi
  exit 1
}
|
|
||||||
|
|
||||||
#
|
|
||||||
# Check functions
|
|
||||||
#
|
|
||||||
#######################################
# Verify that a command is available on PATH, aborting if it is not.
# Arguments: $1 - name of the command to look for
#######################################
ensure_command() {
  local wanted="$1"

  if ! command -v "${wanted}" > /dev/null 2>&1; then
    die "Couldn't find required command: ${wanted}"
  fi
}
|
|
||||||
|
|
||||||
#######################################
# Capture the active Triton profile/account details into the globals the
# other helpers read: TRITON_USER, TRITON_DC, TRITON_URL, TRITON_ACCOUNT,
# TRITON_KEY.
# Runs `triton profile get` / `triton account get` once each instead of
# once per field - each invocation is a slow CLI round-trip.
#######################################
get_triton_details() {
  local profile account
  profile=$(triton profile get)
  account=$(triton account get)

  TRITON_USER=$(awk -F": " '/account:/{print $2}' <<< "${profile}")
  TRITON_DC=$(awk -F"/" '/url:/{print $3}' <<< "${profile}" | awk -F'.' '{print $1}')
  TRITON_URL=$(awk -F' ' '/url:/{print $2}' <<< "${profile}")
  TRITON_ACCOUNT=$(awk -F": " '/id:/{print $2}' <<< "${account}")
  TRITON_KEY=$(awk -F' ' '/keyId:/{print $2}' <<< "${profile}")
}
|
|
||||||
|
|
||||||
#######################################
# Hint how to run locally when DOCKER_HOST is not pointed at a remote
# Docker endpoint. Never fails.
# Globals:  DOCKER_HOST (read; defaulted to the sentinel "unset")
# Returns:  always 0
#######################################
check_docker_config() {
  if [[ "${DOCKER_HOST:=unset}" == "unset" ]]; then
    echo "Run \"docker-compose -f local-compose.yml up\" to run locally"
  fi

  return 0
}
|
|
||||||
|
|
||||||
#######################################
# Compare the Docker CLI configuration (SDC account from `docker info`,
# data center parsed out of DOCKER_HOST) against the active Triton
# profile, dying on any mismatch. Also populates the TRITON_* globals
# via get_triton_details.
#
# NOTE: the previous `[[ ! a = b ]] || [[ ! c = d ]] && { ... }` form
# returned non-zero precisely when everything MATCHED, which killed the
# whole script under `set -e` before ever reaching `return 0`; an
# explicit `if` avoids that trap.
#######################################
ensure_docker_config_matches_triton_config_and_capture_triton_details() {
  local docker_user
  docker_user=$(docker info 2>&1 | awk -F": " '/SDCAccount:/{print $2}')
  local docker_dc
  docker_dc="$(echo "${DOCKER_HOST}" | awk -F"/" '{print $3}' | awk -F'.' '{print $1}')"

  get_triton_details

  if [[ "$docker_user" != "$TRITON_USER" ]] || [[ "$docker_dc" != "$TRITON_DC" ]]; then
    echo "Docker user: ${docker_user}"
    echo "Triton user: ${TRITON_USER}"
    echo "Docker data center: ${docker_dc}"
    echo "Triton data center: ${TRITON_DC}"
    die "Your Triton config does not match your Docker configuration."
  fi

  return 0
}
|
|
||||||
|
|
||||||
#######################################
# Abort unless the Triton account has CNS (Container Name Service)
# enabled - service discovery below relies on it.
#######################################
ensure_triton_cns_is_enabled() {
  local cns_state
  cns_state=$(triton account get | awk -F": " '/cns/{print $2}')

  if [[ "${cns_state}" != "true" ]]; then
    die "Triton CNS is required and not enabled."
  fi
}
|
|
||||||
|
|
||||||
#######################################
# Generate a .env file with the SDC/Triton settings docker-compose needs.
# Relies on the TRITON_* globals set by get_triton_details. An existing
# .env is left untouched.
# Globals:  DOCKER_HOST (read), TRITON_ACCOUNT, TRITON_DC, TRITON_KEY,
#           TRITON_URL (read)
#######################################
write_env_file() {
  if [[ -f .env ]] ; then
    echo "Env file already exists, not overwriting"
  else
    echo '# Consul discovery via Triton CNS' >> .env
    # Only write CONSUL when we are NOT pointed at a Joyent docker
    # endpoint. The glob must be unquoted: quoting it made the comparison
    # literal, so the CONSUL line was previously written unconditionally.
    [[ "${DOCKER_HOST:=unset}" == *docker.joyent.com* ]] || {
      echo "CONSUL=consul.svc.${TRITON_ACCOUNT}.${TRITON_DC}.cns.joyent.com" \
        >> .env
    }
    echo "SDC_KEY_ID=${TRITON_KEY}" >> .env
    echo "SDC_ACCOUNT=${TRITON_ACCOUNT}" >> .env
    echo "SDC_URL=${TRITON_URL}" >> .env
    echo >> .env
  fi
}
|
|
||||||
|
|
||||||
#######################################
# Confirm every CLI tool the deploy flow depends on is installed.
#######################################
ensure_prerequisites() {
  local tool
  for tool in docker docker-compose triton; do
    ensure_command "${tool}"
  done
}
|
|
||||||
|
|
||||||
# vim: syntax=sh et ts=2 sts=2 sw=2
|
|
@ -16,10 +16,8 @@ dependencies:
|
|||||||
- sudo chmod +x /usr/local/bin/docker-compose
|
- sudo chmod +x /usr/local/bin/docker-compose
|
||||||
# install and setup triton
|
# install and setup triton
|
||||||
- yarn global add triton@4.15.0 || cat /home/ubuntu/.yarn-config/global/yarn-error.log
|
- yarn global add triton@4.15.0 || cat /home/ubuntu/.yarn-config/global/yarn-error.log
|
||||||
- echo '{"url":"https://eu-ams-1.api.joyent.com","account":"'$SDC_ACCOUNT'","keyId":"c3:30:35:9b:85:48:73:44:31:cc:4b:2e:6a:00:16:e2","name":"eu-ams-1","curr":true}' | triton profile create -f -
|
- echo '{"url":"https://eu-ams-1.api.joyent.com", "account":"'$SDC_ACCOUNT'", "keyId":"$SDC_KEY_ID", "name":"eu-ams-1", "curr":true}' | triton profile create -f -
|
||||||
- triton env --docker eu-ams-1
|
- triton env --docker eu-ams-1
|
||||||
# setup tap report
|
|
||||||
- mkdir -p ${CIRCLE_TEST_REPORTS}/tap-xunit/
|
|
||||||
override:
|
override:
|
||||||
# add rethinkdb sources
|
# add rethinkdb sources
|
||||||
- source /etc/lsb-release && echo "deb http://download.rethinkdb.com/apt $DISTRIB_CODENAME main" | sudo tee /etc/apt/sources.list.d/rethinkdb.list
|
- source /etc/lsb-release && echo "deb http://download.rethinkdb.com/apt $DISTRIB_CODENAME main" | sudo tee /etc/apt/sources.list.d/rethinkdb.list
|
||||||
@ -41,9 +39,7 @@ deployment:
|
|||||||
development:
|
development:
|
||||||
branch: master
|
branch: master
|
||||||
commands:
|
commands:
|
||||||
- ./bin/docker-login
|
- ./scripts/deploy
|
||||||
- ./scripts/build
|
|
||||||
- ./bin/deploy
|
|
||||||
# production:
|
# production:
|
||||||
# tag: /production-*/
|
# tag: /production-*/
|
||||||
# commands:
|
# commands:
|
||||||
|
@ -5,6 +5,7 @@
|
|||||||
"license": "MPL-2.0",
|
"license": "MPL-2.0",
|
||||||
"repository": "github:yldio/joyent-portal",
|
"repository": "github:yldio/joyent-portal",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
|
"deploy": "./scripts/deploy",
|
||||||
"format": "./scripts/format",
|
"format": "./scripts/format",
|
||||||
"test-staged": "./scripts/run-staged-pkg --test",
|
"test-staged": "./scripts/run-staged-pkg --test",
|
||||||
"lint-staged": "./scripts/run-staged-pkg --lint",
|
"lint-staged": "./scripts/run-staged-pkg --lint",
|
||||||
@ -19,7 +20,7 @@
|
|||||||
"updt:packages": "lerna exec ncu -au",
|
"updt:packages": "lerna exec ncu -au",
|
||||||
"updt:teardown": "run-s clean bootstrap",
|
"updt:teardown": "run-s clean bootstrap",
|
||||||
"updt": "run-s updt:*",
|
"updt": "run-s updt:*",
|
||||||
"publish": "./scripts/publish",
|
"release": "./scripts/release",
|
||||||
"clean": "lerna clean --yes",
|
"clean": "lerna clean --yes",
|
||||||
"bootstrap": "lerna bootstrap",
|
"bootstrap": "lerna bootstrap",
|
||||||
"prepare": "run-s clean bootstrap",
|
"prepare": "run-s clean bootstrap",
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"apr-main": "^1.0.7",
|
"apr-main": "^1.0.7",
|
||||||
"apr-map": "^1.0.5",
|
"apr-map": "^1.0.5",
|
||||||
"apr-reduce": "^1.0.5",
|
"apr-reduce": "^1.0.5",
|
||||||
|
"apr-series": "^1.0.5",
|
||||||
"apr-sort-by": "^1.0.5",
|
"apr-sort-by": "^1.0.5",
|
||||||
"babel-eslint": "^7.2.3",
|
"babel-eslint": "^7.2.3",
|
||||||
"chalk": "^1.1.3",
|
"chalk": "^1.1.3",
|
||||||
@ -47,6 +49,7 @@
|
|||||||
"conventional-changelog-lint": "^1.1.9",
|
"conventional-changelog-lint": "^1.1.9",
|
||||||
"conventional-changelog-lint-config-angular": "^0.4.1",
|
"conventional-changelog-lint-config-angular": "^0.4.1",
|
||||||
"conventional-changelog-lint-config-lerna-scopes": "^1.0.0",
|
"conventional-changelog-lint-config-lerna-scopes": "^1.0.0",
|
||||||
|
"dotenv": "^4.0.0",
|
||||||
"eslint": "^3.19.0",
|
"eslint": "^3.19.0",
|
||||||
"eslint-config-prettier": "^2.1.1",
|
"eslint-config-prettier": "^2.1.1",
|
||||||
"eslint-config-react-app": "^1.0.4",
|
"eslint-config-react-app": "^1.0.4",
|
||||||
|
@ -1,45 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
|
|
||||||
const execa = require('execa');
|
|
||||||
const main = require('apr-main');
|
|
||||||
const map = require('apr-map');
|
|
||||||
const globby = require('globby');
|
|
||||||
const path = require('path');
|
|
||||||
const flatten = require('lodash.flatten');
|
|
||||||
const readPkg = require('read-pkg');
|
|
||||||
|
|
||||||
const NAMESPACE = 'quay.io/yldio';
|
|
||||||
const ROOT = path.join(__dirname, '..');
|
|
||||||
const CIRCLE_BRANCH = process.env['CIRCLE_BRANCH'];
|
|
||||||
|
|
||||||
// Build and push a Docker image for every package that ships a Dockerfile.
const build = async () => {
  // Each deployable package keeps its own Dockerfile under packages/.
  const dockerfiles = await globby(['packages/*/Dockerfile'], {
    cwd: ROOT
  });

  return map(dockerfiles, async dockerfile => {
    const pkgDir = path.resolve(ROOT, path.dirname(dockerfile));
    const { name } = await readPkg(pkgDir);
    // Tag with the current branch and as latest.
    const tags = [`${name}:${CIRCLE_BRANCH}`, `${name}:latest`];
    const tagArgs = flatten(tags.map(tag => ['-t', `${NAMESPACE}/${tag}`]));

    await execa('docker', flatten(['build', tagArgs, '-f', dockerfile, '.']), {
      stdio: 'inherit'
    });

    return execa('docker', ['push', `${NAMESPACE}/${name}`], {
      stdio: 'inherit'
    });
  });
};

main(build());
|
|
98
scripts/deploy
Executable file
98
scripts/deploy
Executable file
@ -0,0 +1,98 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
|
||||||
|
const execa = require('execa');
|
||||||
|
const { parse } = require('dotenv');
|
||||||
|
const main = require('apr-main');
|
||||||
|
const map = require('apr-map');
|
||||||
|
const series = require('apr-series');
|
||||||
|
const globby = require('globby');
|
||||||
|
const path = require('path');
|
||||||
|
const flatten = require('lodash.flatten');
|
||||||
|
const readPkg = require('read-pkg');
|
||||||
|
const figures = require('figures');
|
||||||
|
|
||||||
|
const NAMESPACE = 'quay.io/yldio';
|
||||||
|
const ROOT = path.join(__dirname, '..');
|
||||||
|
const LOG = console.log;
|
||||||
|
|
||||||
|
// Merge `triton env` output into a copy of the current process environment.
const getEnv = async () => {
  const raw = await execa.stdout('triton', ['env']);
  // `triton env` emits `export KEY=VALUE` lines; stripping the `export `
  // prefix leaves dotenv-parseable pairs.
  const pairs = parse(raw.replace(/^export /gim, ''));
  return Object.assign({}, process.env, pairs);
};
|
||||||
|
|
||||||
|
// Log the Docker client into the private registry.
//
// NOTE: the previous version embedded literal double quotes inside the
// argv values (`--username="user"`). execa spawns docker without a shell,
// so those quotes were passed through verbatim and became part of the
// credentials, breaking authentication. Pass flag and value as separate,
// unquoted arguments instead.
const login = ({
  _DOCKER_LOGIN_USERNAME,
  _DOCKER_LOGIN_PASSWORD,
  _DOCKER_REGISTRY
}) => async () => {
  LOG(`${figures.arrowRight} login`);
  return execa('docker', [
    'login',
    '--username',
    _DOCKER_LOGIN_USERNAME,
    '--password',
    _DOCKER_LOGIN_PASSWORD,
    _DOCKER_REGISTRY
  ]);
};
|
||||||
|
|
||||||
|
// Build and push an image for every package that ships a Dockerfile,
// tagging each with the current branch and `latest`.
const build = ({ CIRCLE_BRANCH }) => async () => {
  LOG(`${figures.arrowRight} build`);

  const dockerfiles = await globby(['packages/*/Dockerfile'], {
    cwd: ROOT
  });

  LOG(`${figures.arrowRight} build.dockerfiles`);
  LOG(JSON.stringify(dockerfiles, null, 2));

  return map(dockerfiles, async dockerfile => {
    const pkgDir = path.resolve(ROOT, path.dirname(dockerfile));
    const { name } = await readPkg(pkgDir);
    const tags = [`${name}:${CIRCLE_BRANCH}`, `${name}:latest`];

    LOG(`${figures.arrowRight} build.name ${name}`);
    LOG(JSON.stringify(tags, null, 2));

    const tagArgs = flatten(tags.map(tag => ['-t', `${NAMESPACE}/${tag}`]));
    await execa('docker', flatten(['build', tagArgs, '-f', dockerfile, '.']), {
      stdio: 'inherit'
    });

    LOG(`${figures.arrowRight} build.push ${NAMESPACE}/${name}`);
    return execa('docker', ['push', `${NAMESPACE}/${name}`], {
      stdio: 'inherit'
    });
  });
};
|
||||||
|
|
||||||
|
// Sign the Docker client out of the registry. `env` is unused but kept so
// every pipeline step shares the same (env) => thunk shape.
const logout = env => async () => {
  LOG(`${figures.arrowRight} logout`);
  return execa('docker', ['logout']);
};
|
||||||
|
|
||||||
|
// Bring the stack up, rebuilding containers, with the Triton-derived
// environment so docker-compose targets the remote Docker endpoint.
const deploy = env => () => {
  LOG(`${figures.arrowRight} deploy`);
  const composeArgs = ['up', '-d', '--build'];
  return execa('docker-compose', composeArgs, { env });
};
|
||||||
|
|
||||||
|
// Orchestrate the whole deployment: login -> build/push -> logout -> up.
const run = async () => {
  const env = await getEnv();
  LOG(`${figures.arrowRight} .env`);
  LOG(JSON.stringify(Object.keys(env), null, 2));

  const steps = [login(env), build(env), logout(env), deploy(env)];
  return series(steps);
};

LOG(`${figures.arrowRight} DEPLOY`);
main(run());
|
Loading…
Reference in New Issue
Block a user