chore: merge changes from upstream

geek 2017-06-29 09:38:30 -05:00 committed by Sérgio Ramos
parent f68ecddaa2
commit 86a1149acd
2 changed files with 114 additions and 50 deletions


@@ -1,8 +1,49 @@
 'use strict';
 
+const Triton = require('triton');
+const Url = require('url');
+const Path = require('path');
+const Fs = require('fs');
 const Data = require('portal-api/lib/data');
 
+const {
+  DOCKER_HOST,
+  DOCKER_CERT_PATH,
+  SDC_URL,
+  SDC_ACCOUNT,
+  SDC_KEY_ID
+} = process.env;
+
+const DOCKER_HOST_URL = DOCKER_HOST ?
+  Url.parse(DOCKER_HOST) :
+  {};
+
+const settings = {
+  db: {
+    host: process.env.RETHINK_HOST || 'localhost'
+  },
+  docker: {
+    protocol: 'https',
+    host: DOCKER_HOST_URL.hostname,
+    port: DOCKER_HOST_URL.port,
+    ca: DOCKER_CERT_PATH ?
+      Fs.readFileSync(Path.join(DOCKER_CERT_PATH, 'ca.pem')) :
+      undefined,
+    cert: DOCKER_CERT_PATH ?
+      Fs.readFileSync(Path.join(DOCKER_CERT_PATH, 'cert.pem')) :
+      undefined,
+    key: DOCKER_CERT_PATH ?
+      Fs.readFileSync(Path.join(DOCKER_CERT_PATH, 'key.pem')) :
+      undefined
+  },
+  triton: {
+    url: SDC_URL,
+    account: SDC_ACCOUNT,
+    keyId: SDC_KEY_ID
+  }
+};
+
 const ifError = function (err) {
   if (err) {
     console.error(err);
@@ -11,47 +52,39 @@ const ifError = function (err) {
 };
 
 const bootstrap = function () {
-  const data = new Data({
-    db: {
-      host: process.env.RETHINK_HOST || 'localhost'
-    }
-  });
+  const data = new Data(settings);
 
   const region = process.env.TRITON_DC || 'us-sw-1';
-  const login = process.env.TRITON_USER || 'nikola';
 
-  data.connect(() => {
+  data.connect((err) => {
+    ifError(err);
+
     data.createDatacenter({ region, name: region }, (err, datacenter) => {
       ifError(err);
-      data.createUser({ firstName: 'Nikola', lastName: 'Tesla', email: 'nikola@tesla.com', login }, (err, user) => {
+
+      Triton.createClient({
+        profile: settings.triton
+      }, (err, { cloudapi }) => {
         ifError(err);
-        data.createPortal({
-          user,
-          datacenter
-        }, (err, portal) => {
+
+        cloudapi.getAccount((err, { firstName, lastName, email, login }) => {
           ifError(err);
-          console.log('data bootstrapped');
-          process.exit(0);
+
+          data.createUser({ firstName, lastName, email, login }, (err, user) => {
+            ifError(err);
+            data.createPortal({
+              user,
+              datacenter
+            }, (err, portal) => {
+              ifError(err);
+              console.log('data bootstrapped');
+              process.exit(0);
+            });
+          });
         });
       });
     });
   });
 };
 
-/*
-const main = function () {
-  const dropData = new Data({
-    db: {
-      host: process.env.RETHINK_HOST || 'localhost'
-    }
-  });
-
-  dropData.connect(() => {
-    dropData._db.r.dbDrop('portal').run(dropData._db._connection, () => {
-      bootstrap();
-    });
-  });
-};
-*/
-
 bootstrap();
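The script above no longer seeds a hard-coded user: it assembles its settings from DOCKER_HOST, DOCKER_CERT_PATH and the SDC_* variables, then asks CloudAPI which account those credentials belong to and creates the portal user from that record. A minimal standalone sketch of just that lookup, mirroring the Triton.createClient and getAccount calls above (the profile fields are read from the same environment variables; the bare throws are only for brevity):

'use strict';

// Sketch only: resolve the CloudAPI account the same way the bootstrap
// script does, using the SDC_* environment variables for the profile.
const Triton = require('triton');

Triton.createClient({
  profile: {
    url: process.env.SDC_URL,         // CloudAPI endpoint
    account: process.env.SDC_ACCOUNT, // account login name
    keyId: process.env.SDC_KEY_ID     // fingerprint of the signing SSH key
  }
}, (err, client) => {
  if (err) {
    throw err;
  }

  client.cloudapi.getAccount((err, account) => {
    if (err) {
      throw err;
    }

    // The bootstrap script passes these fields to data.createUser().
    console.log(account.firstName, account.lastName, account.email, account.login);
  });
});

RETHINK_HOST and TRITON_DC keep their previous roles (database host and datacenter region); only the user details now come from CloudAPI. The remaining hunks below are against the Data class module itself.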


@@ -10,6 +10,7 @@ const Util = require('util');
 const DockerClient = require('docker-compose-client');
 const Dockerode = require('dockerode');
 const Hoek = require('hoek');
+const Triton = require('triton');
 const ParamCase = require('param-case');
 const Penseur = require('penseur');
 const { DEPLOYMENT_GROUP, SERVICE, HASH } = require('../watch');
@@ -70,10 +71,18 @@ module.exports = class Data extends EventEmitter {
     this._dockerCompose = new DockerClient(settings.dockerComposeHost);
     this._docker = new Dockerode(settings.docker);
     this._watcher = null;
+    this._triton = null;
 
-    // if (settings.consul && settings.consul.address) {
-    //   CPClient.config(settings.consul);
-    // }
+    Triton.createClient({
+      profile: settings.triton
+    }, (err, client) => {
+      if (err) {
+        this.emit('error', err);
+        return;
+      }
+
+      this._triton = client.cloudapi;
+    });
 
     this._dockerCompose.on('error', (err) => {
       this.emit('error', err);
@@ -1002,8 +1011,11 @@ module.exports = class Data extends EventEmitter {
             return next(err);
           }
 
-          const container = this._docker.getContainer(instance.machine_id.split(/-/)[0]);
-          container.stop(next);
+          if (!this._triton) {
+            return next();
+          }
+
+          this._triton.stopMachine(instance.machine_id, next);
         });
       },
       inputs: instanceIds
@@ -1038,8 +1050,11 @@ module.exports = class Data extends EventEmitter {
             return next(err);
           }
 
-          const container = this._docker.getContainer(instance.machine_id.split(/-/)[0]);
-          container.start(next);
+          if (!this._triton) {
+            return next();
+          }
+
+          this._triton.startMachine(instance.machine_id, next);
         });
       },
       inputs: instanceIds
@@ -1074,8 +1089,11 @@ module.exports = class Data extends EventEmitter {
             return next(err);
           }
 
-          const container = this._docker.getContainer(instance.machine_id.split(/-/)[0]);
-          container.restart(next);
+          if (!this._triton) {
+            return next();
+          }
+
+          this._triton.rebootMachine(instance.machine_id, next);
         });
       },
       inputs: instanceIds
@@ -1127,9 +1145,11 @@ module.exports = class Data extends EventEmitter {
             return next(err);
           }
 
-          const container = this._docker.getContainer(instance.machine_id.split(/-/)[0]);
-          // Use force in case the container is running. TODO: should we keep force?
-          container.remove({ force: true }, next);
+          if (!this._triton) {
+            return next();
+          }
+
+          this._triton.deleteMachine(instance.machine_id, next);
         });
       },
       inputs: instanceIds
@@ -1220,8 +1240,11 @@ module.exports = class Data extends EventEmitter {
     VAsync.forEachParallel({
       func: (instance, next) => {
-        const container = this._docker.getContainer(instance.machine_id.split(/-/)[0]);
-        container.stop(next);
+        if (!this._triton) {
+          return next();
+        }
+
+        this._triton.stopMachine(instance.machine_id, next);
       },
       inputs: instances
     }, (err, results) => {
@@ -1246,19 +1269,26 @@ module.exports = class Data extends EventEmitter {
     VAsync.forEachParallel({
       func: (instance, next) => {
-        const container = this._docker.getContainer(instance.machine_id.split(/-/)[0]);
-        container.start((err) => {
+        if (!this._triton) {
+          return next();
+        }
+
+        this._triton.startMachine(instance.machine_id, (err) => {
           if (err) {
             return next(err);
           }
 
+          const container = this._docker.getContainer(instance.machine_id.split(/-/)[0]);
+
           // Update the IPAddress for the instance
           container.inspect((err, details) => {
             if (err) {
               return next(err);
             }
 
-            this._db.instances.update(instance.id, { ips: [details.NetworkSettings.IPAddress] }, next);
+            this._db.instances.update(instance.id, {
+              ips: [details.NetworkSettings.IPAddress]
+            }, next);
           });
         });
       },
@@ -1285,10 +1315,11 @@ module.exports = class Data extends EventEmitter {
     VAsync.forEachParallel({
       func: (instance, next) => {
-        this.updateInstance({ id: instance.id, status: 'RESTARTING' }, () => {
-          const container = this._docker.getContainer(instance.machine_id.split(/-/)[0]);
-          container.restart(next);
-        });
+        if (!this._triton) {
+          return next();
+        }
+
+        this._triton.rebootMachine(instance.machine_id, next);
       },
       inputs: instances
     }, (err, results) => {
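Throughout the Data class the same substitution repeats: the per-instance Docker container calls (stop, start, restart, remove on a container id derived by splitting machine_id) become CloudAPI machine calls, each guarded so the operation is skipped while this._triton is still null. A condensed, hypothetical sketch of that recurring pattern, with triton and instances standing in for this._triton and the records read from the db:

'use strict';

// Illustrative only: the guard-plus-parallel pattern used by the class
// methods above. `triton` stands in for this._triton (null until
// Triton.createClient finishes) and `instances` for the loaded records.
const VAsync = require('vasync');

const stopAllInstances = function (triton, instances, callback) {
  VAsync.forEachParallel({
    func: (instance, next) => {
      if (!triton) {
        // Client not ready (or createClient failed): skip without erroring.
        return next();
      }

      // machine_id is the full machine UUID, so the truncated Docker
      // container id is no longer derived from it.
      triton.stopMachine(instance.machine_id, next);
    },
    inputs: instances
  }, callback);
};

startMachine, rebootMachine and deleteMachine are swapped in the same way in the other methods. Because the CloudAPI client is created asynchronously in the constructor, the guard keeps early calls from failing on a null client, at the cost of silently skipping them.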