watch reachability of containers

parent ffe6809390
commit 9f03c65f05

spikes/leak/.dockerignore (new file, +4)
@@ -0,0 +1,4 @@
+/node_modules
+coverage
+.nyc_output
+npm-debug.log

@@ -1,7 +1,7 @@
 config:
   target: "http://fast-node:8000"
   phases:
-    - duration: 13200
+    - duration: 172800
       arrivalRate: 1
 scenarios:
   - flow:

@@ -1,7 +1,7 @@
 config:
   target: "http://fast-node:8000"
   phases:
-    - duration: 13200
+    - duration: 172800
       arrivalRate: 1
 scenarios:
   - flow:

@@ -1,7 +1,7 @@
 config:
   target: "http://plain-node:8000"
   phases:
-    - duration: 13200
+    - duration: 172800
       arrivalRate: 1
 scenarios:
   - flow:

@@ -1,7 +1,7 @@
 config:
   target: "http://slow-node:8000"
   phases:
-    - duration: 13200
+    - duration: 172800
       arrivalRate: 1
 scenarios:
   - flow:
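
With arrivalRate: 1, the phase length also fixes the request volume: the old 13200 s phase is roughly 3.7 hours (about 13,200 requests per target), while 172800 s runs for 48 hours (about 172,800 requests per target), presumably to give the slower-growing leak time to show up in the metrics.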

spikes/leak/dataset-leak.json (new file, +128636)
File diff suppressed because it is too large

@@ -11,19 +11,6 @@ fast-artillery:
       - MODE=fast
     links:
       - fast-node:fast-node
-# another-fast-node:
-#   build: .
-#   environment:
-#     - TYPE=node
-#   ports:
-#     - "8000"
-# another-fast-artillery:
-#   build: .
-#   environment:
-#     - TYPE=artillery
-#     - MODE=another-fast
-#   links:
-#     - another-fast-node:another-fast-node
 slow-node:
   build: .
   environment:

File diff suppressed because it is too large

@@ -11,6 +11,7 @@
     "build-array": "^1.0.0",
     "chart.js": "^2.5.0",
     "date.js": "^0.3.1",
+    "dockerode": "^2.4.1",
     "epimetheus": "^1.0.46",
     "force-array": "^3.1.0",
     "good": "^7.1.0",
@@ -42,7 +43,17 @@
     "simple-statistics": "^2.5.0"
   },
   "devDependencies": {
+    "apr-awaitify": "^1.0.2",
+    "apr-filter": "^1.0.3",
+    "apr-find": "^1.0.3",
+    "apr-for-each": "^1.0.4",
+    "apr-intercept": "^1.0.2",
+    "apr-map": "^1.0.3",
+    "apr-parallel": "^1.0.3",
+    "apr-some": "^1.0.3",
+    "apr-until": "^1.0.3",
     "async": "^2.1.5",
+    "axios": "^0.15.3",
     "babel-core": "^6.23.1",
     "babel-eslint": "^7.1.1",
     "babel-loader": "^6.4.0",
@@ -51,7 +62,9 @@
     "babel-plugin-transform-object-rest-spread": "^6.23.0",
     "babel-preset-es2015": "^6.22.0",
     "babel-preset-react": "^6.23.0",
+    "delay": "^1.3.1",
     "diskusage": "^0.2.1",
+    "dockerode": "^2.4.1",
     "eslint": "^3.17.0",
     "eslint-config-semistandard": "^7.0.0",
     "eslint-config-standard": "^7.0.0",
@@ -59,9 +72,16 @@
     "eslint-plugin-promise": "^3.5.0",
     "eslint-plugin-react": "^6.10.0",
     "eslint-plugin-standard": "^2.1.1",
+    "got": "^6.7.1",
+    "js-yaml": "^3.8.2",
     "json-loader": "^0.5.4",
+    "lodash.flatten": "^4.4.0",
+    "lodash.uniq": "^4.5.0",
+    "minimist": "^1.2.0",
+    "moment": "^2.18.0",
     "os-utils": "^0.0.14",
     "simple-statistics": "^2.5.0",
+    "triton": "^5.1.0",
     "webpack": "^2.2.1",
     "webpack-dev-server": "^2.4.1"
   }

@@ -12,6 +12,6 @@ scrape_configs:
   # static_configs:
   #   - targets: ['plain-node:8000']
   - job_name: 'leak'
-    scrape_interval: 1s
+    scrape_interval: 15s
     static_configs:
       - targets: ['fast-node:8000', 'slow-node:8000', 'plain-node:8000']
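
The slower scrape presumably lines up with the step: '15s' used by the range queries in scripts/prometheus.js and watch.js below, so each exported sample corresponds to roughly one scrape.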

@@ -48,7 +48,7 @@ const range = module.exports.range = async ({
   query = [],
   ago = '24h ago',
   step = '15s',
-  hostname = 'localhost'
+  host = 'localhost:9090'
 }) => {
   const end = timestamp(new Date());
   const start = timestamp(date(ago));
@@ -57,8 +57,7 @@ const range = module.exports.range = async ({
   return await got(url.format({
     protocol: 'http:',
     slashes: true,
-    port: '9090',
-    hostname: hostname,
+    host: host,
     pathname: '/api/v1/query_range',
     query: {
       query,
@@ -75,25 +74,14 @@ const range = module.exports.range = async ({
 };
 
 const query = module.exports.query = async ({
-  hostname = 'localhost',
+  host = 'localhost:9090',
   query = []
 }) => {
   const res = await map(query, async (query) => {
-    console.log(url.format({
-      protocol: 'http:',
-      slashes: true,
-      port: '9090',
-      host: hostname,
-      pathname: '/api/v1/query',
-      query: {
-        query: query
-      }
-    }));
     return await got(url.format({
       protocol: 'http:',
       slashes: true,
-      port: '9090',
-      host: hostname,
+      host: host,
      pathname: '/api/v1/query',
       query: {
         query: query
@@ -107,14 +95,13 @@ const query = module.exports.query = async ({
 };
 
 const tree = module.exports.tree = async ({
-  hostname = 'localhost',
+  host = 'localhost:9090',
   query = []
 }) => {
   const res = await got(url.format({
     protocol: 'http:',
     slashes: true,
-    port: '9090',
-    host: hostname,
+    host: host,
     pathname: '/api/v1/series',
     search: qs.stringify({
       match: query
@@ -160,7 +147,7 @@ if (!module.parent) {
     query: argv.query,
     ago: argv.ago,
     step: argv.step,
-    hostname: argv.hostname
+    host: argv.host
   };
 
   handlers[argv.type](conf).then((res) => {
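
With the hostname/port pair collapsed into a single host option (and the CLI flag renamed from --hostname to --host), callers now pass the full authority. A minimal sketch; the host value and metric name are illustrative, not from the commit, and it assumes running from spikes/leak:

  const prometheus = require('./scripts/prometheus');

  prometheus.range({
    host: 'telemetry:9090',            // was: hostname plus a hard-coded port '9090'
    query: ['node_memory_rss_bytes'],
    ago: '1h ago',
    step: '15s'
  }).then((res) => console.log(res));  // whatever range() resolves to; its tail is outside this hunk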

spikes/leak/sort.js (new file, +44)
@@ -0,0 +1,44 @@
+const uniq = require('lodash.uniq');
+const flatten = require('lodash.flatten');
+const argv = require('minimist')(process.argv);
+const moment = require('moment');
+const path = require('path');
+const fs = require('fs');
+
+if (!argv.file) {
+  throw new Error('--file required');
+}
+
+const filename = path.resolve(__dirname, argv.file);
+
+if (!fs.existsSync(filename)) {
+  throw new Error('--file does not exist');
+}
+
+const data = require(filename);
+const metrics = flatten(uniq(Object.keys(data.leak).map((service) => {
+  return Object.keys(data.leak[service]);
+})));
+
+const aggregated = metrics.reduce((agg, name) => Object.assign(agg, {
+  [name]: []
+}), {});
+
+const sort = (set) => {
+  return set.sort((a, b) => {
+    return moment(a[0], 'X').isAfter(moment(b[0], 'X')) ? 1 : -1;
+  });
+};
+
+Object.keys(data.leak).forEach((service) => {
+  Object.keys(data.leak[service]).forEach((metric) => {
+    aggregated[metric] = aggregated[metric].concat(data.leak[service][metric]);
+  });
+});
+
+Object.keys(aggregated).forEach((metric) => {
+  console.error(metric);
+  aggregated[metric] = sort(aggregated[metric]);
+});
+
+console.log(JSON.stringify(aggregated, null, 2));
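
A usage sketch, with a made-up payload to show the shape sort.js expects (inferred from the code above; only dataset-leak.json is real): node sort.js --file dataset-leak.json > sorted.json, where the input looks roughly like

  {
    "leak": {
      "fast-node:8000": { "node_memory_rss_bytes": [[1489450015, "52690944"], [1489450000, "52428800"]] },
      "slow-node:8000": { "node_memory_rss_bytes": [[1489450005, "41943040"]] }
    }
  }

Metric names go to stderr; the merged, timestamp-sorted series per metric are printed as JSON on stdout.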

@@ -1,3 +1,4 @@
+const epimetheus = require('epimetheus');
 const requireDir = require('require-dir');
 const plugins = require('./plugins');
 const routes = requireDir('./routes');
@@ -10,6 +11,8 @@ server.connection({
   port: 8000
 });
 
+epimetheus.instrument(server);
+
 server.register(plugins, (err) => {
   if (err) {
     throw err;
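
epimetheus.instrument(server) stands in for the hand-rolled prom module and /metrics route removed further down: the hapi server now serves Prometheus exposition text on GET /metrics, including the http_request_duration_milliseconds series that watch.js queries. A quick manual probe, mirroring the checkHref helper added in watch.js (the host assumes the compose service names):

  const axios = require('axios');

  axios.get('http://fast-node:8000/metrics', { timeout: 500 })
    .then((res) => console.log(res.data.split('\n')[0])) // first line of the exposition text
    .catch(() => console.error('unreachable'));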

@@ -1,4 +1,3 @@
-const webpack = require('webpack');
 const path = require('path');
 
 module.exports = [

@@ -1,43 +0,0 @@
-const os = require('os');
-const process = require('process');
-// const pidusage = require('pidusage');
-
-const metric = ({ name, desc, type, value }) => `
-# HELP ${name} ${desc}
-# TYPE ${name} ${type || 'gauge'}
-${name} ${value}
-`;
-
-const metrics = {
-  process_mem_used_perc: () => {
-    const free = os.freemem();
-    const total = os.totalmem();
-    const perc = ((total - free) * 100) / total;
-
-    //
-    // console.log({
-    //   available: os.totalmem(),
-    //   free: os.freemem(),
-    //   heapTotal,
-    //   prom: {
-    //     name: 'proc_mem_used_perc',
-    //     desc: 'Process memory used in percentage.',
-    //     value: (heapTotal * 100) / available
-    //   }
-    // });
-    //
-    // pidusage.stat(process.pid, function(err, stat) {
-    //   console.log(stat);
-    // });
-
-    return metric({
-      name: 'proc_mem_used_perc',
-      desc: 'Process memory used in percentage.',
-      value: perc
-    });
-  }
-}
-
-module.exports = () => Object.keys(metrics)
-  .map((name) => metrics[name]())
-  .join('\n');

@@ -1,12 +0,0 @@
-
-const path = require('path');
-
-module.exports = (server) => {
-  server.route({
-    method: 'GET',
-    path: '/',
-    handler: (request, reply) => {
-      reply.file(path.join(__dirname, '../../../static/index.html'));
-    }
-  });
-};

@@ -13,59 +13,51 @@ const fibonacci = (num) => {
 module.exports = (server) => {
   server.route({
     method: 'GET',
-    path: '/mem-fast',
+    path: '/mem-slow',
     config: {
       handler: (req, reply) => {
-        console.log('got /mem-fast request');
+        console.log('got /mem-slow request');
         const start = process.hrtime();
         const length = (anotherLeak.length || 1);
 
         anotherLeak.push({
           longStr: Array.from({
-            length: length * length
+            length: length * 50
           }, (v, i) => i)
         });
 
-        console.log('mem-fast', anotherLeak[length - 1].longStr.length);
+        console.log('mem-slow prev length', length);
+        console.log('mem-slow new length', anotherLeak[length - 1].longStr.length);
 
         const end = process.hrtime(start);
         reply(prettyHrtime(end));
-        console.log('sent /mem-fast response');
+        console.log('sent /mem-slow response');
       }
     }
   });
 
   server.route({
     method: 'GET',
-    path: '/mem-slow',
+    path: '/mem-fast',
     config: {
       handler: (req, reply) => {
-        console.log('got /mem-slow request');
+        console.log('got /mem-fast request');
 
         const start = process.hrtime();
 
-        const originalLeak = theLeak;
-
-        const unused = () => {
-          // referencig something that is going to be replaced
-          if (originalLeak) {
-            console.log("hi");
-          }
-        };
+        const length = (((theLeak || {}).longStr || '').length || 1);
 
         theLeak = {
           longStr: Array.from({
-            length: 1000
-          }, (v, i) => i).join('*')
+            length: length + 500
+          }, (v, i) => i)
         };
 
-        anotherLeak.push(anotherLeak.length);
-        console.log('mem-slow %d', anotherLeak.length);
+        console.log('mem-fast %d', theLeak.longStr.length);
 
         const end = process.hrtime(start);
         reply(prettyHrtime(end));
-        console.log('sent /mem-slow response');
-
+        console.log('sent /mem-fast response');
       }
     }
   });
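
For scale (taking the configs' arrivalRate of 1/s): the new /mem-fast handler keeps a single array that grows by 500 numbers per request, so its retained memory rises roughly linearly, while /mem-slow pushes a fresh array of 50 × (previous push count) numbers on every request, so its retained set grows roughly quadratically.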

@@ -1,13 +0,0 @@
-const prom = require('../prom');
-
-module.exports = (server) => {
-  server.route({
-    method: 'GET',
-    path: '/metrics',
-    handler: (req, reply) => {
-      console.log('before metrics');
-      reply(prom()).type('text/plain')
-      console.log('after metrics');
-    }
-  });
-};
|
196
spikes/leak/watch.js
Normal file
196
spikes/leak/watch.js
Normal file
@@ -0,0 +1,196 @@
+// const DOCKER_URL = url.parse(DOCKER_HOST);
+
+// {
+//   // host: DOCKER_URL.hostname,
+//   // port: DOCKER_URL.port,
+//   // ca: fs.readFileSync(path.join(DOCKER_CERT_PATH, 'ca.pem')),
+//   // cert: fs.readFileSync(path.join(DOCKER_CERT_PATH, 'cert.pem')),
+//   // key: fs.readFileSync(path.join(DOCKER_CERT_PATH, 'key.pem')),
+//   version: 'v1.24'
+// }
+// const DOCKER_CERT_PATH = process.env.DOCKER_CERT_PATH;
+// const DOCKER_HOST = process.env.DOCKER_HOST;
+//
+// if (!DOCKER_HOST || !DOCKER_CERT_PATH) {
+//   throw new Error(`
+//     Required ENV variables: DOCKER_HOST and DOCKER_CERT_PATH
+//   `);
+// }
+
+const prometheus = require('./scripts/prometheus');
+const Docker = require('dockerode');
+const url = require('url');
+const path = require('path');
+const fs = require('fs');
+const delay = require('delay');
+const until = require('apr-until');
+const forEach = require('apr-for-each');
+const filter = require('apr-filter');
+const some = require('apr-some');
+const map = require('apr-map');
+const intercept = require('apr-intercept');
+const find = require('apr-find');
+const parallel = require('apr-parallel');
+const awaitify = require('apr-awaitify');
+const flatten = require('lodash.flatten');
+const yaml = require('js-yaml');
+const axios = require('axios');
+
+const start = new Date().getTime();
+const window = 1000 * 60 * 60 * 5; // 5h
+const interval = 1000 * 15; // 15s
+const dockerComposeFilename = path.join(__dirname, 'docker-compose.yml');
+const services = yaml.safeLoad(fs.readFileSync(dockerComposeFilename, 'utf-8'));
+const docker = new Docker();
+const writeFile = awaitify(fs.writeFile);
+let restarts = 0;
+
+const getContainer = async (Id) => {
+  const container = docker.getContainer(Id);
+  const meta = await container.inspect();
+  return { container, meta };
+};
+
+const getServiceName = ({ Config }) => {
+  return Config.Labels['com.docker.compose.service'];
+};
+
+const getHrefs = async ({ NetworkSettings }) => {
+  const ports = await filter(NetworkSettings.Ports, Boolean);
+
+  const hrefs = await map(ports, async (values = []) => {
+    return await map(values, ({ HostIp, HostPort }) => url.format({
+      hostname: HostIp,
+      port: HostPort,
+      protocol: 'http:',
+      slashes: true
+    }));
+  });
+
+  return flatten(Object.keys(hrefs).map((name) => hrefs[name]));
+};
+
+const findContainer = async (name) => {
+  const ps = await docker.listContainers();
+
+  const { Id } = await find(ps, async ({ Id }) => {
+    const { container, meta } = await getContainer(Id);
+    return getServiceName(meta) === name;
+  });
+
+  return await getContainer(Id);
+}
+
+const report = async () => {
+  const { meta } = await findContainer('telemetry');
+  const hrefs = await getHrefs(meta);
+
+  if (!hrefs.length) {
+    console.error('Telemetry service unavailable');
+    return;
+  }
+
+  const [pErr, data] = await intercept(prometheus.range({
+    host: url.parse(hrefs[0]).host,
+    query: [
+      'node_memory_rss_bytes',
+      'node_memory_heap_total_bytes',
+      'node_memory_heap_used_bytes',
+      'process_heap_bytes',
+      'process_resident_memory_bytes',
+      'process_virtual_memory_bytes',
+      'process_cpu_seconds_total',
+      'process_cpu_system_seconds_total',
+      'process_cpu_user_seconds_total',
+      'node_lag_duration_milliseconds',
+      'http_request_duration_milliseconds'
+    ],
+    ago: '24h ago',
+    step: '15s'
+  }));
+
+  if (pErr) {
+    return;
+  }
+
+  const [dErr] = await intercept(writeFile(
+    path.join(__dirname, `datasets-${start}-${restarts}.json`),
+    JSON.stringify(data, null, 2),
+    'utf-8'
+  ));
+
+  return !dErr
+    ? console.log('Updated datasets.json')
+    : console.error(dErr);
+};
+
+const checkHref = async (href) => {
+  const [err] = await intercept(axios.get(`${href}/metrics`, {
+    timeout: 500
+  }));
+
+  return !!err;
+};
+
+const inspectContainer = async ({ Id }) => {
+  const { container, meta } = await getContainer(Id);
+  const hrefs = await getHrefs(meta);
+  const serviceName = getServiceName(meta);
+  const service = services[serviceName];
+  const isUnreachable = await some(hrefs, checkHref);
+
+  const shouldRestart = !!(
+    isUnreachable || (
+      service.ports &&
+      !Object.keys(hrefs).length
+    )
+  );
+
+  console.log(`${serviceName} is ${isUnreachable ? 'unreachable' : 'reachable'}`);
+
+  if (!shouldRestart) {
+    return;
+  }
+
+  console.log(`\n\nIS GOING TO RESTART: ${serviceName}\n\n`);
+
+  const artilleryServiceName = serviceName.replace(/node/, 'artillery');
+  const { container: artillery } = await findContainer(artilleryServiceName);
+
+  restarts = (serviceName === 'telemetry')
+    ? restarts + 1
+    : restarts;
+
+  await parallel([
+    () => container.restart(),
+    () => artillery.restart()
+  ]);
+};
+
+const inspect = async () => {
+  const ps = await docker.listContainers();
+  await forEach(ps, inspectContainer);
+};
+
+// const handleError = async (p) => {
+//   const [err] = await intercept(p);
+//
+//   if (err) {
+//     console.error(err);
+//   }
+// }
+
+const tick = () => parallel({
+  inspect,
+  report
+});
+
+const check = () => !((new Date().getTime() - start) > window)
+  ? delay(interval)
+  : true;
+
+until(check, tick).then(() => {
+  console.log('done');
+}, (err) => {
+  console.error(err);
+});
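
A sketch of the intended workflow (not spelled out in the commit itself): bring the compose stack up on the Docker host, then run node watch.js from spikes/leak. With the defaults above it talks to the local Docker socket via dockerode, probes every published /metrics endpoint roughly every 15 s for a 5 h window, restarts a node container together with its matching artillery container when the endpoint stops answering (bumping restarts only when the telemetry service itself goes down), and snapshots the Prometheus ranges to datasets-<start>-<restarts>.json for later analysis, e.g. with sort.js above.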

File diff suppressed because it is too large