use percentage in leak mem metrics

Also: move to a Docker Compose format that is compatible with Triton.
This commit is contained in:
Sérgio Ramos 2017-03-15 16:03:20 +00:00
parent 6ea7891194
commit 59ca154fd1
10 changed files with 163 additions and 217 deletions

View File

@ -1,67 +1,62 @@
# Compose v2 definition of the leak-demo stack: one node service per leak
# profile (fast / another-fast / slow / plain), each paired with an artillery
# load generator, plus a Prometheus instance scraping all of the nodes.
version: '2'
services:
  fast-node:
    build: .
    environment:
      - TYPE=node
    ports:
      - "8001:8000"   # host 8001 -> container 8000
  fast-artillery:
    build: .
    environment:
      - TYPE=artillery
      - MODE=fast
    depends_on:
      - fast-node
  another-fast-node:
    build: .
    environment:
      - TYPE=node
    ports:
      - "8004:8000"
  another-fast-artillery:
    build: .
    environment:
      - TYPE=artillery
      - MODE=another-fast
    depends_on:
      - another-fast-node
  slow-node:
    build: .
    environment:
      - TYPE=node
    ports:
      - "8002:8000"
  slow-artillery:
    build: .
    environment:
      - TYPE=artillery
      - MODE=slow
    depends_on:
      - slow-node
  plain-node:
    build: .
    environment:
      - TYPE=node
    ports:
      - "8003:8000"
  plain-artillery:
    build: .
    environment:
      - TYPE=artillery
      - MODE=plain
    depends_on:
      - plain-node
  # Prometheus scrapes every *-node service; its config is bind-mounted
  # from the host rather than baked into an image.
  telemetry:
    image: prom/prometheus:v1.5.2
    ports:
      - "9090:9090"
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
    environment:
      - TYPE=telemetry
    depends_on:
      - fast-node
      - another-fast-node
      - slow-node
      - plain-node
# Compose v1 format (no `version:` / `services:` keys, services at top level)
# for compatibility with Joyent Triton. Ports publish only the container port
# ("8000") and service wiring uses `links` instead of `depends_on`.
fast-node:
  build: .
  environment:
    - TYPE=node
  ports:
    - "8000"
fast-artillery:
  build: .
  environment:
    - TYPE=artillery
    - MODE=fast
  links:
    - fast-node:fast-node
# NOTE: the another-fast pair is disabled in this configuration.
# another-fast-node:
#   build: .
#   environment:
#     - TYPE=node
#   ports:
#     - "8000"
# another-fast-artillery:
#   build: .
#   environment:
#     - TYPE=artillery
#     - MODE=another-fast
#   links:
#     - another-fast-node:another-fast-node
slow-node:
  build: .
  environment:
    - TYPE=node
  ports:
    - "8000"
slow-artillery:
  build: .
  environment:
    - TYPE=artillery
    - MODE=slow
  links:
    - slow-node:slow-node
plain-node:
  build: .
  environment:
    - TYPE=node
  ports:
    - "8000"
plain-artillery:
  build: .
  environment:
    - TYPE=artillery
    - MODE=plain
  links:
    - plain-node:plain-node
# Prometheus image is now built locally (./prometheus bakes the config in)
# instead of bind-mounting prometheus.yml — volumes are not Triton-friendly.
telemetry:
  build: ./prometheus
  ports:
    - "9090"
  environment:
    - TYPE=telemetry
  links:
    - fast-node:fast-node
    - slow-node:slow-node
    - plain-node:plain-node

View File

@ -1,17 +0,0 @@
# Prometheus scrape configuration: one job per leak profile, 15s interval.
scrape_configs:
  - job_name: 'leak-fast'
    scrape_interval: 15s
    static_configs:
      - targets: ['fast-node:8000', 'another-fast-node:8000']
  - job_name: 'leak-slow'
    scrape_interval: 15s
    static_configs:
      - targets: ['slow-node:8000']
  - job_name: 'no-leak'
    scrape_interval: 15s
    static_configs:
      - targets: ['plain-node:8000']
  # Disabled alternative: a single high-frequency job covering every node.
  # - job_name: 'leak'
  #   scrape_interval: 1s
  #   static_configs:
  #     - targets: ['fast-node:8000', 'another-fast-node:8000', 'slow-node:8000', 'plain-node:8000']

View File

@ -0,0 +1,2 @@
# Prometheus image with the scrape configuration baked in, so the stack needs
# no host bind-mount (keeps the compose file Triton-compatible).
FROM prom/prometheus:v1.5.2
# COPY is preferred over ADD for plain local files (no tar/URL magic).
COPY prometheus.yml /etc/prometheus/

View File

@ -0,0 +1,17 @@
# Prometheus scrape configuration: a single 'leak' job at a 1s interval
# covering every active node service.
scrape_configs:
  # Disabled per-profile jobs (15s interval), kept for reference.
  # - job_name: 'leak-fast'
  #   scrape_interval: 15s
  #   static_configs:
  #     - targets: ['fast-node:8000', 'another-fast-node:8000']
  # - job_name: 'leak-slow'
  #   scrape_interval: 15s
  #   static_configs:
  #     - targets: ['slow-node:8000']
  # - job_name: 'no-leak'
  #   scrape_interval: 15s
  #   static_configs:
  #     - targets: ['plain-node:8000']
  - job_name: 'leak'
    scrape_interval: 1s
    static_configs:
      - targets: ['fast-node:8000', 'slow-node:8000', 'plain-node:8000']

View File

@ -46,14 +46,14 @@ const transform = (res) => {
const range = module.exports.range = async ({
query = [],
ago = '1h ago',
ago = '24h ago',
step = '15s',
hostname = 'localhost'
}) => {
const end = timestamp(new Date());
const start = timestamp(date(ago));
const ranges = await map(query, async (query) => {
const ranges = await map(forceArray(query), async (query) => {
return await got(url.format({
protocol: 'http:',
slashes: true,
@ -79,11 +79,21 @@ const query = module.exports.query = async ({
query = []
}) => {
const res = await map(query, async (query) => {
console.log(url.format({
protocol: 'http:',
slashes: true,
port: '9090',
host: hostname,
pathname: '/api/v1/query',
query: {
query: query
}
}));
return await got(url.format({
protocol: 'http:',
slashes: true,
port: '9090',
hostname: hostname,
host: hostname,
pathname: '/api/v1/query',
query: {
query: query
@ -104,7 +114,7 @@ const tree = module.exports.tree = async ({
protocol: 'http:',
slashes: true,
port: '9090',
hostname: hostname,
host: hostname,
pathname: '/api/v1/series',
search: qs.stringify({
match: query

View File

@ -1,4 +1,3 @@
const epimetheus = require('epimetheus');
const requireDir = require('require-dir');
const plugins = require('./plugins');
const routes = requireDir('./routes');
@ -11,8 +10,6 @@ server.connection({
port: 8000
});
epimetheus.instrument(server);
server.register(plugins, (err) => {
if (err) {
throw err;

View File

@ -1,103 +0,0 @@
const relativeDate = require('relative-date');
// NOTE(review): relativeDate appears unused in this file — confirm and drop.
const statistics = require('simple-statistics');
const prometheus = require('../../scripts/prometheus');
const async = require('async');

// Per-job subscription state: job name -> { interval, sockets }.
// `interval` is the 1s publish timer, `sockets` a subscriber refcount.
const cdm = {};
// Summary statistics for one numeric sample set (quartiles, extremes, spread).
const calc = (sample) => {
  const quartile = (p) => statistics.quantile(sample, p);
  return {
    firstQuartile: quartile(0.25),
    median: statistics.median(sample),
    thirdQuartile: quartile(0.75),
    max: statistics.max(sample),
    min: statistics.min(sample),
    stddev: statistics.sampleStandardDeviation(sample)
  };
};
// Fetch current heap usage for one Prometheus job and hand a summary to the
// node-style callback `fn` as { raw, aggregate, instances } (or null when the
// job has no data yet).
// Assumes res[job][instance].node_memory_heap_used_bytes is a
// [timestamp, value] pair — TODO confirm against scripts/prometheus.
const getMem = ({
  job
}, fn) => {
  prometheus.query({
    query: [`node_memory_heap_used_bytes{job="${job}"}`]
  }).then((res) => {
    // No data for this job yet: propagate a null result.
    if (!res || !res[job]) {
      return null
    }
    // Aggregate stats across every instance of the job.
    const aggregate = calc(Object.keys(res[job]).map((inst) => {
      return Number(res[job][inst].node_memory_heap_used_bytes[1]);
    }));
    // Per-instance stats (single sample each, so most fields are degenerate).
    const instances = Object.keys(res[job]).reduce((sum, inst) => {
      return Object.assign(sum, {
        [inst]: calc([Number(res[job][inst].node_memory_heap_used_bytes[1])])
      })
    }, {});
    return {
      raw: res[job],
      aggregate,
      instances
    };
  }).then((res) => {
    // Bridge the promise back into callback style for async.parallel.
    return fn(null, res);
  }).catch((err) => {
    return fn(err);
  });
};
// Gather every stat family for a job in parallel (currently just memory).
const getStats = (ctx, fn) => {
  async.parallel({
    mem: (done) => getMem(ctx, done)
  }, fn);
};
module.exports = (server) => ({
on: (job) => {
console.log('on', job);
if (cdm[job] && (cdm[job].sockets > 0)) {
cdm[job].sockets += 1;
return;
}
let messageId = 0;
const update = () => {
console.log(`publishing /stats/${job}/${messageId += 1}`);
getStats({
job: job
}, (err, stats) => {
if (err) {
return console.error(err);
}
server.publish(`/stats/${job}`, {
when: new Date().getTime(),
stats
});
});
};
cdm[job] = {
interval: setInterval(update, 1000),
sockets: 1
};
},
off: (job) => {
console.log('off', job);
if (!(cdm[job].sockets -= 1)) {
clearInterval(cdm[job].interval);
}
}
});
// Series tree for the UI. NOTE(review): `ctx` is accepted but ignored — the
// query is hard-coded to the heap-usage metric; confirm callers before relying
// on the parameter.
module.exports.tree = (ctx) => {
  return prometheus.tree({
    query: ['node_memory_heap_used_bytes']
  });
};

View File

@ -0,0 +1,43 @@
const os = require('os');
const process = require('process');
// const pidusage = require('pidusage');
// Render one metric in the Prometheus text exposition format: a HELP line,
// a TYPE line (defaulting to gauge when no type is given) and the sample.
const metric = ({ name, desc, type, value }) => {
  const kind = type || 'gauge';
  return `
# HELP ${name} ${desc}
# TYPE ${name} ${kind}
${name} ${value}
`;
};
// Registry of metric producers; each returns one Prometheus exposition block.
// Keys are kept identical to the metric name they emit (the old key
// `process_mem_used_perc` disagreed with the emitted `proc_mem_used_perc`).
const metrics = {
  proc_mem_used_perc: () => {
    // NOTE(review): this computes SYSTEM-wide memory usage from
    // os.freemem()/os.totalmem(), not this process's heap — the name and
    // description overstate it. Confirm whether process.memoryUsage() was
    // intended before renaming the metric.
    const free = os.freemem();
    const total = os.totalmem();
    const perc = ((total - free) * 100) / total;
    return metric({
      name: 'proc_mem_used_perc',
      desc: 'Process memory used in percentage.',
      value: perc
    });
  }
};
module.exports = () => Object.keys(metrics)
.map((name) => metrics[name]())
.join('\n');

View File

@ -16,16 +16,21 @@ module.exports = (server) => {
path: '/mem-fast',
config: {
handler: (req, reply) => {
console.log('got /mem-fast request');
const start = process.hrtime();
const length = (anotherLeak.length || 1);
anotherLeak.push({
longStr: new Array(Math.ceil(anotherLeak.length * 2)).map((v, i) => i)
longStr: Array.from({
length: length * length
}, (v, i) => i)
});
console.log('mem-fast %d', Math.ceil(anotherLeak.length * 2));
console.log('mem-fast', anotherLeak[length - 1].longStr.length);
const end = process.hrtime(start);
reply(prettyHrtime(end));
console.log('sent /mem-fast response');
}
}
});
@ -35,6 +40,8 @@ module.exports = (server) => {
path: '/mem-slow',
config: {
handler: (req, reply) => {
console.log('got /mem-slow request');
const start = process.hrtime();
const originalLeak = theLeak;
@ -47,7 +54,9 @@ module.exports = (server) => {
};
theLeak = {
longStr: new Array(1000).join('*')
longStr: Array.from({
length: 1000
}, (v, i) => i).join('*')
};
anotherLeak.push(anotherLeak.length);
@ -55,6 +64,8 @@ module.exports = (server) => {
const end = process.hrtime(start);
reply(prettyHrtime(end));
console.log('sent /mem-slow response');
}
}
});
@ -64,12 +75,16 @@ module.exports = (server) => {
path: '/cpu',
config: {
handler: (req, reply) => {
console.log('got /cpu request');
const start = process.hrtime();
fibonacci(40);
const end = process.hrtime(start);
reply(prettyHrtime(end));
console.log('sent /cpu response');
}
}
});

View File

@ -1,26 +1,13 @@
const Metric = require('../metric');
const prom = require('../prom');
module.exports = (server) => {
const metric = Metric(server);
server.route({
method: 'GET',
path: '/job-tree',
config: {
handler: (request, reply) => reply(Metric.tree())
}
});
server.subscription('/stats/{id}', {
onSubscribe: (socket, path, params, next) => {
console.log('onSubscribe');
metric.on(params.id);
next();
},
onUnsubscribe: (socket, path, params, next) => {
console.log('onUnsubscribe');
metric.off(params.id);
next();
path: '/metrics',
handler: (req, reply) => {
console.log('before metrics');
reply(prom()).type('text/plain')
console.log('after metrics');
}
});
};