use percentage in leak mem metrics

also: move to a docker compose format that is compatible with triton
Sérgio Ramos 2017-03-15 16:03:20 +00:00
parent 6ea7891194
commit 59ca154fd1
10 changed files with 163 additions and 217 deletions
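In short: the leak demo stops exposing raw heap byte counts and instead reports memory used as a percentage, and the compose file drops the v2-only keys (services, depends_on, host-port mappings) in favour of v1-style links so it runs on Triton. Below is a minimal sketch of the percentage calculation and the Prometheus text exposition it produces, mirroring the prom.js file added in this commit; the helper names here are illustrative and not part of the diff.

// Illustrative sketch only -- not part of the commit. It mirrors the approach
// of the prom.js file added below: report memory used as a percentage of
// total system memory and print it in Prometheus text exposition format.
const os = require('os');

// Memory used, as a percentage of total system memory.
const memUsedPerc = () => {
  const free = os.freemem();
  const total = os.totalmem();
  return ((total - free) * 100) / total;
};

// One gauge in Prometheus text exposition format.
const gauge = (name, desc, value) => [
  `# HELP ${name} ${desc}`,
  `# TYPE ${name} gauge`,
  `${name} ${value}`
].join('\n');

console.log(gauge(
  'proc_mem_used_perc',
  'Process memory used in percentage.',
  memUsedPerc()
));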

View File

@@ -1,67 +1,62 @@
-version: '2'
-services:
-  fast-node:
-    build: .
-    environment:
-      - TYPE=node
-    ports:
-      - "8001:8000"
-  fast-artillery:
-    build: .
-    environment:
-      - TYPE=artillery
-      - MODE=fast
-    depends_on:
-      - fast-node
-  another-fast-node:
-    build: .
-    environment:
-      - TYPE=node
-    ports:
-      - "8004:8000"
-  another-fast-artillery:
-    build: .
-    environment:
-      - TYPE=artillery
-      - MODE=another-fast
-    depends_on:
-      - another-fast-node
-  slow-node:
-    build: .
-    environment:
-      - TYPE=node
-    ports:
-      - "8002:8000"
-  slow-artillery:
-    build: .
-    environment:
-      - TYPE=artillery
-      - MODE=slow
-    depends_on:
-      - slow-node
-  plain-node:
-    build: .
-    environment:
-      - TYPE=node
-    ports:
-      - "8003:8000"
-  plain-artillery:
-    build: .
-    environment:
-      - TYPE=artillery
-      - MODE=plain
-    depends_on:
-      - plain-node
-  telemetry:
-    image: prom/prometheus:v1.5.2
-    ports:
-      - "9090:9090"
-    volumes:
-      - ./prometheus.yml:/etc/prometheus/prometheus.yml
-    environment:
-      - TYPE=telemetry
-    depends_on:
-      - fast-node
-      - another-fast-node
-      - slow-node
-      - plain-node
+fast-node:
+  build: .
+  environment:
+    - TYPE=node
+  ports:
+    - "8000"
+fast-artillery:
+  build: .
+  environment:
+    - TYPE=artillery
+    - MODE=fast
+  links:
+    - fast-node:fast-node
+# another-fast-node:
+#   build: .
+#   environment:
+#     - TYPE=node
+#   ports:
+#     - "8000"
+# another-fast-artillery:
+#   build: .
+#   environment:
+#     - TYPE=artillery
+#     - MODE=another-fast
+#   links:
+#     - another-fast-node:another-fast-node
+slow-node:
+  build: .
+  environment:
+    - TYPE=node
+  ports:
+    - "8000"
+slow-artillery:
+  build: .
+  environment:
+    - TYPE=artillery
+    - MODE=slow
+  links:
+    - slow-node:slow-node
+plain-node:
+  build: .
+  environment:
+    - TYPE=node
+  ports:
+    - "8000"
+plain-artillery:
+  build: .
+  environment:
+    - TYPE=artillery
+    - MODE=plain
+  links:
+    - plain-node:plain-node
+telemetry:
+  build: ./prometheus
+  ports:
+    - "9090"
+  environment:
+    - TYPE=telemetry
+  links:
+    - fast-node:fast-node
+    - slow-node:slow-node
+    - plain-node:plain-node

View File

@@ -1,17 +0,0 @@
scrape_configs:
  - job_name: 'leak-fast'
    scrape_interval: 15s
    static_configs:
      - targets: ['fast-node:8000', 'another-fast-node:8000']
  - job_name: 'leak-slow'
    scrape_interval: 15s
    static_configs:
      - targets: ['slow-node:8000']
  - job_name: 'no-leak'
    scrape_interval: 15s
    static_configs:
      - targets: ['plain-node:8000']
  # - job_name: 'leak'
  #   scrape_interval: 1s
  #   static_configs:
  #     - targets: ['fast-node:8000', 'another-fast-node:8000', 'slow-node:8000', 'plain-node:8000']

View File

@@ -0,0 +1,2 @@
FROM prom/prometheus:v1.5.2
ADD prometheus.yml /etc/prometheus/

View File

@@ -0,0 +1,17 @@
scrape_configs:
  # - job_name: 'leak-fast'
  #   scrape_interval: 15s
  #   static_configs:
  #     - targets: ['fast-node:8000', 'another-fast-node:8000']
  # - job_name: 'leak-slow'
  #   scrape_interval: 15s
  #   static_configs:
  #     - targets: ['slow-node:8000']
  # - job_name: 'no-leak'
  #   scrape_interval: 15s
  #   static_configs:
  #     - targets: ['plain-node:8000']
  - job_name: 'leak'
    scrape_interval: 1s
    static_configs:
      - targets: ['fast-node:8000', 'slow-node:8000', 'plain-node:8000']

View File

@@ -46,14 +46,14 @@ const transform = (res) => {
 const range = module.exports.range = async ({
   query = [],
-  ago = '1h ago',
+  ago = '24h ago',
   step = '15s',
   hostname = 'localhost'
 }) => {
 
   const end = timestamp(new Date());
   const start = timestamp(date(ago));
 
-  const ranges = await map(query, async (query) => {
+  const ranges = await map(forceArray(query), async (query) => {
     return await got(url.format({
       protocol: 'http:',
       slashes: true,
@@ -79,11 +79,21 @@ const query = module.exports.query = async ({
   query = []
 }) => {
   const res = await map(query, async (query) => {
+    console.log(url.format({
+      protocol: 'http:',
+      slashes: true,
+      port: '9090',
+      host: hostname,
+      pathname: '/api/v1/query',
+      query: {
+        query: query
+      }
+    }));
     return await got(url.format({
       protocol: 'http:',
       slashes: true,
       port: '9090',
-      hostname: hostname,
+      host: hostname,
       pathname: '/api/v1/query',
       query: {
         query: query
@@ -104,7 +114,7 @@ const tree = module.exports.tree = async ({
       protocol: 'http:',
       slashes: true,
       port: '9090',
-      hostname: hostname,
+      host: hostname,
       pathname: '/api/v1/series',
       search: qs.stringify({
         match: query

View File

@@ -1,4 +1,3 @@
-const epimetheus = require('epimetheus');
 const requireDir = require('require-dir');
 const plugins = require('./plugins');
 const routes = requireDir('./routes');
@@ -11,8 +10,6 @@ server.connection({
   port: 8000
 });
 
-epimetheus.instrument(server);
-
 server.register(plugins, (err) => {
   if (err) {
     throw err;

View File

@@ -1,103 +0,0 @@
const relativeDate = require('relative-date');
const statistics = require('simple-statistics');
const prometheus = require('../../scripts/prometheus');
const async = require('async');

const cdm = {};

const calc = (sample) => {
  return {
    firstQuartile: statistics.quantile(sample, 0.25),
    median: statistics.median(sample),
    thirdQuartile: statistics.quantile(sample, 0.75),
    max: statistics.max(sample),
    min: statistics.min(sample),
    stddev: statistics.sampleStandardDeviation(sample)
  };
};

const getMem = ({
  job
}, fn) => {
  prometheus.query({
    query: [`node_memory_heap_used_bytes{job="${job}"}`]
  }).then((res) => {
    if (!res || !res[job]) {
      return null;
    }

    const aggregate = calc(Object.keys(res[job]).map((inst) => {
      return Number(res[job][inst].node_memory_heap_used_bytes[1]);
    }));

    const instances = Object.keys(res[job]).reduce((sum, inst) => {
      return Object.assign(sum, {
        [inst]: calc([Number(res[job][inst].node_memory_heap_used_bytes[1])])
      });
    }, {});

    return {
      raw: res[job],
      aggregate,
      instances
    };
  }).then((res) => {
    return fn(null, res);
  }).catch((err) => {
    return fn(err);
  });
};

const getStats = (ctx, fn) => {
  async.parallel({
    mem: async.apply(getMem, ctx)
  }, fn);
};

module.exports = (server) => ({
  on: (job) => {
    console.log('on', job);

    if (cdm[job] && (cdm[job].sockets > 0)) {
      cdm[job].sockets += 1;
      return;
    }

    let messageId = 0;

    const update = () => {
      console.log(`publishing /stats/${job}/${messageId += 1}`);

      getStats({
        job: job
      }, (err, stats) => {
        if (err) {
          return console.error(err);
        }

        server.publish(`/stats/${job}`, {
          when: new Date().getTime(),
          stats
        });
      });
    };

    cdm[job] = {
      interval: setInterval(update, 1000),
      sockets: 1
    };
  },
  off: (job) => {
    console.log('off', job);

    if (!(cdm[job].sockets -= 1)) {
      clearInterval(cdm[job].interval);
    }
  }
});

module.exports.tree = (ctx) => {
  return prometheus.tree({
    query: ['node_memory_heap_used_bytes']
  });
};

View File

@@ -0,0 +1,43 @@
const os = require('os');
const process = require('process');
// const pidusage = require('pidusage');

const metric = ({ name, desc, type, value }) => `
# HELP ${name} ${desc}
# TYPE ${name} ${type || 'gauge'}
${name} ${value}
`;

const metrics = {
  process_mem_used_perc: () => {
    const free = os.freemem();
    const total = os.totalmem();
    const perc = ((total - free) * 100) / total;

    //
    // console.log({
    //   available: os.totalmem(),
    //   free: os.freemem(),
    //   heapTotal,
    //   prom: {
    //     name: 'proc_mem_used_perc',
    //     desc: 'Process memory used in percentage.',
    //     value: (heapTotal * 100) / available
    //   }
    // });
    //
    // pidusage.stat(process.pid, function(err, stat) {
    //   console.log(stat);
    // });

    return metric({
      name: 'proc_mem_used_perc',
      desc: 'Process memory used in percentage.',
      value: perc
    });
  }
};

module.exports = () => Object.keys(metrics)
  .map((name) => metrics[name]())
  .join('\n');
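For reference, a minimal usage sketch of the module above. The ./prom path and the printed value are assumptions made for illustration; the route that actually serves this text over HTTP is in the last diff below.

// Illustrative only -- prints the exposition text produced by the module above.
const prom = require('./prom');

// Expected shape of the output (the value will vary):
//
//   # HELP proc_mem_used_perc Process memory used in percentage.
//   # TYPE proc_mem_used_perc gauge
//   proc_mem_used_perc 37.2
//
console.log(prom());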

View File

@@ -16,16 +16,21 @@ module.exports = (server) => {
     path: '/mem-fast',
     config: {
       handler: (req, reply) => {
+        console.log('got /mem-fast request');
         const start = process.hrtime();
+        const length = (anotherLeak.length || 1);
 
         anotherLeak.push({
-          longStr: new Array(Math.ceil(anotherLeak.length * 2)).map((v, i) => i)
+          longStr: Array.from({
+            length: length * length
+          }, (v, i) => i)
         });
 
-        console.log('mem-fast %d', Math.ceil(anotherLeak.length * 2));
+        console.log('mem-fast', anotherLeak[length - 1].longStr.length);
 
         const end = process.hrtime(start);
         reply(prettyHrtime(end));
+        console.log('sent /mem-fast response');
       }
     }
   });
@@ -35,6 +40,8 @@ module.exports = (server) => {
     path: '/mem-slow',
     config: {
       handler: (req, reply) => {
+        console.log('got /mem-slow request');
+
         const start = process.hrtime();
 
         const originalLeak = theLeak;
@@ -47,7 +54,9 @@ module.exports = (server) => {
         };
 
         theLeak = {
-          longStr: new Array(1000).join('*')
+          longStr: Array.from({
+            length: 1000
+          }, (v, i) => i).join('*')
         };
 
         anotherLeak.push(anotherLeak.length);
@@ -55,6 +64,8 @@ module.exports = (server) => {
 
         const end = process.hrtime(start);
         reply(prettyHrtime(end));
+
+        console.log('sent /mem-slow response');
       }
     }
   });
@@ -64,12 +75,16 @@ module.exports = (server) => {
     path: '/cpu',
     config: {
       handler: (req, reply) => {
+        console.log('got /cpu request');
+
         const start = process.hrtime();
 
         fibonacci(40);
 
         const end = process.hrtime(start);
         reply(prettyHrtime(end));
+
+        console.log('sent /cpu response');
       }
     }
   });
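A note on the longStr changes above: new Array(n).map(fn) yields a sparse array whose holes map() skips entirely, so the old code allocated almost nothing; Array.from({ length: n }, fn) builds a densely filled array, which is what the leak endpoints are meant to do. A small sketch of the difference, not part of the commit:

// Sparse vs. dense construction -- illustrative only.
const sparse = new Array(5).map((v, i) => i);
console.log(sparse);   // [ <5 empty items> ]  -- the callback never ran

const dense = Array.from({ length: 5 }, (v, i) => i);
console.log(dense);    // [ 0, 1, 2, 3, 4 ]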

View File

@@ -1,26 +1,13 @@
-const Metric = require('../metric');
+const prom = require('../prom');
 
 module.exports = (server) => {
-  const metric = Metric(server);
-
   server.route({
     method: 'GET',
-    path: '/job-tree',
-    config: {
-      handler: (request, reply) => reply(Metric.tree())
+    path: '/metrics',
+    handler: (req, reply) => {
+      console.log('before metrics');
+      reply(prom()).type('text/plain')
+      console.log('after metrics');
     }
   });
-
-  server.subscription('/stats/{id}', {
-    onSubscribe: (socket, path, params, next) => {
-      console.log('onSubscribe');
-      metric.on(params.id);
-      next();
-    },
-    onUnsubscribe: (socket, path, params, next) => {
-      console.log('onUnsubscribe');
-      metric.off(params.id);
-      next();
-    }
-  });
 };