From 59ca154fd165f4c1b47b2f482921462720f77034 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Se=CC=81rgio=20Ramos?=
Date: Wed, 15 Mar 2017 16:03:20 +0000
Subject: [PATCH] use percentage in leak mem metrics

also: move to a docker compose format that is compatible with triton
---
 spikes/leak/docker-compose.yml           | 129 +++++++++++------------
 spikes/leak/prometheus.yml               |  17 ---
 spikes/leak/prometheus/Dockerfile        |   2 +
 spikes/leak/prometheus/prometheus.yml    |  17 +++
 spikes/leak/scripts/prometheus.js        |  18 +++-
 spikes/leak/src/server/index.js          |   3 -
 spikes/leak/src/server/metric.js         | 103 ------------------
 spikes/leak/src/server/prom.js           |  43 ++++++++
 spikes/leak/src/server/routes/leak.js    |  21 +++-
 spikes/leak/src/server/routes/metrics.js |  27 ++---
 10 files changed, 163 insertions(+), 217 deletions(-)
 delete mode 100644 spikes/leak/prometheus.yml
 create mode 100644 spikes/leak/prometheus/Dockerfile
 create mode 100644 spikes/leak/prometheus/prometheus.yml
 delete mode 100644 spikes/leak/src/server/metric.js
 create mode 100644 spikes/leak/src/server/prom.js

diff --git a/spikes/leak/docker-compose.yml b/spikes/leak/docker-compose.yml
index 414d520c..1e214388 100644
--- a/spikes/leak/docker-compose.yml
+++ b/spikes/leak/docker-compose.yml
@@ -1,67 +1,62 @@
-version: '2'
-services:
-  fast-node:
-    build: .
-    environment:
-      - TYPE=node
-    ports:
-      - "8001:8000"
-  fast-artillery:
-    build: .
-    environment:
-      - TYPE=artillery
-      - MODE=fast
-    depends_on:
-      - fast-node
-  another-fast-node:
-    build: .
-    environment:
-      - TYPE=node
-    ports:
-      - "8004:8000"
-  another-fast-artillery:
-    build: .
-    environment:
-      - TYPE=artillery
-      - MODE=another-fast
-    depends_on:
-      - another-fast-node
-  slow-node:
-    build: .
-    environment:
-      - TYPE=node
-    ports:
-      - "8002:8000"
-  slow-artillery:
-    build: .
-    environment:
-      - TYPE=artillery
-      - MODE=slow
-    depends_on:
-      - slow-node
-  plain-node:
-    build: .
-    environment:
-      - TYPE=node
-    ports:
-      - "8003:8000"
-  plain-artillery:
-    build: .
-    environment:
-      - TYPE=artillery
-      - MODE=plain
-    depends_on:
-      - plain-node
-  telemetry:
-    image: prom/prometheus:v1.5.2
-    ports:
-      - "9090:9090"
-    volumes:
-      - ./prometheus.yml:/etc/prometheus/prometheus.yml
-    environment:
-      - TYPE=telemetry
-    depends_on:
-      - fast-node
-      - another-fast-node
-      - slow-node
-      - plain-node
+fast-node:
+  build: .
+  environment:
+    - TYPE=node
+  ports:
+    - "8000"
+fast-artillery:
+  build: .
+  environment:
+    - TYPE=artillery
+    - MODE=fast
+  links:
+    - fast-node:fast-node
+# another-fast-node:
+#   build: .
+#   environment:
+#     - TYPE=node
+#   ports:
+#     - "8000"
+# another-fast-artillery:
+#   build: .
+#   environment:
+#     - TYPE=artillery
+#     - MODE=another-fast
+#   links:
+#     - another-fast-node:another-fast-node
+slow-node:
+  build: .
+  environment:
+    - TYPE=node
+  ports:
+    - "8000"
+slow-artillery:
+  build: .
+  environment:
+    - TYPE=artillery
+    - MODE=slow
+  links:
+    - slow-node:slow-node
+plain-node:
+  build: .
+  environment:
+    - TYPE=node
+  ports:
+    - "8000"
+plain-artillery:
+  build: .
+  environment:
+    - TYPE=artillery
+    - MODE=plain
+  links:
+    - plain-node:plain-node
+telemetry:
+  build: ./prometheus
+  ports:
+    - "9090"
+  environment:
+    - TYPE=telemetry
+  links:
+    - fast-node:fast-node
+    - slow-node:slow-node
+    - plain-node:plain-node
diff --git a/spikes/leak/prometheus.yml b/spikes/leak/prometheus.yml
deleted file mode 100644
index f85f04a3..00000000
--- a/spikes/leak/prometheus.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-scrape_configs:
-  - job_name: 'leak-fast'
-    scrape_interval: 15s
-    static_configs:
-      - targets: ['fast-node:8000', 'another-fast-node:8000']
-  - job_name: 'leak-slow'
-    scrape_interval: 15s
-    static_configs:
-      - targets: ['slow-node:8000']
-  - job_name: 'no-leak'
-    scrape_interval: 15s
-    static_configs:
-      - targets: ['plain-node:8000']
-  # - job_name: 'leak'
-  #   scrape_interval: 1s
-  #   static_configs:
-  #     - targets: ['fast-node:8000', 'another-fast-node:8000', 'slow-node:8000', 'plain-node:8000']
diff --git a/spikes/leak/prometheus/Dockerfile b/spikes/leak/prometheus/Dockerfile
new file mode 100644
index 00000000..28d82054
--- /dev/null
+++ b/spikes/leak/prometheus/Dockerfile
@@ -0,0 +1,2 @@
+FROM prom/prometheus:v1.5.2
+ADD prometheus.yml /etc/prometheus/
diff --git a/spikes/leak/prometheus/prometheus.yml b/spikes/leak/prometheus/prometheus.yml
new file mode 100644
index 00000000..f5babcde
--- /dev/null
+++ b/spikes/leak/prometheus/prometheus.yml
@@ -0,0 +1,17 @@
+scrape_configs:
+  # - job_name: 'leak-fast'
+  #   scrape_interval: 15s
+  #   static_configs:
+  #     - targets: ['fast-node:8000', 'another-fast-node:8000']
+  # - job_name: 'leak-slow'
+  #   scrape_interval: 15s
+  #   static_configs:
+  #     - targets: ['slow-node:8000']
+  # - job_name: 'no-leak'
+  #   scrape_interval: 15s
+  #   static_configs:
+  #     - targets: ['plain-node:8000']
+  - job_name: 'leak'
+    scrape_interval: 1s
+    static_configs:
+      - targets: ['fast-node:8000', 'slow-node:8000', 'plain-node:8000']
diff --git a/spikes/leak/scripts/prometheus.js b/spikes/leak/scripts/prometheus.js
index 28bb4983..7355c5dc 100644
--- a/spikes/leak/scripts/prometheus.js
+++ b/spikes/leak/scripts/prometheus.js
@@ -46,14 +46,14 @@ const transform = (res) => {
 
 const range = module.exports.range = async ({
   query = [],
-  ago = '1h ago',
+  ago = '24h ago',
   step = '15s',
   hostname = 'localhost'
 }) => {
   const end = timestamp(new Date());
   const start = timestamp(date(ago));
 
-  const ranges = await map(query, async (query) => {
+  const ranges = await map(forceArray(query), async (query) => {
     return await got(url.format({
       protocol: 'http:',
       slashes: true,
@@ -79,11 +79,21 @@ const query = module.exports.query = async ({
   query = []
 }) => {
   const res = await map(query, async (query) => {
+    console.log(url.format({
+      protocol: 'http:',
+      slashes: true,
+      port: '9090',
+      host: hostname,
+      pathname: '/api/v1/query',
+      query: {
+        query: query
+      }
+}));
     return await got(url.format({
       protocol: 'http:',
       slashes: true,
       port: '9090',
-      hostname: hostname,
+      host: hostname,
      pathname: '/api/v1/query',
       query: {
         query: query
@@ -104,7 +114,7 @@ const tree = module.exports.tree = async ({
     protocol: 'http:',
     slashes: true,
     port: '9090',
-    hostname: hostname,
+    host: hostname,
     pathname: '/api/v1/series',
     search: qs.stringify({
       match: query
diff --git a/spikes/leak/src/server/index.js b/spikes/leak/src/server/index.js
index a4c1ef4c..ca6d85e9 100644
--- a/spikes/leak/src/server/index.js
+++ b/spikes/leak/src/server/index.js
@@ -1,4 +1,3 @@
-const epimetheus = require('epimetheus');
 const requireDir = require('require-dir');
 const plugins = require('./plugins');
 const routes = requireDir('./routes');
@@ -11,8 +10,6 @@ server.connection({
   port: 8000
 });
 
-epimetheus.instrument(server);
-
 server.register(plugins, (err) => {
   if (err) {
     throw err;
diff --git a/spikes/leak/src/server/metric.js b/spikes/leak/src/server/metric.js
deleted file mode 100644
index 73bd116b..00000000
--- a/spikes/leak/src/server/metric.js
+++ /dev/null
@@ -1,103 +0,0 @@
-const relativeDate = require('relative-date');
-const statistics = require('simple-statistics');
-const prometheus = require('../../scripts/prometheus');
-const async = require('async');
-
-const cdm = {};
-
-const calc = (sample) => {
-  return {
-    firstQuartile: statistics.quantile(sample, 0.25),
-    median: statistics.median(sample),
-    thirdQuartile: statistics.quantile(sample, 0.75),
-    max: statistics.max(sample),
-    min: statistics.min(sample),
-    stddev: statistics.sampleStandardDeviation(sample)
-  };
-};
-
-const getMem = ({
-  job
-}, fn) => {
-  prometheus.query({
-    query: [`node_memory_heap_used_bytes{job="${job}"}`]
-  }).then((res) => {
-    if (!res || !res[job]) {
-      return null
-    }
-
-    const aggregate = calc(Object.keys(res[job]).map((inst) => {
-      return Number(res[job][inst].node_memory_heap_used_bytes[1]);
-    }));
-
-    const instances = Object.keys(res[job]).reduce((sum, inst) => {
-      return Object.assign(sum, {
-        [inst]: calc([Number(res[job][inst].node_memory_heap_used_bytes[1])])
-      })
-    }, {});
-
-    return {
-      raw: res[job],
-      aggregate,
-      instances
-    };
-  }).then((res) => {
-    return fn(null, res);
-  }).catch((err) => {
-    return fn(err);
-  });
-};
-
-const getStats = (ctx, fn) => {
-  async.parallel({
-    mem: async.apply(getMem, ctx)
-  }, fn);
-};
-
-module.exports = (server) => ({
-  on: (job) => {
-    console.log('on', job);
-
-    if (cdm[job] && (cdm[job].sockets > 0)) {
-      cdm[job].sockets += 1;
-      return;
-    }
-
-    let messageId = 0;
-
-    const update = () => {
-      console.log(`publishing /stats/${job}/${messageId += 1}`);
-
-      getStats({
-        job: job
-      }, (err, stats) => {
-        if (err) {
-          return console.error(err);
-        }
-
-        server.publish(`/stats/${job}`, {
-          when: new Date().getTime(),
-          stats
-        });
-      });
-    };
-
-    cdm[job] = {
-      interval: setInterval(update, 1000),
-      sockets: 1
-    };
-  },
-  off: (job) => {
-    console.log('off', job);
-
-    if (!(cdm[job].sockets -= 1)) {
-      clearInterval(cdm[job].interval);
-    }
-  }
-});
-
-module.exports.tree = (ctx) => {
-  return prometheus.tree({
-    query: ['node_memory_heap_used_bytes']
-  });
-};
diff --git a/spikes/leak/src/server/prom.js b/spikes/leak/src/server/prom.js
new file mode 100644
index 00000000..885e3e7b
--- /dev/null
+++ b/spikes/leak/src/server/prom.js
@@ -0,0 +1,43 @@
+const os = require('os');
+const process = require('process');
+// const pidusage = require('pidusage');
+
+const metric = ({ name, desc, type, value }) => `
+# HELP ${name} ${desc}
+# TYPE ${name} ${type || 'gauge'}
+${name} ${value}
+`;
+
+const metrics = {
+  process_mem_used_perc: () => {
+    const free = os.freemem();
+    const total = os.totalmem();
+    const perc = ((total - free) * 100) / total;
+
+    //
+    // console.log({
+    //   available: os.totalmem(),
+    //   free: os.freemem(),
+    //   heapTotal,
+    //   prom: {
+    //     name: 'proc_mem_used_perc',
+    //     desc: 'Process memory used in percentage.',
+    //     value: (heapTotal * 100) / available
+    //   }
+    // });
+    //
+    // pidusage.stat(process.pid, function(err, stat) {
+    //   console.log(stat);
+    // });
+
+    return metric({
+      name: 'proc_mem_used_perc',
+      desc: 'Process memory used in percentage.',
+      value: perc
+    });
+  }
+}
+
+module.exports = () => Object.keys(metrics)
+  .map((name) => metrics[name]())
+  .join('\n');
diff --git a/spikes/leak/src/server/routes/leak.js b/spikes/leak/src/server/routes/leak.js
index a63c5974..6442c407 100644
--- a/spikes/leak/src/server/routes/leak.js
+++ b/spikes/leak/src/server/routes/leak.js
@@ -16,16 +16,21 @@ module.exports = (server) => {
     path: '/mem-fast',
     config: {
       handler: (req, reply) => {
+        console.log('got /mem-fast request');
         const start = process.hrtime();
 
+        const length = (anotherLeak.length || 1);
         anotherLeak.push({
-          longStr: new Array(Math.ceil(anotherLeak.length * 2)).map((v, i) => i)
+          longStr: Array.from({
+            length: length * length
+          }, (v, i) => i)
         });
 
-        console.log('mem-fast %d', Math.ceil(anotherLeak.length * 2));
+        console.log('mem-fast', anotherLeak[length - 1].longStr.length);
 
         const end = process.hrtime(start);
         reply(prettyHrtime(end));
+        console.log('sent /mem-fast response');
       }
     }
   });
@@ -35,6 +40,8 @@ module.exports = (server) => {
     path: '/mem-slow',
     config: {
       handler: (req, reply) => {
+        console.log('got /mem-slow request');
+
         const start = process.hrtime();
 
         const originalLeak = theLeak;
@@ -47,7 +54,9 @@ module.exports = (server) => {
         };
 
         theLeak = {
-          longStr: new Array(1000).join('*')
+          longStr: Array.from({
+            length: 1000
+          }, (v, i) => i).join('*')
         };
 
         anotherLeak.push(anotherLeak.length);
@@ -55,6 +64,8 @@ module.exports = (server) => {
 
         const end = process.hrtime(start);
         reply(prettyHrtime(end));
+        console.log('sent /mem-slow response');
+
       }
     }
   });
@@ -64,12 +75,16 @@ module.exports = (server) => {
     path: '/cpu',
     config: {
       handler: (req, reply) => {
+        console.log('got /cpu request');
+
        const start = process.hrtime();
 
         fibonacci(40);
 
         const end = process.hrtime(start);
         reply(prettyHrtime(end));
+
+        console.log('sent /cpu response');
       }
     }
   });
diff --git a/spikes/leak/src/server/routes/metrics.js b/spikes/leak/src/server/routes/metrics.js
index 1767b41b..e0ff712e 100644
--- a/spikes/leak/src/server/routes/metrics.js
+++ b/spikes/leak/src/server/routes/metrics.js
@@ -1,26 +1,13 @@
-const Metric = require('../metric');
+const prom = require('../prom');
 
 module.exports = (server) => {
-  const metric = Metric(server);
-
   server.route({
     method: 'GET',
-    path: '/job-tree',
-    config: {
-      handler: (request, reply) => reply(Metric.tree())
+    path: '/metrics',
+    handler: (req, reply) => {
+      console.log('before metrics');
+      reply(prom()).type('text/plain')
+      console.log('after metrics');
     }
   });
-
-  server.subscription('/stats/{id}', {
-    onSubscribe: (socket, path, params, next) => {
-      console.log('onSubscribe');
-      metric.on(params.id);
-      next();
-    },
-    onUnsubscribe: (socket, path, params, next) => {
-      console.log('onUnsubscribe');
-      metric.off(params.id);
-      next();
-    }
-  });
-};
\ No newline at end of file
+};
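
Note (not part of the patch): the new /metrics route serves the text produced by src/server/prom.js as text/plain, and the proc_mem_used_perc gauge is computed from os.totalmem()/os.freemem(), so it reflects system-wide memory use. A minimal sketch for eyeballing that output, assuming one of the leak containers is reachable on localhost:8000 (the sample value in the comment is illustrative only):

// check-metrics.js: fetch /metrics from a leak container and print the
// Prometheus text-format body generated by prom.js.
const http = require('http');

http.get({ host: 'localhost', port: 8000, path: '/metrics' }, (res) => {
  let body = '';
  res.on('data', (chunk) => { body += chunk; });
  res.on('end', () => {
    // Expected shape, per the metric() template in src/server/prom.js:
    //
    //   # HELP proc_mem_used_perc Process memory used in percentage.
    //   # TYPE proc_mem_used_perc gauge
    //   proc_mem_used_perc 42.5
    console.log(body);
  });
}).on('error', (err) => console.error(err));

Run it with `node check-metrics.js` after `docker-compose up`; the same series should then show up in the Prometheus UI on port 9090 under the 'leak' job.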