Mirror of https://github.com/yldio/copilot.git (synced 2024-11-28 14:10:04 +02:00)

feat: remove traefik from balancing

commit f896569f4c, parent 7daf22b5b6
@@ -17,8 +17,8 @@ RUN curl --retry 7 --fail -vo /tmp/consul.zip "https://releases.hashicorp.com/co
     && mkdir /config

 # Install ContainerPilot
-ENV CP_SHA1 1f159207c7dc2b622f693754f6dda77c82a88263
-ENV CONTAINERPILOT_VERSION 3.1.1
+ENV CP_SHA1 e27c1b9cd1023e622f77bb19914606dee3c9b22c
+ENV CONTAINERPILOT_VERSION 3.3.1
 RUN curl -Lo /tmp/containerpilot.tar.gz "https://github.com/joyent/containerpilot/releases/download/${CONTAINERPILOT_VERSION}/containerpilot-${CONTAINERPILOT_VERSION}.tar.gz" \
     && echo "${CP_SHA1} /tmp/containerpilot.tar.gz" | sha1sum -c \
     && tar zxf /tmp/containerpilot.tar.gz -C /bin \
docker/api/bootstrap-data.js (vendored): 2 lines changed
@@ -5,7 +5,7 @@ const Url = require('url');
 const Path = require('path');
 const Fs = require('fs');

-const Data = require('./lib/data');
+const Data = require('portal-api/lib/data');

 const {
   DOCKER_HOST,
@@ -21,7 +21,7 @@
     "joi": "^10.6.0",
     "joyent-cp-gql-schema": "^1.2.0",
     "piloted": "^3.1.1",
-    "portal-api": "^1.2.0",
+    "portal-api": "^1.3.2",
     "toppsy": "^1.1.0",
     "triton": "^5.2.0"
   }
@@ -3,12 +3,8 @@
 const Brule = require('brule');
 const Good = require('good');
 const Hapi = require('hapi');
-const HapiSwagger = require('hapi-swagger');
-const Inert = require('inert');
 const Toppsy = require('toppsy');
-const Vision = require('vision');
-const Pack = require('./package');
-const Portal = require('./lib');
+const Portal = require('portal-api');
 const Path = require('path');
 const Fs = require('fs');
 const Url = require('url');
@@ -16,13 +12,6 @@ const Url = require('url');
 const server = new Hapi.Server();
 server.connection({ port: 3000 });

-const swaggerOptions = {
-  info: {
-    title: 'Portal API Documentation',
-    version: Pack.version
-  }
-};
-
 const {
   DOCKER_HOST,
   DOCKER_CERT_PATH,
@@ -74,7 +63,7 @@ const goodOptions = {
     {
       module: 'good-squeeze',
       name: 'Squeeze',
-      args: [{ log: '*', response: '*', error: '*' }]
+      args: [{ response: '*', error: '*' }]
     },
     {
       module: 'good-console'
@@ -87,8 +76,6 @@ const goodOptions = {
 server.register(
   [
     Brule,
-    Inert,
-    Vision,
     {
       register: Good,
       options: goodOptions
@@ -97,10 +84,6 @@ server.register(
       register: Portal,
       options: portalOptions
     },
-    {
-      register: HapiSwagger,
-      options: swaggerOptions
-    },
     {
       register: Toppsy,
       options: { namespace: 'portal', subsystem: 'api' }
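Taken together, the hunks above drop the Swagger and static-file plugins (HapiSwagger, Inert, Vision), stop forwarding plain 'log' events to the console reporter, and load the portal plugin from the published portal-api package instead of the local ./lib. A minimal sketch of how the registration reads after the change; goodOptions and portalOptions are stubbed here because their full definitions sit outside these hunks:

'use strict';

const Brule = require('brule');
const Good = require('good');
const Hapi = require('hapi');
const Toppsy = require('toppsy');
const Portal = require('portal-api');

const server = new Hapi.Server();
server.connection({ port: 3000 });

// Stubs: the real objects are built earlier in the file (see the hunks above).
const goodOptions = { reporters: {} };
const portalOptions = {};

server.register(
  [
    Brule,
    { register: Good, options: goodOptions },
    { register: Portal, options: portalOptions },
    { register: Toppsy, options: { namespace: 'portal', subsystem: 'api' } }
  ],
  (err) => {
    if (err) {
      throw err;
    }

    server.start(() => {});
  }
);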
@@ -11,10 +11,10 @@ RUN export CONSUL_VERSION=0.7.0 \
     && mkdir /config

 # Add Containerpilot and set its configuration
-ENV CONTAINERPILOT_VERSION 3.1.1
+ENV CONTAINERPILOT_VERSION 3.3.1
 ENV CONTAINERPILOT /etc/containerpilot.json

-RUN export CONTAINERPILOT_CHECKSUM=1f159207c7dc2b622f693754f6dda77c82a88263 \
+RUN export CONTAINERPILOT_CHECKSUM=e27c1b9cd1023e622f77bb19914606dee3c9b22c \
     && export archive=containerpilot-${CONTAINERPILOT_VERSION}.tar.gz \
     && curl -Lso /tmp/${archive} \
     "https://github.com/joyent/containerpilot/releases/download/${CONTAINERPILOT_VERSION}/${archive}" \
@@ -9,6 +9,11 @@ RUN set -x \
     && apk upgrade \
     && rm -rf /var/cache/apk/*

+# Use consul-template to re-write our Nginx virtualhost config
+RUN curl -Lo /tmp/consul_template_0.15.0_linux_amd64.zip https://releases.hashicorp.com/consul-template/0.15.0/consul-template_0.15.0_linux_amd64.zip && \
+    unzip /tmp/consul_template_0.15.0_linux_amd64.zip && \
+    mv consul-template /bin
+
 # Install Consul agent
 ENV CONSUL_VERSION 0.7.0
 ENV CONSUL_CHECKSUM b350591af10d7d23514ebaa0565638539900cdb3aaa048f077217c4c46653dd8
@@ -19,8 +24,8 @@ RUN curl --retry 7 --fail -vo /tmp/consul.zip "https://releases.hashicorp.com/co
     && mkdir /config

 # Install ContainerPilot
-ENV CP_SHA1 1f159207c7dc2b622f693754f6dda77c82a88263
-ENV CONTAINERPILOT_VERSION 3.1.1
+ENV CP_SHA1 e27c1b9cd1023e622f77bb19914606dee3c9b22c
+ENV CONTAINERPILOT_VERSION 3.3.1
 RUN curl -Lo /tmp/containerpilot.tar.gz "https://github.com/joyent/containerpilot/releases/download/${CONTAINERPILOT_VERSION}/containerpilot-${CONTAINERPILOT_VERSION}.tar.gz" \
     && echo "${CP_SHA1} /tmp/containerpilot.tar.gz" | sha1sum -c \
     && tar zxf /tmp/containerpilot.tar.gz -C /bin \
docker/frontend/bin/reload-nginx.sh (new executable file): 32 lines added
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+# Render Nginx configuration template using values from Consul,
+# but do not reload because Nginx has't started yet
+preStart() {
+  consul-template \
+    -once \
+    -consul localhost:8500 \
+    -template "/etc/nginx/nginx.conf.tmpl:/etc/nginx/nginx.conf"
+}
+
+# Render Nginx configuration template using values from Consul,
+# then gracefully reload Nginx
+onChange() {
+  consul-template \
+    -once \
+    -consul localhost:8500 \
+    -template "/etc/nginx/nginx.conf.tmpl:/etc/nginx/nginx.conf:nginx -s reload"
+}
+
+until
+  cmd=$1
+  if [ -z "$cmd" ]; then
+    onChange
+  fi
+  shift 1
+  $cmd "$@"
+  [ "$?" -ne 127 ]
+do
+  onChange
+  exit
+done
@@ -1,31 +1,6 @@
 {
   consul: 'localhost:8500',
   jobs: [
-    {
-      name: 'config-nginx',
-      exec: 'containerpilot -config /etc/nginx/nginx.conf.tmpl -template -out /etc/nginx/nginx.conf'
-    },
-    {
-      name: 'cp-frontend',
-      port: {{.PORT}},
-      exec: 'nginx',
-      interfaces: ["eth0", "eth1"],
-      restarts: 'unlimited',
-      when: {
-        source: 'config-nginx',
-        once: 'exitSuccess'
-      },
-      health: {
-        exec: '/usr/bin/curl -o /dev/null --fail -s http://localhost:{{.PORT}}',
-        interval: 5,
-        ttl: 25
-      },
-      tags: [
-        'traefik.backend=cp-frontend',
-        'traefik.frontend.rule=PathPrefix:/',
-        'traefik.frontend.entryPoints={{ .ENTRYPOINTS | default "http,ws,wss" }}'
-      ]
-    },
     {
       name: 'consul-agent',
       exec: ['/usr/local/bin/consul', 'agent',
@@ -36,8 +11,45 @@
         '-retry-join', '{{ .CONSUL | default "consul" }}',
         '-retry-max', '10',
         '-retry-interval', '10s'],
+      health: {
+        exec: '/usr/bin/curl -o /dev/null --fail -s http://localhost:8500',
+        interval: 5,
+        ttl: 25
+      },
       restarts: 'unlimited'
     },
+    {
+      name: "preStart",
+      exec: "/bin/reload-nginx.sh preStart",
+      when: {
+        source: 'consul-agent',
+        once: 'healthy'
+      },
+    },
+    {
+      name: 'cp-frontend',
+      port: {{.PORT}},
+      exec: 'nginx',
+      interfaces: ["eth0", "eth1"],
+      restarts: 'unlimited',
+      when: {
+        source: 'preStart',
+        once: 'exitSuccess'
+      },
+      health: {
+        exec: '/usr/bin/curl -o /dev/null --fail -s http://localhost:{{.PORT}}',
+        interval: 5,
+        ttl: 25
+      }
+    },
+    {
+      name: "onchange-api",
+      exec: "/bin/reload-nginx.sh onChange",
+      when: {
+        source: "watch.api",
+        each: "changed"
+      }
+    },
     {
       name: 'sensor_memory_usage',
       exec: '/bin/sensors.sh memory',
@@ -75,6 +87,12 @@
       restarts: 'unlimited'
     }
   ],
+  watches: [
+    {
+      name: 'api',
+      interval: 3
+    }
+  ],
   telemetry: {
     port: 9090,
     tags: ['op'],
@@ -17,85 +17,100 @@ include /etc/nginx/modules/*.conf;


 events {
     # The maximum number of simultaneous connections that can be opened by
     # a worker process.
     worker_connections 1024;
 }

 http {
     index index.html index.htm;

-    server {
-        server_name _;
-        listen {{ .PORT | default "80" }} default_server;
-        listen [::]:{{ .PORT | default "80" }} default_server;
-        root /opt/app/packages/cp-frontend/build;
-        location / {
-            try_files $uri /index.html;
-        }
-    }
+    {{ if service "api" }}
+    upstream api_hosts {
+      {{range service "api"}}
+      server {{.Address}}:{{.Port}};
+      {{end}}
+    }{{ end }}
+
+    server {
+        server_name _;
+        listen {{ env "PORT" }} default_server;
+        listen [::]:{{ env "PORT" }} default_server;
+        root /opt/app/packages/cp-frontend/build;
+        location / {
+            try_files $uri /index.html;
+        }
+        location /api {
+            rewrite /api/(.*) /$1 break;
+            proxy_pass http://api_hosts;
+            proxy_redirect off;
+            proxy_set_header Host $host;
+        }
+    }

     # Includes mapping of file name extensions to MIME types of responses
     # and defines the default type.
     include /etc/nginx/mime.types;
     default_type application/octet-stream;

     # Name servers used to resolve names of upstream servers into addresses.
     # It's also needed when using tcpsocket and udpsocket in Lua modules.
     #resolver 208.67.222.222 208.67.220.220;

     # Don't tell nginx version to clients.
     server_tokens off;

     # Specifies the maximum accepted body size of a client request, as
     # indicated by the request header Content-Length. If the stated content
     # length is greater than this size, then the client receives the HTTP
     # error code 413. Set to 0 to disable.
     client_max_body_size 1m;

     # Timeout for keep-alive connections. Server will close connections after
     # this time.
     keepalive_timeout 65;

     # Sendfile copies data between one FD and other from within the kernel,
     # which is more efficient than read() + write().
     sendfile on;

     # Don't buffer data-sends (disable Nagle algorithm).
     # Good for sending frequent small bursts of data in real time.
     tcp_nodelay on;

     # Causes nginx to attempt to send its HTTP response head in one packet,
     # instead of using partial frames.
     #tcp_nopush on;

     # Path of the file with Diffie-Hellman parameters for EDH ciphers.
     #ssl_dhparam /etc/ssl/nginx/dh2048.pem;

     # Specifies that our cipher suits should be preferred over client ciphers.
     ssl_prefer_server_ciphers on;

     # Enables a shared SSL cache with size that can hold around 8000 sessions.
     ssl_session_cache shared:SSL:2m;


     # Enable gzipping of responses.
     #gzip on;

     # Set the Vary HTTP header as defined in the RFC 2616.
     gzip_vary on;

     # Enable checking the existence of precompressed files.
     #gzip_static on;


     # Specifies the main log format.
     log_format main '$remote_addr - $remote_user [$time_local] "$request" '
         '$status $body_bytes_sent "$http_referer" '
         '"$http_user_agent" "$http_x_forwarded_for"';

     # Sets the path, format, and configuration for a buffered log write.
     access_log /var/log/nginx/access.log main;


     # Includes virtual hosts configs.
     # include /etc/nginx/conf.d/*.conf;
 }
@@ -53,22 +53,13 @@ frontend:
     - consul:consul
   environment:
     - CONSUL=consul
-    - PORT=3069
+    - PORT=80
     - REACT_APP_GQL_HOSTNAME=localhost
     - REACT_APP_GQL_PORT=80
-  expose:
-    - 3069
-
-traefik:
-  image: d0cker/traefik
   ports:
     - "80:80"
-    - "8080:8080"
-  links:
-    - consul:consul
-  environment:
-    - CONSUL=consul
-  restart: always
+  dns:
+    - 127.0.0.1


 #############################################################################
@@ -108,7 +99,7 @@ rethinkdb:
   restart: always
   mem_limit: 1g
   ports:
-    - 8081:8080
+    - 8080:8080
   expose:
     - 28015
     - 29015
@@ -90,6 +90,7 @@ class Data extends EventEmitter {
     this._docker = new Dockerode(settings.docker);
     this._machines = null;
     this._triton = null;
+    this._server = settings.server;

     Triton.createClient({
       profile: settings.triton
@@ -252,7 +253,7 @@ class Data extends EventEmitter {

   createDeploymentGroup (clientDeploymentGroup, cb) {
     const dg = Transform.toDeploymentGroup(clientDeploymentGroup);
-    console.log(`-> creating DeploymentGroup: ${Util.inspect(dg)}`);
+    this._server.log(['debug'], `-> creating DeploymentGroup: ${Util.inspect(dg)}`);

     this._db.deployment_groups.query({
       slug: dg.slug
@@ -278,7 +279,7 @@ class Data extends EventEmitter {

   updateDeploymentGroup (clientDeploymentGroup, cb) {
     const dg = Transform.toDeploymentGroup(clientDeploymentGroup);
-    console.log(`-> updating DeploymentGroup: ${Util.inspect(dg)}`);
+    this._server.log(['debug'], `-> updating DeploymentGroup: ${Util.inspect(dg)}`);

     this._db.deployment_groups.update([dg], (err) => {
       if (err) {
@@ -527,7 +528,7 @@ class Data extends EventEmitter {
     Hoek.assert(clientVersion.manifest, 'manifest is required');
     Hoek.assert(clientVersion.deploymentGroupId, 'deploymentGroupId is required');

-    console.log(`-> creating new Version for DeploymentGroup ${clientVersion.deploymentGroupId}: ${Util.inspect(clientVersion)}`);
+    this._server.log(['debug'], `-> creating new Version for DeploymentGroup ${clientVersion.deploymentGroupId}: ${Util.inspect(clientVersion)}`);

     const version = Transform.toVersion(clientVersion);
     this._db.versions.insert(version, (err, key) => {
@@ -535,7 +536,7 @@ class Data extends EventEmitter {
         return cb(err);
       }

-      console.log(`-> new Version for DeploymentGroup ${clientVersion.deploymentGroupId} created: ${key}`);
+      this._server.log(['debug'], `-> new Version for DeploymentGroup ${clientVersion.deploymentGroupId} created: ${key}`);
       this._db.deployment_groups.query({
         id: clientVersion.deploymentGroupId
       }, (err, deploymentGroup) => {
@@ -551,7 +552,7 @@ class Data extends EventEmitter {
           []
       };

-      console.log(`-> updating DeploymentGroup ${clientVersion.deploymentGroupId} to add Version ${key}`);
+      this._server.log(['debug'], `-> updating DeploymentGroup ${clientVersion.deploymentGroupId} to add Version ${key}`);

       this._db.deployment_groups.update([changes], (err) => {
         if (err) {
@@ -715,7 +716,7 @@ class Data extends EventEmitter {
       isHandled: false
     };

-    console.log('-> scale request received');
+    this._server.log(['debug'], '-> scale request received');

     const handleFailedScale = (err1, cb) => {
       if (err1) {
@@ -747,7 +748,7 @@ class Data extends EventEmitter {

       ctx.isHandled = true;

-      console.log(`-> got response from docker-compose to scale ${ctx.service.name} to ${replicas} replicas`);
+      this._server.log(['debug'], `-> got response from docker-compose to scale ${ctx.service.name} to ${replicas} replicas`);
     };

     const triggerScale = (err, newVersion) => {
@@ -755,12 +756,12 @@ class Data extends EventEmitter {
         return handleFailedScale(err, cb);
       }

-      console.log('-> new Version created');
+      this._server.log(['debug'], '-> new Version created');

       cb(null, newVersion);

       setImmediate(() => {
-        console.log(`-> requesting docker-compose to scale ${ctx.service.name} to ${replicas} replicas`);
+        this._server.log(['debug'], `-> requesting docker-compose to scale ${ctx.service.name} to ${replicas} replicas`);

         this._dockerCompose.scale({
           projectName: ctx.deploymentGroup.name,
@@ -803,7 +804,7 @@ class Data extends EventEmitter {
         hasPlan: true
       };

-      console.log(`-> creating new Version for DOWN scale ${Util.inspect(payload)}`);
+      this._server.log(['debug'], `-> creating new Version for DOWN scale ${Util.inspect(payload)}`);

       // note: createVersion updates deploymentGroup
       this.createVersion(payload, triggerScale);
@@ -826,7 +827,7 @@ class Data extends EventEmitter {
         hasPlan: true
       };

-      console.log(`-> creating new Version for UP scale ${Util.inspect(payload)}`);
+      this._server.log(['debug'], `-> creating new Version for UP scale ${Util.inspect(payload)}`);

       // note: createVersion updates deploymentGroup
       this.createVersion(payload, triggerScale);
@@ -837,7 +838,7 @@ class Data extends EventEmitter {
         return handleFailedScale(err, cb);
       }

-      console.log(`-> got current scale ${Util.inspect(currentScale)}`);
+      this._server.log(['debug'], `-> got current scale ${Util.inspect(currentScale)}`);

       const serviceReplicas = Find(currentScale, ['serviceName', ctx.service.name]).replicas;
       const serviceScale = Number.isFinite(serviceReplicas) ? serviceReplicas : 1;
@@ -869,7 +870,7 @@ class Data extends EventEmitter {

       ctx.manifest = manifest;

-      console.log('-> fetching current scale');
+      this._server.log(['debug'], '-> fetching current scale');

       this._getCurrentScale({
         deploymentGroupName: ctx.deploymentGroup.name,
@@ -891,7 +892,7 @@ class Data extends EventEmitter {

       ctx.version = version;

-      console.log(`-> fetching Manifest ${version.manifest_id}`);
+      this._server.log(['debug'], `-> fetching Manifest ${version.manifest_id}`);

       this._db.manifests.single({
         id: version.manifest_id
@@ -909,7 +910,7 @@ class Data extends EventEmitter {

       ctx.deploymentGroup = deploymentGroup;

-      console.log(`-> fetching Version ${ctx.deploymentGroup.version_id}`);
+      this._server.log(['debug'], `-> fetching Version ${ctx.deploymentGroup.version_id}`);

       this._db.versions.single({
         id: deploymentGroup.version_id
@@ -921,11 +922,11 @@ class Data extends EventEmitter {
         return handleFailedScale(err, cb);
       }

-      console.log(`-> got ${instances.length} Instances from ${ctx.service.name}`);
+      this._server.log(['debug'], `-> got ${instances.length} Instances from ${ctx.service.name}`);

       ctx.instances = instances;

-      console.log(`-> fetching DeploymentGroup ${ctx.service.deployment_group_id}`);
+      this._server.log(['debug'], `-> fetching DeploymentGroup ${ctx.service.deployment_group_id}`);

       this._db.deployment_groups.single({
         id: ctx.service.deployment_group_id
@@ -937,7 +938,7 @@ class Data extends EventEmitter {
         return handleFailedScale(err, cb);
       }

-      console.log(`-> fetching Instances from ${ctx.service.name}`);
+      this._server.log(['debug'], `-> fetching Instances from ${ctx.service.name}`);

       this.getInstances({ ids: ctx.service.instance_ids }, handleInstances);
     };
@@ -957,7 +958,7 @@ class Data extends EventEmitter {

       ctx.service = service;

-      console.log(`-> fetching DeploymentGroup ${service.deployment_group_id}`);
+      this._server.log(['debug'], `-> fetching DeploymentGroup ${service.deployment_group_id}`);

       this.updateService({
         id: serviceId,
@@ -965,7 +966,7 @@ class Data extends EventEmitter {
       }, handleUpdatedService);
     };

-    console.log(`-> fetching Service ${serviceId}`);
+    this._server.log(['debug'], `-> fetching Service ${serviceId}`);

     this._db.services.single({ id: serviceId }, handleService);
   }
@@ -992,7 +993,7 @@ class Data extends EventEmitter {
       isHandled: false
     };

-    console.log('-> provision request received');
+    this._server.log(['debug'], '-> provision request received');

     const handleFailedProvision = (err) => {
       if (!err) {
@@ -1027,7 +1028,7 @@ class Data extends EventEmitter {

      const services = ForceArray(result.successes);

-      console.log(`-> got a map of Service's-Instance's from DeploymentGroup ${ctx.currentDeploymentGroup.id} ${Util.inspect(services)}`);
+      this._server.log(['debug'], `-> got a map of Service's-Instance's from DeploymentGroup ${ctx.currentDeploymentGroup.id} ${Util.inspect(services)}`);

      const plan = Flatten(services.map(({ name, instances }) => {
        const provision = ctx.provisionRes[name];
@@ -1092,7 +1093,7 @@ class Data extends EventEmitter {
      VAsync.parallel({
        funcs: [
          (cb) => {
-            console.log(`-> updating Version ${ctx.newVersion.id} from DeploymentGroup ${ctx.currentDeploymentGroup.id} with new Plan ${Util.inspect(plan)}`);
+            this._server.log(['debug'], `-> updating Version ${ctx.newVersion.id} from DeploymentGroup ${ctx.currentDeploymentGroup.id} with new Plan ${Util.inspect(plan)}`);
            return this.updateVersion({
              id: ctx.newVersion.id,
              hasPlan: true,
@@ -1100,7 +1101,7 @@ class Data extends EventEmitter {
            }, cb);
          },
          (cb) => {
-            console.log(`-> updating DeploymentGroup ${ctx.currentDeploymentGroup.id} with new Service's ${Util.inspect(ctx.newServices)} and ACTIVE status`);
+            this._server.log(['debug'], `-> updating DeploymentGroup ${ctx.currentDeploymentGroup.id} with new Service's ${Util.inspect(ctx.newServices)} and ACTIVE status`);

            const services = UniqBy(
              ForceArray(ctx.newServices)
@@ -1124,8 +1125,8 @@ class Data extends EventEmitter {
        return handleFailedProvision(err);
      }

-      console.log(`-> marked removed Service's with DELETING from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
-      console.log(`-> fetching a map of Service's-Instance's from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+      this._server.log(['debug'], `-> marked removed Service's with DELETING from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+      this._server.log(['debug'], `-> fetching a map of Service's-Instance's from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);

      VAsync.forEachParallel({
        inputs: ctx.previousServices,
@@ -1149,7 +1150,7 @@ class Data extends EventEmitter {
        return handleFailedProvision(err);
      }

-      console.log(`-> identified previous Service's from DeploymentGroup ${ctx.currentDeploymentGroup.id} ${Util.inspect(ctx.previousServices)}`);
+      this._server.log(['debug'], `-> identified previous Service's from DeploymentGroup ${ctx.currentDeploymentGroup.id} ${Util.inspect(ctx.previousServices)}`);

      ctx.previousServices = previousServices;

@@ -1158,12 +1159,12 @@ class Data extends EventEmitter {
        return !Find(ctx.newServices, ['name', name]);
      });

-      console.log(`-> identified removed Service's from DeploymentGroup ${ctx.currentDeploymentGroup.id} ${Util.inspect(ctx.removedServices)}`);
+      this._server.log(['debug'], `-> identified removed Service's from DeploymentGroup ${ctx.currentDeploymentGroup.id} ${Util.inspect(ctx.removedServices)}`);

      VAsync.forEachParallel({
        inputs: ctx.removedServices,
        func: ({ id, name }, next) => {
-          console.log(`-> marking Service ${name} as DELETING from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+          this._server.log(['debug'], `-> marking Service ${name} as DELETING from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
          this.updateService({
            id,
            status: 'DELETING'
@@ -1180,18 +1181,18 @@ class Data extends EventEmitter {

      ctx.newServices = ForceArray(result.successes);

-      console.log(`-> got "${ctx.newServices.length}" Services provisioned from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+      this._server.log(['debug'], `-> got "${ctx.newServices.length}" Services provisioned from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);

      ctx.currentDeploymentGroup.services({}, handlePreviousServices);
    };

    const createProvisionService = ({ payload }, cb) => {
-      console.log(`-> creating Service "${payload.name}" from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+      this._server.log(['debug'], `-> creating Service "${payload.name}" from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
      this.createService(payload, cb);
    };

    const updateProvisionService = ({ payload, serviceId }, cb) => {
-      console.log(`-> updating Service "${payload.name}" from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+      this._server.log(['debug'], `-> updating Service "${payload.name}" from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
      this.updateService(Object.assign({}, payload, {
        id: serviceId
      }), cb);
@@ -1199,7 +1200,7 @@ class Data extends EventEmitter {

    // 10. on each service, either create or update it with new status and hash
    const handleProvisionService = (serviceName, next) => {
-      console.log(`-> handling Service "${serviceName}" from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+      this._server.log(['debug'], `-> handling Service "${serviceName}" from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);

      this.getServices({
        name: serviceName,
@@ -1209,7 +1210,7 @@ class Data extends EventEmitter {
          return next(err);
        }

-        console.log(`-> got ${services.length} services with name ${serviceName} from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+        this._server.log(['debug'], `-> got ${services.length} services with name ${serviceName} from DeploymentGroup ${ctx.currentDeploymentGroup.id}`);

        const provision = ctx.provisionRes[serviceName];
        const action = Get(provision, 'plan.action', 'noop').toUpperCase();
@@ -1245,7 +1246,7 @@ class Data extends EventEmitter {
        return;
      }

-      console.log(`-> got response from provision ${Util.inspect(provisionRes)}`);
+      this._server.log(['debug'], `-> got response from provision ${Util.inspect(provisionRes)}`);

      ctx.isHandled = true;
      ctx.provisionRes = provisionRes;
@@ -1272,7 +1273,7 @@ class Data extends EventEmitter {
      cb(null, ctx.newVersion);

      setImmediate(() => {
-        console.log(`-> requesting docker-compose provision for DeploymentGroup ${ctx.currentDeploymentGroup.name}`);
+        this._server.log(['debug'], `-> requesting docker-compose provision for DeploymentGroup ${ctx.currentDeploymentGroup.name}`);

        this._dockerCompose.provision({
          projectName: ctx.currentDeploymentGroup.name,
@@ -1290,7 +1291,7 @@ class Data extends EventEmitter {
        return cb(err);
      }

-      console.log(`-> got current scale ${Util.inspect(currentScale)}`);
+      this._server.log(['debug'], `-> got current scale ${Util.inspect(currentScale)}`);

      ctx.currentScale = currentScale;

@@ -1311,9 +1312,9 @@ class Data extends EventEmitter {
      }

      if (!currentVersion) {
-        console.log(`-> detected first provision for DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
+        this._server.log(['debug'], `-> detected first provision for DeploymentGroup ${ctx.currentDeploymentGroup.id}`);
      } else {
-        console.log(`-> creating new Version based on old Version ${currentVersion.id}`);
+        this._server.log(['debug'], `-> creating new Version based on old Version ${currentVersion.id}`);
      }

      ctx.currentVersion = currentVersion;
@@ -1332,7 +1333,7 @@ class Data extends EventEmitter {
        return cb(err);
      }

-      console.log(`-> fetching current version for ${ctx.currentDeploymentGroup.id}`);
+      this._server.log(['debug'], `-> fetching current version for ${ctx.currentDeploymentGroup.id}`);

      ctx.newManifest = newManifest;
      ctx.currentDeploymentGroup.version(null, handleCurrentVersion);
@@ -1345,7 +1346,7 @@ class Data extends EventEmitter {
        return cb(err);
      }

-      console.log(`-> got docker-compose config ${Util.inspect(config)}`);
+      this._server.log(['debug'], `-> got docker-compose config ${Util.inspect(config)}`);

      ctx.config = config;

@@ -1372,13 +1373,13 @@ class Data extends EventEmitter {
        return currentDeploymentGroup.version({}, cb);
      }

-      console.log(`-> DeploymentGroup found with id ${currentDeploymentGroup.id}`);
+      this._server.log(['debug'], `-> DeploymentGroup found with id ${currentDeploymentGroup.id}`);

      const configPayload = Object.assign({}, clientManifest, {
        deploymentGroupName: currentDeploymentGroup.name
      });

-      console.log(`-> requesting docker-compose config for manifest ${Util.inspect(configPayload)}`);
+      this._server.log(['debug'], `-> requesting docker-compose config for manifest ${Util.inspect(configPayload)}`);

      ctx.currentDeploymentGroup = currentDeploymentGroup;

@@ -1401,7 +1402,7 @@ class Data extends EventEmitter {
  }

  createManifest (clientManifest, cb) {
-    console.log(`-> creating new Manifest ${Util.inspect(clientManifest)}`);
+    this._server.log(['debug'], `-> creating new Manifest ${Util.inspect(clientManifest)}`);

    const newManifest = Transform.toManifest(clientManifest);
    this._db.manifests.insert(newManifest, (err, manifestId) => {
@@ -1409,7 +1410,7 @@ class Data extends EventEmitter {
        return cb(err);
      }

-      console.log(`-> new Manifest created with id ${manifestId}`);
+      this._server.log(['debug'], `-> new Manifest created with id ${manifestId}`);

      clientManifest.id = manifestId;
      cb(null, Transform.fromManifest(clientManifest));
@@ -1455,7 +1456,7 @@ class Data extends EventEmitter {

  updateService (clientService, cb) {
    const payload = Transform.toService(clientService);
-    console.log(`-> got update Service request ${Util.inspect(payload)}`);
+    this._server.log(['debug'], `-> got update Service request ${Util.inspect(payload)}`);

    this._db.services.update([payload], (err) => {
      if (err) {
@@ -1468,20 +1469,20 @@ class Data extends EventEmitter {

  getService ({ id, hash }, cb) {
    const query = id ? { id } : { version_hash: hash };
-    console.log(`-> fetching Service ${Util.inspect(query)}`);
+    this._server.log(['debug'], `-> fetching Service ${Util.inspect(query)}`);
    this._db.services.query(query, (err, services) => {
      if (err) {
        return cb(err);
      }

      if (!services || !services.length) {
-        console.log(`-> Service ${Util.inspect(query)} not found`);
+        this._server.log(['debug'], `-> Service ${Util.inspect(query)} not found`);
        return cb(Boom.notFound());
      }

      const service = services.shift();

-      console.log(`-> Service ${Util.inspect(query)} found ${Util.inspect(service)}`);
+      this._server.log(['debug'], `-> Service ${Util.inspect(query)} found ${Util.inspect(service)}`);

      const branches = ForceArray(service.branches).map((branch) => {
        return Object.assign({}, branch, {
@@ -2277,17 +2278,17 @@ class Data extends EventEmitter {
  }

  importDeploymentGroup ({ deploymentGroupSlug }, cb) {
-    console.log(`-> import requested for ${deploymentGroupSlug}`);
+    this._server.log(['debug'], `-> import requested for ${deploymentGroupSlug}`);

    if (!this._machines) {
-      console.log('-> watcher not yet defined');
+      this._server.log(['debug'], '-> watcher not yet defined');
      return cb(null, null);
    }

    const machines = this._machines.getContainers();

    if (!Array.isArray(machines)) {
-      console.log('-> no machines found');
+      this._server.log(['debug'], '-> no machines found');
      return cb(null, null);
    }

@@ -2304,7 +2305,7 @@ class Data extends EventEmitter {
    );

    if (!containers.length) {
-      console.log(`-> no containers found for ${deploymentGroupSlug}`);
+      this._server.log(['debug'], `-> no containers found for ${deploymentGroupSlug}`);
      return cb(null, null);
    }

@@ -2341,7 +2342,7 @@ class Data extends EventEmitter {
    return (serviceId, next) => {
      const service = services[serviceId];

-      console.log(`-> creating Service ${Util.inspect(service)}`);
+      this._server.log(['debug'], `-> creating Service ${Util.inspect(service)}`);

      VAsync.forEachParallel({
        inputs: service.instances,
@@ -2355,7 +2356,7 @@ class Data extends EventEmitter {
          return cb(err);
        }

-        console.log(`-> created Instances ${Util.inspect(results.successes)}`);
+        this._server.log(['debug'], `-> created Instances ${Util.inspect(results.successes)}`);

        this.createService(Object.assign(service, {
          instances: results.successes,
@@ -2372,7 +2373,7 @@ class Data extends EventEmitter {
      imported: true
    };

-    console.log(`-> creating DeploymentGroup ${Util.inspect(deploymentGroup)}`);
+    this._server.log(['debug'], `-> creating DeploymentGroup ${Util.inspect(deploymentGroup)}`);

    this.createDeploymentGroup(deploymentGroup, (err, dg) => {
      if (err) {
@@ -30,6 +30,8 @@ module.exports = function (server, options, next) {
     console.error(ex);
   }

+  options.watch.server = server;
+  options.data.server = server;
   const data = new Data(options.data);
   const cpWatcher = new ContainerPilotWatcher(Object.assign(options.watch, { data }));
   const machinesWatcher = new MachinesWatcher(Object.assign(options.watch, {
@@ -63,6 +63,7 @@ module.exports = class MachineWatcher {

     // todo assert options
     this._data = options.data;
+    this._server = options.watch;
     this._frequency = 200;

     this._tritonWatch = new TritonWatch({
@@ -161,7 +162,7 @@ module.exports = class MachineWatcher {
   }

   createInstance ({ deploymentGroup, machine, instances, service }, cb) {
-    console.error(`-> detected that machine ${machine.name} was created`);
+    this._server.log(['debug', 'error'], `-> detected that machine ${machine.name} was created`);

     const status = (machine.state || '').toUpperCase();

@@ -176,7 +177,7 @@ module.exports = class MachineWatcher {
       machineId: machine.id
     };

-    console.log('-> creating instance', Util.inspect(instance));
+    this._server.log(['debug'], '-> creating instance', Util.inspect(instance));
     this._data.createInstance(instance, (err, instance) => {
       if (err) {
         return cb(err);
@@ -187,7 +188,7 @@ module.exports = class MachineWatcher {
         instances: instances.concat(instance)
       };

-      console.log('-> updating service', Util.inspect(payload));
+      this._server.log(['debug'], '-> updating service', Util.inspect(payload));
       this._data.updateService(payload, cb);
     });
   }
@@ -200,7 +201,7 @@ module.exports = class MachineWatcher {
       status: (machine.state || '').toUpperCase()
     };

-    console.log('-> updating instance', Util.inspect(updatedInstance));
+    this._server.log(['debug'], '-> updating instance', Util.inspect(updatedInstance));
     this._data.updateInstance(updatedInstance, (err) => {
       if (err) {
         return cb(err);
@@ -218,7 +219,7 @@ module.exports = class MachineWatcher {
        })
      };

-      console.log('-> updating service', Util.inspect(payload));
+      this._server.log(['debug'], '-> updating service', Util.inspect(payload));
      this._data.updateService(payload, cb);
    });
  }
@@ -544,7 +545,7 @@ module.exports = class MachineWatcher {
      return;
    }

-    console.log('-> `change` event received', Util.inspect(machine));
+    //console.log('-> `change` event received', Util.inspect(machine));

    const { id, tags = {} } = machine;

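The thread running through the portal-api hunks above is a logging change: lib/index.js hands the hapi server object down through the plugin options (options.data.server = server; options.watch.server = server;), data.js keeps it as this._server = settings.server, and every console.log / console.error becomes this._server.log(['debug'], ...), which the good reporters can then filter by tag. A rough sketch of the pattern, assuming the surrounding class looks the way the context lines suggest:

'use strict';

const EventEmitter = require('events');

class Data extends EventEmitter {
  constructor (settings) {
    super();

    // Handed in from lib/index.js: options.data.server = server;
    this._server = settings.server;
  }

  createDeploymentGroup (clientDeploymentGroup, cb) {
    // Tagged hapi log event instead of console.log; with the good-squeeze args
    // narrowed to { response, error } in the API server, these 'log' events no
    // longer reach the console reporter unless a reporter subscribes to them.
    this._server.log(['debug'], '-> creating DeploymentGroup');
    cb(null, clientDeploymentGroup);
  }
}

module.exports = Data;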
@@ -1,6 +1,6 @@
 {
   "name": "portal-api",
-  "version": "1.2.0",
+  "version": "1.3.2",
   "description": "",
   "main": "./lib/index.js",
   "scripts": {
@@ -37,6 +37,7 @@
     "cidr-matcher": "^1.0.5",
     "docker-compose-client": "^1.0.8",
     "dockerode": "^2.5.0",
+    "force-array": "^3.1.0",
     "graphi": "^2.2.1",
     "hoek": "^4.1.1",
     "joyent-cp-gql-schema": "^1.0.4",