Merge pull request #62 from Maix0/monitoring/nginx

Cleanup monitoring side of the service
This commit is contained in:
Raphaël 2025-12-17 20:56:59 +01:00 committed by GitHub
commit 3fd3635fd1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
18 changed files with 247 additions and 150 deletions

View file

@ -1,18 +1,15 @@
networks:
transcendance-network:
app:
driver: bridge
monitoring:
driver: bridge
services:
###############
# USER #
###############
frontend:
build: ./frontend
container_name: frontend
container_name: app-frontend
restart: on-failure:3
networks:
- transcendance-network
- app
volumes:
- static-volume:/volumes/static
logging:
@ -21,20 +18,16 @@ services:
gelf-address: "udp://127.0.0.1:12201"
tag: "{{.Name}}"
#
# The "entry point" as in it does all of this:
# - serve files (images, static files, video)
# - redirect request to appropriate service (reverse proxy)
# - be the one that handles TLS/SSL (basically HTTPS)
# - other stuff I don't know yet
nginx:
build: ./nginx
container_name: nginx
container_name: app-nginx
restart: always
networks:
- transcendance-network
- app
- monitoring
ports:
- '8888:443'
- '9090:8443'
volumes:
# if you need to share files with nginx, you do it here.
- static-volume:/volumes/static
@ -48,18 +41,19 @@ services:
tag: "{{.Name}}"
###############
# AUTH #
# SERVICE #
###############
auth:
build:
context: ./src/
args:
- SERVICE=auth
- EXTRA_FILES=auth/extra
container_name: auth
container_name: app-auth
restart: always
networks:
- transcendance-network
- app
volumes:
- sqlite-volume:/volumes/database
- static-volume:/volumes/static
@ -73,39 +67,15 @@ services:
gelf-address: "udp://127.0.0.1:12201"
tag: "{{.Name}}"
###############
# TIC-TAC-TOE #
###############
# tic-tac-toe:
# build:
# context: ./src/
# args:
# - SERVICE=tic-tac-toe
# - EXTRA_FILES=tic-tac-toe/extra
# container_name: tic-tac-toe
# restart: unless-stopped
# networks:
# - transcendance-network
# volumes:
# - sqlite-volume:/volumes/database
# - static-volume:/volumes/static
# environment:
# - JWT_SECRET=KRUGKIDROVUWG2ZAMJZG653OEBTG66BANJ2W24DTEBXXMZLSEB2GQZJANRQXU6JA
# - DATABASE_DIR=/volumes/database
###############
# CHAT #
###############
chat:
build:
context: ./src/
args:
- SERVICE=chat
- EXTRA_FILES=chat/extra
container_name: chat
container_name: app-chat
restart: always
networks:
- transcendance-network
- app
volumes:
- sqlite-volume:/volumes/database
- static-volume:/volumes/static
@ -114,20 +84,21 @@ services:
- DATABASE_DIR=/volumes/database
- PROVIDER_FILE=/extra/providers.toml
- SESSION_MANAGER=${SESSION_MANAGER}
logging:
driver: gelf
options:
gelf-address: "udp://127.0.0.1:12201"
tag: "{{.Name}}"
###############
# USER #
###############
user:
build:
context: ./src/
args:
- SERVICE=user
# - EXTRA_FILES=user/extra
container_name: user
container_name: app-user
restart: always
networks:
- transcendance-network
- app
volumes:
- sqlite-volume:/volumes/database
- static-volume:/volumes/static
@ -146,16 +117,14 @@ services:
###############
grafana:
container_name: monitoring-grafana
container_name: mon-grafana
image: grafana/grafana-enterprise
restart: always
networks:
- transcendance-network
- app
- monitoring
depends_on:
- prometheus
ports:
- '3000:3000'
volumes:
- ./monitoring/grafana/alerting:/etc/grafana/provisioning/alerting
- ./monitoring/grafana/datasources:/etc/grafana/provisioning/datasources
@ -166,7 +135,7 @@ services:
# this can stay the same for development. This is an alias to `localhost`
- NGINX_DOMAIN=local.maix.me
- GF_LOG_LEVEL=warn
- GF_SERVER_ROOT_URL=http://local.maix.me:3000
- GF_SERVER_ROOT_URL=https://local.maix.me:9090/grafana/
- GF_SECURITY_ADMIN_USER=${GRAFANA_ADMIN_USER}
- GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_ADMIN_PASS}
logging:
@ -174,12 +143,18 @@ services:
options:
gelf-address: "udp://127.0.0.1:12201"
tag: "{{.Name}}"
healthcheck:
test: ["CMD-SHELL", "curl -f -s http://localhost:3000/api/health || exit 1"]
interval: 30s
timeout: 10s
retries: 3
start_period: 30s
prometheus:
image: prom/prometheus:latest
container_name: monitoring-prometheus
container_name: mon-prometheus
networks:
- transcendance-network
- app
- monitoring
volumes:
- ./monitoring/prometheus:/etc/prometheus/
@ -189,14 +164,22 @@ services:
options:
gelf-address: "udp://127.0.0.1:12201"
tag: "{{.Name}}"
healthcheck:
test: ["CMD", "wget", "--quiet", "--spider", "http://localhost:9090/-/healthy"]
interval: 30s
timeout: 5s
retries: 3
start_period: 30s
cadvisor:
image: gcr.io/cadvisor/cadvisor:latest
networks:
- monitoring
container_name: monitoring-cadvisor
ports:
- '8080:8080'
container_name: mon-cadvisor
command:
- '-url_base_prefix=/cadvisor'
environment:
- CADVISOR_HEALTHCHECK_URL=http://localhost:8080/cadvisor/healthz
volumes:
- /:/rootfs:ro
- /var/run:/var/run:ro
@ -209,20 +192,23 @@ services:
gelf-address: "udp://127.0.0.1:12201"
tag: "{{.Name}}"
blackbox:
image: prom/blackbox-exporter:latest
container_name: monitoring-blackbox
container_name: mon-blackbox
networks:
- transcendance-network
ports:
- "9115:9115"
- app
restart: unless-stopped
logging:
driver: gelf
options:
gelf-address: "udp://127.0.0.1:12201"
tag: "{{.Name}}"
healthcheck:
test: ["CMD", "wget", "--quiet", "--spider", "http://localhost:9115/-/healthy"]
interval: 30s
timeout: 5s
retries: 3
start_period: 10s
@ -232,7 +218,7 @@ services:
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:7.17.23
container_name: logs-elasticsearch
container_name: log-elasticsearch
networks:
- monitoring
environment:
@ -242,14 +228,18 @@ services:
volumes:
- elastic-data:/usr/share/elasticsearch/data
- ./logs/elasticsearch:/setup
ports:
- "9200:9200"
command: ["/setup/bootstrap.sh"]
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "-s", "localhost:9200"]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
logstash:
image: docker.elastic.co/logstash/logstash:7.17.23
container_name: logs-logstash
container_name: log-logstash
depends_on:
- elasticsearch
networks:
@ -259,25 +249,38 @@ services:
ports:
- "12201:12201/udp"
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "-s", "localhost:9600"]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
kibana:
image: docker.elastic.co/kibana/kibana:7.17.23
container_name: logs-kibana
container_name: log-kibana
depends_on:
- elasticsearch
networks:
- monitoring
- app
environment:
- ELASTICSEARCH_HOSTS=http://elasticsearch:9200
- SERVER_PUBLICBASEURL=http://local.maix.me:5601
- SERVER_PUBLICBASEURL=https://local.maix.me:9090/kibana
- SERVER_BASEPATH=/kibana
- SERVER_REWRITEBASEPATH=true
- ELASTICSEARCH_USERNAME=elastic
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
ports:
- "5601:5601"
volumes:
- ./logs/kibana:/setup
command: ["/setup/bootstrap.sh"]
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "-s", "localhost:5601/kibana/api/status"]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
volumes:
sqlite-volume:

View file

@ -2,11 +2,11 @@
kibana_setup() {
set -xe
until curl -s -f "localhost:5601/api/status"; do
until curl -s -f "localhost:5601/kibana/api/status"; do
sleep 2
done
curl -v -X POST "localhost:5601/api/saved_objects/_import?overwrite=true" \
curl -v -X POST "localhost:5601/kibana/api/saved_objects/_import?overwrite=true" \
-H "kbn-xsrf: true" \
--form file='@/setup/export.ndjson'
exit 0

View file

@ -17,10 +17,10 @@ scrape_configs:
static_configs:
- targets:
- http://nginx/monitoring/ok
- http://nginx:8080/ok
- http://auth/monitoring
- http://user/monitoring
- http://icons/monitoring
- http://chat/monitoring
relabel_configs:
- source_labels: [__address__]

View file

@ -27,3 +27,9 @@ COPY ./15-local-resolvers.envsh /docker-entrypoint.d/
COPY ./17-add-template-prefix.sh /docker-entrypoint.d/
COPY ./conf /etc/nginx/templates
COPY ./monitoring.index.html /var/share/www/monitoring/
RUN chmod -R +r /var/share/www/monitoring/;
HEALTHCHECK --interval=30s --timeout=3s \
CMD curl -f -s http://localhost:8080/ok?docker || exit 1;

View file

@ -1,10 +1,15 @@
# please make sure you want to edit this file...
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
# this allows the redirection of `http://domain/URL` to `https://domain/URL`
server {
charset UTF-8;
listen 80;
listen [::]:80;
#listen [::]:80;
resolver $NGINX_RESOLVERS;
server_name $NGINX_DOMAIN;
@ -14,7 +19,7 @@ server {
server {
charset UTF-8;
listen [::]:443 ssl;
#listen [::]:443 ssl;
listen 443 ssl;
resolver $NGINX_RESOLVERS;
server_name $NGINX_DOMAIN;
@ -27,3 +32,5 @@ server {
error_page 497 https://$http_host$request_uri;
include conf.d/locations/*.conf;
}
include conf.d/monitoring/server.conf;

View file

@ -1,4 +0,0 @@
location /monitoring/ok {
add_header Content-Type text/plain;
return 200 'healthy';
}

View file

@ -0,0 +1,39 @@
# Reverse-proxy locations for the monitoring stack (Kibana, cAdvisor, Grafana).
location /kibana {
proxy_set_header Host $host;
set $upstream_kibana kibana:5601;
proxy_pass http://$upstream_kibana;
}
location /cadvisor {
proxy_set_header Host $host;
set $upstream_cadvisor cadvisor:8080;
proxy_pass http://$upstream_cadvisor;
}
location /grafana {
proxy_set_header Host $host;
rewrite ^/grafana/?(.*) /$1 break;
set $upstream_grafana grafana:3000;
proxy_pass http://$upstream_grafana;
}
# Proxy Grafana Live WebSocket connections.
location /grafana/api/live/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
rewrite ^/grafana/?(.*) /$1 break;
set $upstream_grafana grafana:3000;
proxy_pass http://$upstream_grafana;
}
location /ok {
add_header Content-Type text/plain;
return 200 'healthy';
}
location / {
root /var/share/www/monitoring/;
index monitoring.index.html;
}

View file

@ -0,0 +1,26 @@
server {
charset UTF-8;
listen 8080;
#listen [::]:8080;
resolver $NGINX_RESOLVERS;
server_name $NGINX_DOMAIN;
include conf.d/monitoring/locations.conf;
}
server {
charset UTF-8;
#listen [::]:10443 ssl;
listen 8443 ssl;
resolver $NGINX_RESOLVERS;
server_name $NGINX_DOMAIN;
ssl_certificate /etc/ssl/certs/nginx-selfsigned.crt;
ssl_certificate_key /etc/ssl/private/nginx-selfsigned.key;
ssl_protocols TLSv1.3;
error_page 497 https://$http_host$request_uri;
include conf.d/monitoring/locations.conf;
}

View file

@ -0,0 +1,64 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Service Dashboard</title>
<style>
body {
font-family: Arial, Helvetica, sans-serif;
background-color: #f4f6f8;
margin: 0;
padding: 0;
}
.container {
max-width: 400px;
margin: 100px auto;
padding: 20px;
background-color: #ffffff;
border-radius: 6px;
box-shadow: 0 2px 6px rgba(0, 0, 0, 0.1);
}
h1 {
font-size: 20px;
text-align: center;
margin-bottom: 20px;
}
ul {
list-style: none;
padding: 0;
margin: 0;
}
li {
margin-bottom: 10px;
}
a {
display: block;
padding: 12px;
text-decoration: none;
color: #333;
background-color: #e9ecef;
border-radius: 4px;
text-align: center;
}
a:hover {
background-color: #dfe3e6;
}
</style>
</head>
<body>
<div class="container">
<h1>Services</h1>
<ul>
<li><a href="/kibana">Kibana</a></li>
<li><a href="/cadvisor">cAdvisor</a></li>
<li><a href="/grafana">Grafana</a></li>
</ul>
</div>
</body>
</html>

View file

@ -28,8 +28,10 @@ export const useDatabase = fp<FastifyPluginAsync>(async function(
if (dbAdded) { return; }
dbAdded = true;
const path = process.env.DATABASE_DIR;
if (isNullish(path)) { throw 'env `DATABASE_DIR` not defined'; }
f.log.info(`Opening database with path: ${path}/database.db`);
if (isNullish(path)) {
f.log.fatal('env `DATABASE_DIR` not defined');
throw 'env `DATABASE_DIR` not defined';
}
const db: Database = new DbImpl(`${path}/database.db`) as Database;
if (!f.hasDecorator('db')) { f.decorate('db', db); }
});

View file

@ -11,7 +11,7 @@ import { FastifyReply } from 'fastify';
import fp from 'fastify-plugin';
export const useMonitoring = fp(async (fastify) => {
fastify.get('/monitoring', { schema: { hide: true } }, (req, res) => {
fastify.get('/monitoring', { schema: { hide: true }, logLevel: 'silent' }, (req, res) => {
void req;
res.code(200).send('Ok');
});

View file

@ -1,6 +1,7 @@
FROM node:22-alpine AS pnpm_base
RUN npm install --global pnpm@10 --no-fund -q;
RUN apk add make python3 gcc clang build-base musl-dev;
RUN apk add --no-cache curl
FROM pnpm_base AS deps
WORKDIR /build
@ -50,5 +51,8 @@ COPY --from=deps /build/node_modules /src/node_modules
COPY ${EXTRA_FILES} /extra
ENTRYPOINT [ "/src/entrypoint.sh" ]
HEALTHCHECK --interval=30s --timeout=3s \
CMD curl -f -s http://localhost/monitoring?docker || exit 1
CMD ["node", "/src/run.cjs"]

View file

@ -1,12 +1,7 @@
#!/bin/sh
set -e
set -x
# do anything here
mkdir -p /volumes/static/auth/
cp -r /extra/login_demo.html /volumes/static/auth/index.html
cp -r /extra/login_demo.js /volumes/static/auth/login_demo.js
# run the CMD [ ... ] from the dockerfile
exec "$@"

View file

@ -4,23 +4,9 @@ import fastify, { FastifyInstance } from 'fastify';
import app from './app';
const start = async () => {
const envToLogger = {
development: {
transport: {
target: 'pino-pretty',
options: {
translateTime: 'HH:MM:ss Z',
ignore: 'pid,hostname',
},
},
},
production: true,
test: false,
};
const f: FastifyInstance = fastify({ logger: envToLogger.development });
const f: FastifyInstance = fastify({ logger: { level: 'info' } });
process.on('SIGTERM', () => {
f.log.info('Requested to shutdown');
f.log.warn('Requested to shutdown');
process.exit(134);
});
try {

View file

@ -1,7 +1,6 @@
#!/bin/sh
set -e
set -x
# do anything here
# run the CMD [ ... ] from the dockerfile

View file

@ -4,33 +4,18 @@ import fastify, { FastifyInstance } from 'fastify';
import app from './app';
const start = async () => {
const envToLogger = {
development: {
transport: {
target: 'pino-pretty',
options: {
translateTime: 'HH:MM:ss Z',
ignore: 'pid,hostname',
},
},
},
production: true,
test: false,
};
const f: FastifyInstance = fastify({ logger: envToLogger.development });
try {
const f: FastifyInstance = fastify({ logger: { level: 'info' } });
process.on('SIGTERM', () => {
f.log.info('Requested to shutdown');
f.log.warn('Requested to shutdown');
process.exit(134);
});
console.log('-------->Serving static files from:');
try {
await f.register(app, {});
await f.listen({ port: 80, host: '0.0.0.0' });
}
catch (err) {
f.log.error(err);
process.exit(1);
};
}
};
start();

View file

@ -1,7 +1,6 @@
#!/bin/sh
set -e
set -x
# do anything here
# run the CMD [ ... ] from the dockerfile

View file

@ -4,23 +4,9 @@ import fastify, { FastifyInstance } from 'fastify';
import app from './app';
const start = async () => {
const envToLogger = {
development: {
transport: {
target: 'pino-pretty',
options: {
translateTime: 'HH:MM:ss Z',
ignore: 'pid,hostname',
},
},
},
production: true,
test: false,
};
const f: FastifyInstance = fastify({ logger: envToLogger.development });
const f: FastifyInstance = fastify({ logger: { level: 'info' } });
process.on('SIGTERM', () => {
f.log.info('Requested to shutdown');
f.log.warn('Requested to shutdown');
process.exit(134);
});
try {