Merge commit 'c40cfc5d09' into upstream-20231129
commit 16914f5f02

50 changed files with 851 additions and 428 deletions
streaming/.dockerignore (new file, 7 lines)

@@ -0,0 +1,7 @@
+.env
+.env.*
+.gitignore
+node_modules
+.DS_Store
+*.swp
+*~
streaming/.eslintrc.js (new file, 32 lines)

@@ -0,0 +1,32 @@
+// @ts-check
+const { defineConfig } = require('eslint-define-config');
+
+module.exports = defineConfig({
+  extends: ['../.eslintrc.js'],
+  env: {
+    browser: false,
+  },
+  parserOptions: {
+    project: true,
+    tsconfigRootDir: __dirname,
+    ecmaFeatures: {
+      jsx: false,
+    },
+    ecmaVersion: 2021,
+  },
+  rules: {
+    'import/no-commonjs': 'off',
+    'import/no-extraneous-dependencies': [
+      'error',
+      {
+        devDependencies: [
+          'streaming/.eslintrc.js',
+        ],
+        optionalDependencies: false,
+        peerDependencies: false,
+        includeTypes: true,
+        packageDir: __dirname,
+      },
+    ],
+  },
+});
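Note: eslint-define-config only adds editor type checking for the config object; the lint behaviour comes from the extended config and rules themselves. A minimal sketch of the same override written without the helper (illustrative, not part of this commit) would simply be a plain object export:

// Illustrative plain-object equivalent of the defineConfig() wrapper above.
module.exports = {
  extends: ['../.eslintrc.js'],
  env: { browser: false },
  // ...parserOptions and rules exactly as in the file above
};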
streaming/Dockerfile (new file, 104 lines)

@@ -0,0 +1,104 @@
+# syntax=docker/dockerfile:1.4
+
+# Please see https://docs.docker.com/engine/reference/builder for information about
+# the extended buildx capabilities used in this file.
+# Make sure multiarch TARGETPLATFORM is available for interpolation
+# See: https://docs.docker.com/build/building/multi-platform/
+ARG TARGETPLATFORM=${TARGETPLATFORM}
+ARG BUILDPLATFORM=${BUILDPLATFORM}
+
+# Node version to use in base image, change with [--build-arg NODE_MAJOR_VERSION="20"]
+ARG NODE_MAJOR_VERSION="20"
+# Debian image to use for base image, change with [--build-arg DEBIAN_VERSION="bookworm"]
+ARG DEBIAN_VERSION="bookworm"
+# Node image to use for base image based on combined variables (ex: 20-bookworm-slim)
+FROM docker.io/node:${NODE_MAJOR_VERSION}-${DEBIAN_VERSION}-slim as streaming
+
+# Timezone used by the Docker container and runtime, change with [--build-arg TZ=Europe/Berlin]
+ARG TZ="Etc/UTC"
+# Linux UID (user id) for the mastodon user, change with [--build-arg UID=1234]
+ARG UID="991"
+# Linux GID (group id) for the mastodon user, change with [--build-arg GID=1234]
+ARG GID="991"
+
+# Apply Mastodon build options based on options above
+ENV \
+# Apply Mastodon version information
+  MASTODON_VERSION_PRERELEASE="${MASTODON_VERSION_PRERELEASE}" \
+  MASTODON_VERSION_METADATA="${MASTODON_VERSION_METADATA}" \
+# Apply timezone
+  TZ=${TZ}
+
+ENV \
+# Configure the IP to bind Mastodon to when serving traffic
+  BIND="0.0.0.0" \
+# Explicitly set PORT to match the exposed port
+  PORT=4000 \
+# Use production settings for Yarn, Node and related nodejs based tools
+  NODE_ENV="production" \
+# Add Ruby and Mastodon installation to the PATH
+  DEBIAN_FRONTEND="noninteractive"
+
+# Set default shell used for running commands
+SHELL ["/bin/bash", "-o", "pipefail", "-o", "errexit", "-c"]
+
+ARG TARGETPLATFORM
+
+RUN echo "Target platform is ${TARGETPLATFORM}"
+
+RUN \
+# Remove automatic apt cache Docker cleanup scripts
+  rm -f /etc/apt/apt.conf.d/docker-clean; \
+# Sets timezone
+  echo "${TZ}" > /etc/localtime; \
+# Creates mastodon user/group and sets home directory
+  groupadd -g "${GID}" mastodon; \
+  useradd -l -u "${UID}" -g "${GID}" -m -d /opt/mastodon mastodon; \
+# Creates symlink for /mastodon folder
+  ln -s /opt/mastodon /mastodon;
+
+# hadolint ignore=DL3008,DL3005
+RUN \
+# Mount Apt cache and lib directories from Docker buildx caches
+  --mount=type=cache,id=apt-cache-${TARGETPLATFORM},target=/var/cache/apt,sharing=locked \
+  --mount=type=cache,id=apt-lib-${TARGETPLATFORM},target=/var/lib/apt,sharing=locked \
+# Upgrade to check for security updates to Debian image
+  apt-get update; \
+  apt-get dist-upgrade -yq; \
+  apt-get install -y --no-install-recommends \
+    ca-certificates \
+    curl \
+    tzdata \
+  ;
+
+# Set /opt/mastodon as working directory
+WORKDIR /opt/mastodon
+
+# Copy Node package configuration files from build system to container
+COPY package.json yarn.lock .yarnrc.yml /opt/mastodon/
+COPY .yarn /opt/mastodon/.yarn
+# Copy Streaming source code from build system to container
+COPY ./streaming /opt/mastodon/streaming
+
+RUN \
+# Mount local Corepack and Yarn caches from Docker buildx caches
+  --mount=type=cache,id=corepack-cache-${TARGETPLATFORM},target=/usr/local/share/.cache/corepack,sharing=locked \
+  --mount=type=cache,id=yarn-cache-${TARGETPLATFORM},target=/usr/local/share/.cache/yarn,sharing=locked \
+# Configure Corepack
+  rm /usr/local/bin/yarn*; \
+  corepack enable; \
+  corepack prepare --activate;
+
+RUN \
+# Mount Corepack and Yarn caches from Docker buildx caches
+  --mount=type=cache,id=corepack-cache-${TARGETPLATFORM},target=/usr/local/share/.cache/corepack,sharing=locked \
+  --mount=type=cache,id=yarn-cache-${TARGETPLATFORM},target=/usr/local/share/.cache/yarn,sharing=locked \
+# Install Node packages
+  yarn workspaces focus --production @mastodon/streaming;
+
+# Set the running user for resulting container
+USER mastodon
+# Expose default Streaming ports
+EXPOSE 4000
+# Run streaming when started
+CMD [ "node", "./streaming/index.js" ]
streaming/index.js (modified)

@@ -12,10 +12,12 @@ const { JSDOM } = require('jsdom');
 const log = require('npmlog');
 const pg = require('pg');
 const dbUrlToConfig = require('pg-connection-string').parse;
-const metrics = require('prom-client');
 const uuid = require('uuid');
 const WebSocket = require('ws');
 
+const { setupMetrics } = require('./metrics');
+const { isTruthy } = require("./utils");
+
 const environment = process.env.NODE_ENV || 'development';
 
 // Correctly detect and load .env or .env.production file based on environment:
@@ -196,78 +198,15 @@ const startServer = async () => {
   const redisClient = await createRedisClient(redisConfig);
   const { redisPrefix } = redisConfig;
 
-  // Collect metrics from Node.js
-  metrics.collectDefaultMetrics();
-
-  new metrics.Gauge({
-    name: 'pg_pool_total_connections',
-    help: 'The total number of clients existing within the pool',
-    collect() {
-      this.set(pgPool.totalCount);
-    },
-  });
-
-  new metrics.Gauge({
-    name: 'pg_pool_idle_connections',
-    help: 'The number of clients which are not checked out but are currently idle in the pool',
-    collect() {
-      this.set(pgPool.idleCount);
-    },
-  });
-
-  new metrics.Gauge({
-    name: 'pg_pool_waiting_queries',
-    help: 'The number of queued requests waiting on a client when all clients are checked out',
-    collect() {
-      this.set(pgPool.waitingCount);
-    },
-  });
-
-  const connectedClients = new metrics.Gauge({
-    name: 'connected_clients',
-    help: 'The number of clients connected to the streaming server',
-    labelNames: ['type'],
-  });
-
-  const connectedChannels = new metrics.Gauge({
-    name: 'connected_channels',
-    help: 'The number of channels the streaming server is streaming to',
-    labelNames: [ 'type', 'channel' ]
-  });
-
-  const redisSubscriptions = new metrics.Gauge({
-    name: 'redis_subscriptions',
-    help: 'The number of Redis channels the streaming server is subscribed to',
-  });
-
-  const redisMessagesReceived = new metrics.Counter({
-    name: 'redis_messages_received_total',
-    help: 'The total number of messages the streaming server has received from redis subscriptions'
-  });
-
-  const messagesSent = new metrics.Counter({
-    name: 'messages_sent_total',
-    help: 'The total number of messages the streaming server sent to clients per connection type',
-    labelNames: [ 'type' ]
-  });
-
-  // Prime the gauges so we don't loose metrics between restarts:
-  redisSubscriptions.set(0);
-  connectedClients.set({ type: 'websocket' }, 0);
-  connectedClients.set({ type: 'eventsource' }, 0);
-
-  // For each channel, initialize the gauges at zero; There's only a finite set of channels available
-  CHANNEL_NAMES.forEach(( channel ) => {
-    connectedChannels.set({ type: 'websocket', channel }, 0);
-    connectedChannels.set({ type: 'eventsource', channel }, 0);
-  });
-
-  // Prime the counters so that we don't loose metrics between restarts.
-  // Unfortunately counters don't support the set() API, so instead I'm using
-  // inc(0) to achieve the same result.
-  redisMessagesReceived.inc(0);
-  messagesSent.inc({ type: 'websocket' }, 0);
-  messagesSent.inc({ type: 'eventsource' }, 0);
+  const metrics = setupMetrics(CHANNEL_NAMES, pgPool);
+  // TODO: migrate all metrics to metrics.X.method() instead of just X.method()
+  const {
+    connectedClients,
+    connectedChannels,
+    redisSubscriptions,
+    redisMessagesReceived,
+    messagesSent,
+  } = metrics;
 
   // When checking metrics in the browser, the favicon is requested this
   // prevents the request from falling through to the API Router, which would
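Note on the hunk above: the inline prom-client setup moves into the new streaming/metrics.js, and index.js now destructures the gauges and counters it needs from the object returned by setupMetrics(). As a hedged illustration of how a connection handler drives those destructured metrics (the onConnection handler and the ws socket object here are assumptions for the sketch, not lines from this diff):

// Illustrative only: a labelled gauge moves up and down with WebSocket connections.
const onConnection = (ws) => {
  connectedClients.inc({ type: 'websocket' });
  ws.on('close', () => connectedClients.dec({ type: 'websocket' }));
};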
@@ -388,25 +327,6 @@ const startServer = async () => {
   }
 };
 
-const FALSE_VALUES = [
-  false,
-  0,
-  '0',
-  'f',
-  'F',
-  'false',
-  'FALSE',
-  'off',
-  'OFF',
-];
-
-/**
- * @param {any} value
- * @returns {boolean}
- */
-const isTruthy = value =>
-  value && !FALSE_VALUES.includes(value);
-
 /**
  * @param {any} req
  * @param {any} res
streaming/metrics.js (new file, 105 lines)

@@ -0,0 +1,105 @@
+// @ts-check
+
+const metrics = require('prom-client');
+
+/**
+ * @typedef StreamingMetrics
+ * @property {metrics.Registry} register
+ * @property {metrics.Gauge<"type">} connectedClients
+ * @property {metrics.Gauge<"type" | "channel">} connectedChannels
+ * @property {metrics.Gauge} redisSubscriptions
+ * @property {metrics.Counter} redisMessagesReceived
+ * @property {metrics.Counter<"type">} messagesSent
+ */
+
+/**
+ *
+ * @param {string[]} channels
+ * @param {import('pg').Pool} pgPool
+ * @returns {StreamingMetrics}
+ */
+function setupMetrics(channels, pgPool) {
+  // Collect metrics from Node.js
+  metrics.collectDefaultMetrics();
+
+  new metrics.Gauge({
+    name: 'pg_pool_total_connections',
+    help: 'The total number of clients existing within the pool',
+    collect() {
+      this.set(pgPool.totalCount);
+    },
+  });
+
+  new metrics.Gauge({
+    name: 'pg_pool_idle_connections',
+    help: 'The number of clients which are not checked out but are currently idle in the pool',
+    collect() {
+      this.set(pgPool.idleCount);
+    },
+  });
+
+  new metrics.Gauge({
+    name: 'pg_pool_waiting_queries',
+    help: 'The number of queued requests waiting on a client when all clients are checked out',
+    collect() {
+      this.set(pgPool.waitingCount);
+    },
+  });
+
+  const connectedClients = new metrics.Gauge({
+    name: 'connected_clients',
+    help: 'The number of clients connected to the streaming server',
+    labelNames: ['type'],
+  });
+
+  const connectedChannels = new metrics.Gauge({
+    name: 'connected_channels',
+    help: 'The number of channels the streaming server is streaming to',
+    labelNames: [ 'type', 'channel' ]
+  });
+
+  const redisSubscriptions = new metrics.Gauge({
+    name: 'redis_subscriptions',
+    help: 'The number of Redis channels the streaming server is subscribed to',
+  });
+
+  const redisMessagesReceived = new metrics.Counter({
+    name: 'redis_messages_received_total',
+    help: 'The total number of messages the streaming server has received from redis subscriptions'
+  });
+
+  const messagesSent = new metrics.Counter({
+    name: 'messages_sent_total',
+    help: 'The total number of messages the streaming server sent to clients per connection type',
+    labelNames: [ 'type' ]
+  });
+
+  // Prime the gauges so we don't lose metrics between restarts:
+  redisSubscriptions.set(0);
+  connectedClients.set({ type: 'websocket' }, 0);
+  connectedClients.set({ type: 'eventsource' }, 0);
+
+  // For each channel, initialize the gauges at zero; There's only a finite set of channels available
+  channels.forEach(( channel ) => {
+    connectedChannels.set({ type: 'websocket', channel }, 0);
+    connectedChannels.set({ type: 'eventsource', channel }, 0);
+  });
+
+  // Prime the counters so that we don't lose metrics between restarts.
+  // Unfortunately counters don't support the set() API, so instead I'm using
+  // inc(0) to achieve the same result.
+  redisMessagesReceived.inc(0);
+  messagesSent.inc({ type: 'websocket' }, 0);
+  messagesSent.inc({ type: 'eventsource' }, 0);
+
+  return {
+    register: metrics.register,
+    connectedClients,
+    connectedChannels,
+    redisSubscriptions,
+    redisMessagesReceived,
+    messagesSent,
+  };
+}
+
+exports.setupMetrics = setupMetrics;
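For orientation, a minimal sketch of how setupMetrics() can be wired up by a caller. The Express app, the /metrics route, and the channel list below are illustrative assumptions for the sketch, not code from this commit:

const express = require('express');
const pg = require('pg');
const { setupMetrics } = require('./metrics');

const app = express();
const pgPool = new pg.Pool(); // connection settings are read from the environment
const { register, messagesSent } = setupMetrics(['user', 'public'], pgPool);

// Expose the Prometheus text format; register.metrics() is async in recent prom-client.
app.get('/metrics', async (req, res) => {
  res.set('Content-Type', register.contentType);
  res.end(await register.metrics());
});

messagesSent.inc({ type: 'websocket' }); // count one message delivered over WebSocket
app.listen(4000);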
streaming/package.json (modified)

@@ -12,7 +12,8 @@
     "url": "https://github.com/mastodon/mastodon.git"
   },
   "scripts": {
-    "start": "node ./index.js"
+    "start": "node ./index.js",
+    "check:types": "tsc --noEmit"
   },
   "dependencies": {
     "dotenv": "^16.0.3",
@@ -30,7 +31,10 @@
     "@types/express": "^4.17.17",
     "@types/npmlog": "^7.0.0",
     "@types/pg": "^8.6.6",
-    "@types/uuid": "^9.0.0"
+    "@types/uuid": "^9.0.0",
+    "@types/ws": "^8.5.9",
+    "eslint-define-config": "^2.0.0",
+    "typescript": "^5.0.4"
   },
   "optionalDependencies": {
     "bufferutil": "^4.0.7",
streaming/tsconfig.json (new file, 11 lines)

@@ -0,0 +1,11 @@
+{
+  "extends": "../tsconfig.json",
+  "compilerOptions": {
+    "target": "esnext",
+    "module": "CommonJS",
+    "moduleResolution": "node",
+    "noUnusedParameters": false,
+    "paths": {}
+  },
+  "include": ["./*.js", "./.eslintrc.js"]
+}
streaming/utils.js (new file, 22 lines)

@@ -0,0 +1,22 @@
+// @ts-check
+
+const FALSE_VALUES = [
+  false,
+  0,
+  '0',
+  'f',
+  'F',
+  'false',
+  'FALSE',
+  'off',
+  'OFF',
+];
+
+/**
+ * @param {any} value
+ * @returns {boolean}
+ */
+const isTruthy = value =>
+  value && !FALSE_VALUES.includes(value);
+
+exports.isTruthy = isTruthy;
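Usage sketch for isTruthy (the environment variable name below is only an example): any value listed in FALSE_VALUES is treated as false, everything else truthy:

const { isTruthy } = require('./utils');

isTruthy('true');     // true  (not listed in FALSE_VALUES)
isTruthy('off');      // false ('off' is listed in FALSE_VALUES)
isTruthy(undefined);  // falsy (short-circuits on the missing value)
const clusterEnabled = isTruthy(process.env.EXAMPLE_FLAG || 'false'); // hypothetical flag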