Compare commits
2 Commits
9e7d7b88ac
...
5f46d26037
| Author | SHA1 | Date | |
|---|---|---|---|
| 5f46d26037 | |||
| 57433c7e75 |
50
kustomize/base/dlob-depth-worker/deployment-drift.yaml
Normal file
50
kustomize/base/dlob-depth-worker/deployment-drift.yaml
Normal file
@@ -0,0 +1,50 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: dlob-depth-worker-drift
|
||||
annotations:
|
||||
argocd.argoproj.io/sync-wave: "6"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: dlob-depth-worker-drift
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/name: dlob-depth-worker-drift
|
||||
spec:
|
||||
containers:
|
||||
- name: worker
|
||||
image: node:20-slim
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: HASURA_GRAPHQL_URL
|
||||
value: http://hasura:8080/v1/graphql
|
||||
- name: HASURA_ADMIN_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trade-hasura
|
||||
key: HASURA_GRAPHQL_ADMIN_SECRET
|
||||
- name: DLOB_SOURCE
|
||||
value: drift
|
||||
- name: DLOB_MARKETS
|
||||
value: PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP
|
||||
- name: DLOB_POLL_MS
|
||||
value: "1000"
|
||||
- name: DLOB_DEPTH_BPS_BANDS
|
||||
value: "5,10,20,50,100,200"
|
||||
- name: PRICE_PRECISION
|
||||
value: "1000000"
|
||||
- name: BASE_PRECISION
|
||||
value: "1000000000"
|
||||
command: ["node", "/app/worker.mjs"]
|
||||
volumeMounts:
|
||||
- name: script
|
||||
mountPath: /app/worker.mjs
|
||||
subPath: worker.mjs
|
||||
readOnly: true
|
||||
volumes:
|
||||
- name: script
|
||||
configMap:
|
||||
name: dlob-depth-worker-script
|
||||
@@ -26,6 +26,8 @@ spec:
|
||||
secretKeyRef:
|
||||
name: trade-hasura
|
||||
key: HASURA_GRAPHQL_ADMIN_SECRET
|
||||
- name: DLOB_SOURCE
|
||||
value: mevnode
|
||||
- name: DLOB_MARKETS
|
||||
value: PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP
|
||||
- name: DLOB_POLL_MS
|
||||
|
||||
@@ -64,6 +64,7 @@ function resolveConfig() {
|
||||
const hasuraAdminSecret = process.env.HASURA_ADMIN_SECRET || process.env.HASURA_GRAPHQL_ADMIN_SECRET || undefined;
|
||||
const hasuraAuthToken = process.env.HASURA_AUTH_TOKEN || process.env.HASURA_JWT || undefined;
|
||||
|
||||
const dlobSource = String(process.env.DLOB_SOURCE || 'mevnode').trim() || 'mevnode';
|
||||
const markets = envList('DLOB_MARKETS', 'PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP');
|
||||
const pollMs = clampInt(process.env.DLOB_POLL_MS, 250, 60_000, 1000);
|
||||
const bandsBps = envIntList('DLOB_DEPTH_BPS_BANDS', '5,10,20,50,100,200');
|
||||
@@ -79,6 +80,7 @@ function resolveConfig() {
|
||||
hasuraUrl,
|
||||
hasuraAdminSecret,
|
||||
hasuraAuthToken,
|
||||
dlobSource,
|
||||
markets,
|
||||
pollMs,
|
||||
bandsBps,
|
||||
@@ -169,8 +171,9 @@ function computeBandDepth({ bids, asks, mid, bandBps }) {
|
||||
|
||||
async function fetchL2Latest(cfg) {
|
||||
const query = `
|
||||
query DlobL2Latest($markets: [String!]!) {
|
||||
dlob_l2_latest(where: {market_name: {_in: $markets}}) {
|
||||
query DlobL2Latest($source: String!, $markets: [String!]!) {
|
||||
dlob_l2_latest(where: {source: {_eq: $source}, market_name: {_in: $markets}}) {
|
||||
source
|
||||
market_name
|
||||
market_type
|
||||
market_index
|
||||
@@ -186,7 +189,7 @@ async function fetchL2Latest(cfg) {
|
||||
}
|
||||
}
|
||||
`;
|
||||
const data = await graphqlRequest(cfg, query, { markets: cfg.markets });
|
||||
const data = await graphqlRequest(cfg, query, { source: cfg.dlobSource, markets: cfg.markets });
|
||||
return Array.isArray(data?.dlob_l2_latest) ? data.dlob_l2_latest : [];
|
||||
}
|
||||
|
||||
@@ -232,6 +235,7 @@ async function main() {
|
||||
startedAt: getIsoNow(),
|
||||
hasuraUrl: cfg.hasuraUrl,
|
||||
hasuraAuth: cfg.hasuraAuthToken ? 'bearer' : cfg.hasuraAdminSecret ? 'admin-secret' : 'none',
|
||||
dlobSource: cfg.dlobSource,
|
||||
markets: cfg.markets,
|
||||
pollMs: cfg.pollMs,
|
||||
bandsBps: cfg.bandsBps,
|
||||
@@ -268,6 +272,7 @@ async function main() {
|
||||
for (const bandBps of cfg.bandsBps) {
|
||||
const d = computeBandDepth({ bids, asks, mid, bandBps });
|
||||
rows.push({
|
||||
source: cfg.dlobSource,
|
||||
market_name: market,
|
||||
band_bps: bandBps,
|
||||
market_type: l2.market_type ? String(l2.market_type) : 'perp',
|
||||
|
||||
50
kustomize/base/dlob-slippage-worker/deployment-drift.yaml
Normal file
50
kustomize/base/dlob-slippage-worker/deployment-drift.yaml
Normal file
@@ -0,0 +1,50 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: dlob-slippage-worker-drift
|
||||
annotations:
|
||||
argocd.argoproj.io/sync-wave: "6"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: dlob-slippage-worker-drift
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/name: dlob-slippage-worker-drift
|
||||
spec:
|
||||
containers:
|
||||
- name: worker
|
||||
image: node:20-slim
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: HASURA_GRAPHQL_URL
|
||||
value: http://hasura:8080/v1/graphql
|
||||
- name: HASURA_ADMIN_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trade-hasura
|
||||
key: HASURA_GRAPHQL_ADMIN_SECRET
|
||||
- name: DLOB_SOURCE
|
||||
value: drift
|
||||
- name: DLOB_MARKETS
|
||||
value: PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP
|
||||
- name: DLOB_POLL_MS
|
||||
value: "1000"
|
||||
- name: DLOB_SLIPPAGE_SIZES_USD
|
||||
value: "0.1,0.2,0.5,1,2,5,10,25,50,100,250,500,1000,5000,10000,50000"
|
||||
- name: PRICE_PRECISION
|
||||
value: "1000000"
|
||||
- name: BASE_PRECISION
|
||||
value: "1000000000"
|
||||
command: ["node", "/app/worker.mjs"]
|
||||
volumeMounts:
|
||||
- name: script
|
||||
mountPath: /app/worker.mjs
|
||||
subPath: worker.mjs
|
||||
readOnly: true
|
||||
volumes:
|
||||
- name: script
|
||||
configMap:
|
||||
name: dlob-slippage-worker-script
|
||||
@@ -26,6 +26,8 @@ spec:
|
||||
secretKeyRef:
|
||||
name: trade-hasura
|
||||
key: HASURA_GRAPHQL_ADMIN_SECRET
|
||||
- name: DLOB_SOURCE
|
||||
value: mevnode
|
||||
- name: DLOB_MARKETS
|
||||
value: PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP
|
||||
- name: DLOB_POLL_MS
|
||||
|
||||
@@ -55,6 +55,7 @@ function resolveConfig() {
|
||||
tokens.hasuraAdminSecret;
|
||||
const hasuraAuthToken = process.env.HASURA_AUTH_TOKEN || process.env.HASURA_JWT || undefined;
|
||||
|
||||
const dlobSource = String(process.env.DLOB_SOURCE || 'mevnode').trim() || 'mevnode';
|
||||
const markets = envList('DLOB_MARKETS', 'PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP');
|
||||
const pollMs = clampInt(process.env.DLOB_POLL_MS, 250, 60_000, 1000);
|
||||
|
||||
@@ -77,6 +78,7 @@ function resolveConfig() {
|
||||
hasuraUrl,
|
||||
hasuraAdminSecret,
|
||||
hasuraAuthToken,
|
||||
dlobSource,
|
||||
markets,
|
||||
pollMs,
|
||||
sizesUsd,
|
||||
@@ -209,6 +211,7 @@ async function main() {
|
||||
startedAt: getIsoNow(),
|
||||
hasuraUrl: cfg.hasuraUrl,
|
||||
hasuraAuth: cfg.hasuraAuthToken ? 'bearer' : cfg.hasuraAdminSecret ? 'admin-secret' : 'none',
|
||||
dlobSource: cfg.dlobSource,
|
||||
markets: cfg.markets,
|
||||
pollMs: cfg.pollMs,
|
||||
sizesUsd: cfg.sizesUsd,
|
||||
@@ -226,8 +229,9 @@ async function main() {
|
||||
|
||||
try {
|
||||
const query = `
|
||||
query DlobL2Latest($markets: [String!]!) {
|
||||
dlob_l2_latest(where: { market_name: { _in: $markets } }) {
|
||||
query DlobL2Latest($source: String!, $markets: [String!]!) {
|
||||
dlob_l2_latest(where: { source: { _eq: $source }, market_name: { _in: $markets } }) {
|
||||
source
|
||||
market_name
|
||||
market_type
|
||||
market_index
|
||||
@@ -242,7 +246,7 @@ async function main() {
|
||||
}
|
||||
`;
|
||||
|
||||
const data = await graphqlRequest(cfg, query, { markets: cfg.markets });
|
||||
const data = await graphqlRequest(cfg, query, { source: cfg.dlobSource, markets: cfg.markets });
|
||||
const rows = Array.isArray(data?.dlob_l2_latest) ? data.dlob_l2_latest : [];
|
||||
|
||||
const objectsV1 = [];
|
||||
@@ -277,6 +281,7 @@ async function main() {
|
||||
{
|
||||
const sim = simulateFill(asks, sizeUsd);
|
||||
const baseObj = {
|
||||
source: cfg.dlobSource,
|
||||
market_name: market,
|
||||
side: 'buy',
|
||||
market_type: row?.market_type ?? 'perp',
|
||||
@@ -302,6 +307,7 @@ async function main() {
|
||||
{
|
||||
const sim = simulateFill(bids, sizeUsd);
|
||||
const baseObj = {
|
||||
source: cfg.dlobSource,
|
||||
market_name: market,
|
||||
side: 'sell',
|
||||
market_type: row?.market_type ?? 'perp',
|
||||
|
||||
44
kustomize/base/dlob-ts-archiver/deployment-drift.yaml
Normal file
44
kustomize/base/dlob-ts-archiver/deployment-drift.yaml
Normal file
@@ -0,0 +1,44 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: dlob-ts-archiver-drift
|
||||
annotations:
|
||||
argocd.argoproj.io/sync-wave: "6"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: dlob-ts-archiver-drift
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/name: dlob-ts-archiver-drift
|
||||
spec:
|
||||
containers:
|
||||
- name: worker
|
||||
image: node:20-slim
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: HASURA_GRAPHQL_URL
|
||||
value: http://hasura:8080/v1/graphql
|
||||
- name: HASURA_ADMIN_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trade-hasura
|
||||
key: HASURA_GRAPHQL_ADMIN_SECRET
|
||||
- name: DLOB_SOURCE
|
||||
value: drift
|
||||
- name: DLOB_MARKETS
|
||||
value: PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP
|
||||
- name: DLOB_TS_POLL_MS
|
||||
value: "1000"
|
||||
command: ["node", "/app/worker.mjs"]
|
||||
volumeMounts:
|
||||
- name: script
|
||||
mountPath: /app/worker.mjs
|
||||
subPath: worker.mjs
|
||||
readOnly: true
|
||||
volumes:
|
||||
- name: script
|
||||
configMap:
|
||||
name: dlob-ts-archiver-script
|
||||
@@ -26,6 +26,8 @@ spec:
|
||||
secretKeyRef:
|
||||
name: trade-hasura
|
||||
key: HASURA_GRAPHQL_ADMIN_SECRET
|
||||
- name: DLOB_SOURCE
|
||||
value: mevnode
|
||||
- name: DLOB_MARKETS
|
||||
value: PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP
|
||||
- name: DLOB_TS_POLL_MS
|
||||
|
||||
@@ -49,10 +49,11 @@ function resolveConfig() {
|
||||
tokens.hasuraAdminSecret;
|
||||
const hasuraAuthToken = process.env.HASURA_AUTH_TOKEN || process.env.HASURA_JWT || undefined;
|
||||
|
||||
const dlobSource = String(process.env.DLOB_SOURCE || 'mevnode').trim() || 'mevnode';
|
||||
const markets = envList('DLOB_MARKETS', 'PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP');
|
||||
const pollMs = clampInt(process.env.DLOB_TS_POLL_MS, 500, 60_000, 1000);
|
||||
|
||||
return { hasuraUrl, hasuraAdminSecret, hasuraAuthToken, markets, pollMs };
|
||||
return { hasuraUrl, hasuraAdminSecret, hasuraAuthToken, dlobSource, markets, pollMs };
|
||||
}
|
||||
|
||||
async function graphqlRequest(cfg, query, variables) {
|
||||
@@ -97,6 +98,7 @@ async function main() {
|
||||
startedAt: getIsoNow(),
|
||||
hasuraUrl: cfg.hasuraUrl,
|
||||
hasuraAuth: cfg.hasuraAuthToken ? 'bearer' : cfg.hasuraAdminSecret ? 'admin-secret' : 'none',
|
||||
dlobSource: cfg.dlobSource,
|
||||
markets: cfg.markets,
|
||||
pollMs: cfg.pollMs,
|
||||
},
|
||||
@@ -110,24 +112,24 @@ async function main() {
|
||||
|
||||
try {
|
||||
const query = `
|
||||
query Latest($markets: [String!]!) {
|
||||
dlob_stats_latest(where: { market_name: { _in: $markets } }) {
|
||||
query Latest($source: String!, $markets: [String!]!) {
|
||||
dlob_stats_latest(where: { source: { _eq: $source }, market_name: { _in: $markets } }) {
|
||||
market_name market_type market_index ts slot
|
||||
mark_price oracle_price best_bid_price best_ask_price mid_price
|
||||
spread_abs spread_bps depth_levels depth_bid_base depth_ask_base depth_bid_usd depth_ask_usd imbalance
|
||||
raw
|
||||
}
|
||||
dlob_depth_bps_latest(where: { market_name: { _in: $markets } }) {
|
||||
dlob_depth_bps_latest(where: { source: { _eq: $source }, market_name: { _in: $markets } }) {
|
||||
market_name band_bps market_type market_index ts slot
|
||||
mid_price best_bid_price best_ask_price bid_base ask_base bid_usd ask_usd imbalance
|
||||
raw
|
||||
}
|
||||
dlob_slippage_latest(where: { market_name: { _in: $markets } }) {
|
||||
dlob_slippage_latest(where: { source: { _eq: $source }, market_name: { _in: $markets } }) {
|
||||
market_name side size_usd market_type market_index ts slot
|
||||
mid_price vwap_price worst_price filled_usd filled_base impact_bps levels_consumed fill_pct
|
||||
raw
|
||||
}
|
||||
dlob_slippage_latest_v2(where: { market_name: { _in: $markets } }) {
|
||||
dlob_slippage_latest_v2(where: { source: { _eq: $source }, market_name: { _in: $markets } }) {
|
||||
market_name side size_usd market_type market_index ts slot
|
||||
mid_price vwap_price worst_price filled_usd filled_base impact_bps levels_consumed fill_pct
|
||||
raw
|
||||
@@ -135,10 +137,11 @@ async function main() {
|
||||
}
|
||||
`;
|
||||
|
||||
const data = await graphqlRequest(cfg, query, { markets: cfg.markets });
|
||||
const data = await graphqlRequest(cfg, query, { source: cfg.dlobSource, markets: cfg.markets });
|
||||
|
||||
const statsRows = (data?.dlob_stats_latest || []).map((r) => ({
|
||||
ts: now,
|
||||
source: cfg.dlobSource,
|
||||
market_name: r.market_name,
|
||||
market_type: r.market_type,
|
||||
market_index: r.market_index ?? null,
|
||||
@@ -162,6 +165,7 @@ async function main() {
|
||||
|
||||
const depthRows = (data?.dlob_depth_bps_latest || []).map((r) => ({
|
||||
ts: now,
|
||||
source: cfg.dlobSource,
|
||||
market_name: r.market_name,
|
||||
band_bps: r.band_bps,
|
||||
market_type: r.market_type,
|
||||
@@ -181,6 +185,7 @@ async function main() {
|
||||
|
||||
const slippageRows = (data?.dlob_slippage_latest || []).map((r) => ({
|
||||
ts: now,
|
||||
source: cfg.dlobSource,
|
||||
market_name: r.market_name,
|
||||
side: r.side,
|
||||
size_usd: r.size_usd,
|
||||
@@ -201,6 +206,7 @@ async function main() {
|
||||
|
||||
const slippageRowsV2 = (data?.dlob_slippage_latest_v2 || []).map((r) => ({
|
||||
ts: now,
|
||||
source: cfg.dlobSource,
|
||||
market_name: r.market_name,
|
||||
side: r.side,
|
||||
size_usd: r.size_usd,
|
||||
|
||||
52
kustomize/base/dlob-worker/deployment-drift.yaml
Normal file
52
kustomize/base/dlob-worker/deployment-drift.yaml
Normal file
@@ -0,0 +1,52 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: dlob-worker-drift
|
||||
annotations:
|
||||
argocd.argoproj.io/sync-wave: "5"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: dlob-worker-drift
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/name: dlob-worker-drift
|
||||
spec:
|
||||
hostNetwork: true
|
||||
dnsPolicy: ClusterFirstWithHostNet
|
||||
containers:
|
||||
- name: worker
|
||||
image: node:20-slim
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: HASURA_GRAPHQL_URL
|
||||
value: http://hasura:8080/v1/graphql
|
||||
- name: HASURA_ADMIN_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trade-hasura
|
||||
key: HASURA_GRAPHQL_ADMIN_SECRET
|
||||
- name: DLOB_SOURCE
|
||||
value: drift
|
||||
- name: DLOB_HTTP_URL
|
||||
value: https://dlob.drift.trade
|
||||
- name: DLOB_FORCE_IPV6
|
||||
value: "true"
|
||||
- name: DLOB_MARKETS
|
||||
value: PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP
|
||||
- name: DLOB_POLL_MS
|
||||
value: "500"
|
||||
- name: DLOB_DEPTH
|
||||
value: "10"
|
||||
command: ["node", "/app/worker.mjs"]
|
||||
volumeMounts:
|
||||
- name: script
|
||||
mountPath: /app/worker.mjs
|
||||
subPath: worker.mjs
|
||||
readOnly: true
|
||||
volumes:
|
||||
- name: script
|
||||
configMap:
|
||||
name: dlob-worker-script
|
||||
@@ -26,6 +26,8 @@ spec:
|
||||
secretKeyRef:
|
||||
name: trade-hasura
|
||||
key: HASURA_GRAPHQL_ADMIN_SECRET
|
||||
- name: DLOB_SOURCE
|
||||
value: mevnode
|
||||
- name: DLOB_HTTP_URL
|
||||
value: http://dlob-server:6969
|
||||
- name: DLOB_MARKETS
|
||||
|
||||
@@ -64,6 +64,7 @@ function resolveConfig() {
|
||||
.trim()
|
||||
.replace(/\/$/, '');
|
||||
const dlobForceIpv6 = envBool('DLOB_FORCE_IPV6', false);
|
||||
const dlobSource = String(process.env.DLOB_SOURCE || 'mevnode').trim() || 'mevnode';
|
||||
|
||||
const markets = envList('DLOB_MARKETS', 'PUMP-PERP,SOL-PERP,1MBONK-PERP,BTC-PERP,ETH-PERP');
|
||||
const depth = clampInt(process.env.DLOB_DEPTH, 1, 50, 10);
|
||||
@@ -80,6 +81,7 @@ function resolveConfig() {
|
||||
hasuraUrl,
|
||||
hasuraAdminSecret,
|
||||
hasuraAuthToken,
|
||||
dlobSource,
|
||||
dlobHttpBase,
|
||||
dlobForceIpv6,
|
||||
markets,
|
||||
@@ -238,8 +240,9 @@ function computeStats({ l2, depth, pricePrecision, basePrecision }) {
|
||||
};
|
||||
}
|
||||
|
||||
function l2ToInsertObject({ l2, updatedAt, pricePrecision }) {
|
||||
function l2ToInsertObject({ dlobSource, l2, updatedAt, pricePrecision }) {
|
||||
return {
|
||||
source: dlobSource,
|
||||
market_name: String(l2.marketName),
|
||||
market_type: String(l2.marketType || 'perp'),
|
||||
market_index: typeof l2.marketIndex === 'number' ? l2.marketIndex : null,
|
||||
@@ -256,8 +259,9 @@ function l2ToInsertObject({ l2, updatedAt, pricePrecision }) {
|
||||
};
|
||||
}
|
||||
|
||||
function statsToInsertObject({ l2, stats, updatedAt }) {
|
||||
function statsToInsertObject({ dlobSource, l2, stats, updatedAt }) {
|
||||
return {
|
||||
source: dlobSource,
|
||||
market_name: String(l2.marketName),
|
||||
market_type: String(l2.marketType || 'perp'),
|
||||
market_index: typeof l2.marketIndex === 'number' ? l2.marketIndex : null,
|
||||
@@ -371,6 +375,7 @@ async function main() {
|
||||
startedAt: getIsoNow(),
|
||||
hasuraUrl: cfg.hasuraUrl,
|
||||
hasuraAuth: cfg.hasuraAuthToken ? 'bearer' : cfg.hasuraAdminSecret ? 'admin-secret' : 'none',
|
||||
dlobSource: cfg.dlobSource,
|
||||
dlobHttpBase: cfg.dlobHttpBase,
|
||||
dlobForceIpv6: cfg.dlobForceIpv6,
|
||||
markets: cfg.markets,
|
||||
@@ -410,8 +415,8 @@ async function main() {
|
||||
basePrecision: cfg.basePrecision,
|
||||
});
|
||||
|
||||
l2Objects.push(l2ToInsertObject({ l2, updatedAt, pricePrecision: cfg.pricePrecision }));
|
||||
statsObjects.push(statsToInsertObject({ l2, stats, updatedAt }));
|
||||
l2Objects.push(l2ToInsertObject({ dlobSource: cfg.dlobSource, l2, updatedAt, pricePrecision: cfg.pricePrecision }));
|
||||
statsObjects.push(statsToInsertObject({ dlobSource: cfg.dlobSource, l2, stats, updatedAt }));
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
@@ -89,6 +89,8 @@ async function main() {
|
||||
console.log(`[hasura-bootstrap] HASURA_URL=${HASURA_URL}`);
|
||||
await waitForHasura();
|
||||
|
||||
const PUBLIC_DLOB_SOURCE_HEADER = 'X-Hasura-Dlob-Source';
|
||||
|
||||
const apiTokensTable = { schema: 'public', name: 'api_tokens' };
|
||||
const source = 'default';
|
||||
|
||||
@@ -187,7 +189,7 @@ async function main() {
|
||||
'updated_at',
|
||||
]);
|
||||
|
||||
const ensureDlobTable = async (table, columns) => {
|
||||
const ensureDlobTable = async (table, columns, { publicFilter } = {}) => {
|
||||
await metadataIgnore({ type: 'pg_untrack_table', args: { source, table } });
|
||||
await metadata({ type: 'pg_track_table', args: { source, table } });
|
||||
|
||||
@@ -200,7 +202,7 @@ async function main() {
|
||||
role: 'public',
|
||||
permission: {
|
||||
columns,
|
||||
filter: {},
|
||||
filter: publicFilter || {},
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -236,7 +238,7 @@ async function main() {
|
||||
});
|
||||
};
|
||||
|
||||
async function ensurePublicSelectTable(table, columns) {
|
||||
async function ensurePublicSelectTable(table, columns, { publicFilter } = {}) {
|
||||
await metadataIgnore({ type: 'pg_untrack_table', args: { source, table } });
|
||||
await metadata({ type: 'pg_track_table', args: { source, table } });
|
||||
|
||||
@@ -249,7 +251,7 @@ async function main() {
|
||||
role: 'public',
|
||||
permission: {
|
||||
columns,
|
||||
filter: {},
|
||||
filter: publicFilter || {},
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -259,7 +261,10 @@ async function main() {
|
||||
await metadataIgnore({ type: 'pg_drop_update_permission', args: { source, table, role: 'ingestor' } });
|
||||
}
|
||||
|
||||
const dlobPublicFilter = { source: { _eq: PUBLIC_DLOB_SOURCE_HEADER } };
|
||||
|
||||
await ensureDlobTable(dlobL2LatestTable, [
|
||||
'source',
|
||||
'market_name',
|
||||
'market_type',
|
||||
'market_index',
|
||||
@@ -273,9 +278,10 @@ async function main() {
|
||||
'asks',
|
||||
'raw',
|
||||
'updated_at',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
await ensureDlobTable(dlobStatsLatestTable, [
|
||||
'source',
|
||||
'market_name',
|
||||
'market_type',
|
||||
'market_index',
|
||||
@@ -296,9 +302,10 @@ async function main() {
|
||||
'imbalance',
|
||||
'raw',
|
||||
'updated_at',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
await ensurePublicSelectTable(dlobDepthBpsLatestTable, [
|
||||
'source',
|
||||
'market_name',
|
||||
'band_bps',
|
||||
'market_type',
|
||||
@@ -315,9 +322,10 @@ async function main() {
|
||||
'imbalance',
|
||||
'raw',
|
||||
'updated_at',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
await ensurePublicSelectTable(dlobSlippageLatestTable, [
|
||||
'source',
|
||||
'market_name',
|
||||
'side',
|
||||
'size_usd',
|
||||
@@ -337,9 +345,10 @@ async function main() {
|
||||
'fill_pct',
|
||||
'raw',
|
||||
'updated_at',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
await ensurePublicSelectTable(dlobSlippageLatestV2Table, [
|
||||
'source',
|
||||
'market_name',
|
||||
'side',
|
||||
'size_usd',
|
||||
@@ -359,11 +368,12 @@ async function main() {
|
||||
'fill_pct',
|
||||
'raw',
|
||||
'updated_at',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
await ensurePublicSelectTable(dlobStatsTsTable, [
|
||||
'ts',
|
||||
'id',
|
||||
'source',
|
||||
'market_name',
|
||||
'market_type',
|
||||
'market_index',
|
||||
@@ -383,11 +393,12 @@ async function main() {
|
||||
'depth_ask_usd',
|
||||
'imbalance',
|
||||
'raw',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
await ensurePublicSelectTable(dlobDepthBpsTsTable, [
|
||||
'ts',
|
||||
'id',
|
||||
'source',
|
||||
'market_name',
|
||||
'band_bps',
|
||||
'market_type',
|
||||
@@ -403,11 +414,12 @@ async function main() {
|
||||
'ask_usd',
|
||||
'imbalance',
|
||||
'raw',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
await ensurePublicSelectTable(dlobSlippageTsTable, [
|
||||
'ts',
|
||||
'id',
|
||||
'source',
|
||||
'market_name',
|
||||
'side',
|
||||
'size_usd',
|
||||
@@ -424,11 +436,12 @@ async function main() {
|
||||
'levels_consumed',
|
||||
'fill_pct',
|
||||
'raw',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
await ensurePublicSelectTable(dlobSlippageTsV2Table, [
|
||||
'ts',
|
||||
'id',
|
||||
'source',
|
||||
'market_name',
|
||||
'side',
|
||||
'size_usd',
|
||||
@@ -445,7 +458,7 @@ async function main() {
|
||||
'levels_consumed',
|
||||
'fill_pct',
|
||||
'raw',
|
||||
]);
|
||||
], { publicFilter: dlobPublicFilter });
|
||||
|
||||
// Return table type for candle functions (needed for Hasura to track the function).
|
||||
await metadataIgnore({ type: 'pg_track_table', args: { source, table: candlesReturnTable } });
|
||||
|
||||
@@ -321,7 +321,8 @@ $$;
|
||||
-- Latest DLOB orderbook snapshots (top-N levels), per market.
|
||||
-- Filled by a VPS worker (collector) and consumed by the UI via Hasura subscriptions.
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_l2_latest (
|
||||
market_name TEXT PRIMARY KEY,
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
market_type TEXT NOT NULL DEFAULT 'perp',
|
||||
market_index INTEGER,
|
||||
ts BIGINT,
|
||||
@@ -333,15 +334,52 @@ CREATE TABLE IF NOT EXISTS public.dlob_l2_latest (
|
||||
bids JSONB,
|
||||
asks JSONB,
|
||||
raw JSONB,
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (source, market_name)
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_l2_latest ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_l2_latest ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_l2_latest SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_l2_latest ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
-- Ensure PRIMARY KEY is (source, market_name) (required to keep 2 sources in parallel).
|
||||
DO $$
|
||||
DECLARE
|
||||
pk_name text;
|
||||
pk_cols text[];
|
||||
BEGIN
|
||||
SELECT
|
||||
con.conname,
|
||||
array_agg(att.attname ORDER BY ord.ordinality)
|
||||
INTO pk_name, pk_cols
|
||||
FROM pg_constraint con
|
||||
JOIN pg_class rel ON rel.oid = con.conrelid
|
||||
JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
|
||||
JOIN unnest(con.conkey) WITH ORDINALITY AS ord(attnum, ordinality) ON true
|
||||
JOIN pg_attribute att ON att.attrelid = rel.oid AND att.attnum = ord.attnum
|
||||
WHERE con.contype = 'p' AND nsp.nspname = 'public' AND rel.relname = 'dlob_l2_latest'
|
||||
GROUP BY con.conname;
|
||||
|
||||
IF pk_name IS NULL THEN
|
||||
EXECUTE 'ALTER TABLE public.dlob_l2_latest ADD CONSTRAINT dlob_l2_latest_pkey PRIMARY KEY (source, market_name)';
|
||||
ELSIF pk_cols <> ARRAY['source','market_name'] THEN
|
||||
EXECUTE format('ALTER TABLE public.dlob_l2_latest DROP CONSTRAINT %I', pk_name);
|
||||
EXECUTE 'ALTER TABLE public.dlob_l2_latest ADD CONSTRAINT dlob_l2_latest_pkey PRIMARY KEY (source, market_name)';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_l2_latest_updated_at_idx
|
||||
ON public.dlob_l2_latest (updated_at DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_l2_latest_source_updated_at_idx
|
||||
ON public.dlob_l2_latest (source, updated_at DESC);
|
||||
|
||||
-- Derived stats for fast UI display.
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_stats_latest (
|
||||
market_name TEXT PRIMARY KEY,
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
market_type TEXT NOT NULL DEFAULT 'perp',
|
||||
market_index INTEGER,
|
||||
ts BIGINT,
|
||||
@@ -360,15 +398,52 @@ CREATE TABLE IF NOT EXISTS public.dlob_stats_latest (
|
||||
depth_ask_usd NUMERIC,
|
||||
imbalance NUMERIC,
|
||||
raw JSONB,
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (source, market_name)
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_stats_latest ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_stats_latest ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_stats_latest SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_stats_latest ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
-- Ensure PRIMARY KEY is (source, market_name) (required to keep 2 sources in parallel).
|
||||
DO $$
|
||||
DECLARE
|
||||
pk_name text;
|
||||
pk_cols text[];
|
||||
BEGIN
|
||||
SELECT
|
||||
con.conname,
|
||||
array_agg(att.attname ORDER BY ord.ordinality)
|
||||
INTO pk_name, pk_cols
|
||||
FROM pg_constraint con
|
||||
JOIN pg_class rel ON rel.oid = con.conrelid
|
||||
JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
|
||||
JOIN unnest(con.conkey) WITH ORDINALITY AS ord(attnum, ordinality) ON true
|
||||
JOIN pg_attribute att ON att.attrelid = rel.oid AND att.attnum = ord.attnum
|
||||
WHERE con.contype = 'p' AND nsp.nspname = 'public' AND rel.relname = 'dlob_stats_latest'
|
||||
GROUP BY con.conname;
|
||||
|
||||
IF pk_name IS NULL THEN
|
||||
EXECUTE 'ALTER TABLE public.dlob_stats_latest ADD CONSTRAINT dlob_stats_latest_pkey PRIMARY KEY (source, market_name)';
|
||||
ELSIF pk_cols <> ARRAY['source','market_name'] THEN
|
||||
EXECUTE format('ALTER TABLE public.dlob_stats_latest DROP CONSTRAINT %I', pk_name);
|
||||
EXECUTE 'ALTER TABLE public.dlob_stats_latest ADD CONSTRAINT dlob_stats_latest_pkey PRIMARY KEY (source, market_name)';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_stats_latest_updated_at_idx
|
||||
ON public.dlob_stats_latest (updated_at DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_stats_latest_source_updated_at_idx
|
||||
ON public.dlob_stats_latest (source, updated_at DESC);
|
||||
|
||||
-- Depth snapshots within bps bands around mid-price (per market, per band).
|
||||
-- Filled by a derived worker that reads `dlob_l2_latest`.
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_depth_bps_latest (
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
band_bps INTEGER NOT NULL,
|
||||
market_type TEXT NOT NULL DEFAULT 'perp',
|
||||
@@ -385,18 +460,54 @@ CREATE TABLE IF NOT EXISTS public.dlob_depth_bps_latest (
|
||||
imbalance NUMERIC,
|
||||
raw JSONB,
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (market_name, band_bps)
|
||||
PRIMARY KEY (source, market_name, band_bps)
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_depth_bps_latest ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_depth_bps_latest ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_depth_bps_latest SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_depth_bps_latest ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
-- Ensure PRIMARY KEY is (source, market_name, band_bps) (required to keep 2 sources in parallel).
|
||||
DO $$
|
||||
DECLARE
|
||||
pk_name text;
|
||||
pk_cols text[];
|
||||
BEGIN
|
||||
SELECT
|
||||
con.conname,
|
||||
array_agg(att.attname ORDER BY ord.ordinality)
|
||||
INTO pk_name, pk_cols
|
||||
FROM pg_constraint con
|
||||
JOIN pg_class rel ON rel.oid = con.conrelid
|
||||
JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
|
||||
JOIN unnest(con.conkey) WITH ORDINALITY AS ord(attnum, ordinality) ON true
|
||||
JOIN pg_attribute att ON att.attrelid = rel.oid AND att.attnum = ord.attnum
|
||||
WHERE con.contype = 'p' AND nsp.nspname = 'public' AND rel.relname = 'dlob_depth_bps_latest'
|
||||
GROUP BY con.conname;
|
||||
|
||||
IF pk_name IS NULL THEN
|
||||
EXECUTE 'ALTER TABLE public.dlob_depth_bps_latest ADD CONSTRAINT dlob_depth_bps_latest_pkey PRIMARY KEY (source, market_name, band_bps)';
|
||||
ELSIF pk_cols <> ARRAY['source','market_name','band_bps'] THEN
|
||||
EXECUTE format('ALTER TABLE public.dlob_depth_bps_latest DROP CONSTRAINT %I', pk_name);
|
||||
EXECUTE 'ALTER TABLE public.dlob_depth_bps_latest ADD CONSTRAINT dlob_depth_bps_latest_pkey PRIMARY KEY (source, market_name, band_bps)';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_depth_bps_latest_updated_at_idx
|
||||
ON public.dlob_depth_bps_latest (updated_at DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_depth_bps_latest_market_name_idx
|
||||
ON public.dlob_depth_bps_latest (market_name);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_depth_bps_latest_source_market_name_idx
|
||||
ON public.dlob_depth_bps_latest (source, market_name);
|
||||
|
||||
-- Slippage/impact estimates for "market" orders at common USD sizes.
|
||||
-- Filled by a derived worker that reads `dlob_l2_latest`.
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_slippage_latest (
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
side TEXT NOT NULL,
|
||||
size_usd INTEGER NOT NULL,
|
||||
@@ -416,19 +527,55 @@ CREATE TABLE IF NOT EXISTS public.dlob_slippage_latest (
|
||||
fill_pct NUMERIC,
|
||||
raw JSONB,
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (market_name, side, size_usd),
|
||||
PRIMARY KEY (source, market_name, side, size_usd),
|
||||
CONSTRAINT dlob_slippage_latest_side_chk CHECK (side IN ('buy', 'sell'))
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_slippage_latest ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_slippage_latest ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_slippage_latest SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_slippage_latest ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
-- Ensure PRIMARY KEY is (source, market_name, side, size_usd) (required to keep 2 sources in parallel).
|
||||
DO $$
|
||||
DECLARE
|
||||
pk_name text;
|
||||
pk_cols text[];
|
||||
BEGIN
|
||||
SELECT
|
||||
con.conname,
|
||||
array_agg(att.attname ORDER BY ord.ordinality)
|
||||
INTO pk_name, pk_cols
|
||||
FROM pg_constraint con
|
||||
JOIN pg_class rel ON rel.oid = con.conrelid
|
||||
JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
|
||||
JOIN unnest(con.conkey) WITH ORDINALITY AS ord(attnum, ordinality) ON true
|
||||
JOIN pg_attribute att ON att.attrelid = rel.oid AND att.attnum = ord.attnum
|
||||
WHERE con.contype = 'p' AND nsp.nspname = 'public' AND rel.relname = 'dlob_slippage_latest'
|
||||
GROUP BY con.conname;
|
||||
|
||||
IF pk_name IS NULL THEN
|
||||
EXECUTE 'ALTER TABLE public.dlob_slippage_latest ADD CONSTRAINT dlob_slippage_latest_pkey PRIMARY KEY (source, market_name, side, size_usd)';
|
||||
ELSIF pk_cols <> ARRAY['source','market_name','side','size_usd'] THEN
|
||||
EXECUTE format('ALTER TABLE public.dlob_slippage_latest DROP CONSTRAINT %I', pk_name);
|
||||
EXECUTE 'ALTER TABLE public.dlob_slippage_latest ADD CONSTRAINT dlob_slippage_latest_pkey PRIMARY KEY (source, market_name, side, size_usd)';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_latest_updated_at_idx
|
||||
ON public.dlob_slippage_latest (updated_at DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_latest_market_name_idx
|
||||
ON public.dlob_slippage_latest (market_name);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_latest_source_market_name_idx
|
||||
ON public.dlob_slippage_latest (source, market_name);
|
||||
|
||||
-- Slippage v2: supports fractional order sizes (e.g. 0.1/0.2/0.5 USD), per market and side.
|
||||
-- Keep v1 intact for backward compatibility and to avoid data loss.
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_slippage_latest_v2 (
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
side TEXT NOT NULL, -- buy|sell
|
||||
size_usd NUMERIC NOT NULL,
|
||||
@@ -448,22 +595,58 @@ CREATE TABLE IF NOT EXISTS public.dlob_slippage_latest_v2 (
|
||||
fill_pct NUMERIC,
|
||||
raw JSONB,
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY (market_name, side, size_usd),
|
||||
PRIMARY KEY (source, market_name, side, size_usd),
|
||||
CONSTRAINT dlob_slippage_latest_v2_side_chk CHECK (side IN ('buy', 'sell'))
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_slippage_latest_v2 ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_slippage_latest_v2 ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_slippage_latest_v2 SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_slippage_latest_v2 ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
-- Ensure PRIMARY KEY is (source, market_name, side, size_usd) (required to keep 2 sources in parallel).
|
||||
DO $$
|
||||
DECLARE
|
||||
pk_name text;
|
||||
pk_cols text[];
|
||||
BEGIN
|
||||
SELECT
|
||||
con.conname,
|
||||
array_agg(att.attname ORDER BY ord.ordinality)
|
||||
INTO pk_name, pk_cols
|
||||
FROM pg_constraint con
|
||||
JOIN pg_class rel ON rel.oid = con.conrelid
|
||||
JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
|
||||
JOIN unnest(con.conkey) WITH ORDINALITY AS ord(attnum, ordinality) ON true
|
||||
JOIN pg_attribute att ON att.attrelid = rel.oid AND att.attnum = ord.attnum
|
||||
WHERE con.contype = 'p' AND nsp.nspname = 'public' AND rel.relname = 'dlob_slippage_latest_v2'
|
||||
GROUP BY con.conname;
|
||||
|
||||
IF pk_name IS NULL THEN
|
||||
EXECUTE 'ALTER TABLE public.dlob_slippage_latest_v2 ADD CONSTRAINT dlob_slippage_latest_v2_pkey PRIMARY KEY (source, market_name, side, size_usd)';
|
||||
ELSIF pk_cols <> ARRAY['source','market_name','side','size_usd'] THEN
|
||||
EXECUTE format('ALTER TABLE public.dlob_slippage_latest_v2 DROP CONSTRAINT %I', pk_name);
|
||||
EXECUTE 'ALTER TABLE public.dlob_slippage_latest_v2 ADD CONSTRAINT dlob_slippage_latest_v2_pkey PRIMARY KEY (source, market_name, side, size_usd)';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_latest_v2_updated_at_idx
|
||||
ON public.dlob_slippage_latest_v2 (updated_at DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_latest_v2_market_name_idx
|
||||
ON public.dlob_slippage_latest_v2 (market_name);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_latest_v2_source_market_name_idx
|
||||
ON public.dlob_slippage_latest_v2 (source, market_name);
|
||||
|
||||
-- Time-series tables for UI history (start: 7 days).
|
||||
-- Keep these append-only; use Timescale hypertables.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_stats_ts (
|
||||
ts TIMESTAMPTZ NOT NULL,
|
||||
id BIGSERIAL NOT NULL,
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
market_type TEXT NOT NULL DEFAULT 'perp',
|
||||
market_index INTEGER,
|
||||
@@ -486,14 +669,24 @@ CREATE TABLE IF NOT EXISTS public.dlob_stats_ts (
|
||||
PRIMARY KEY (ts, id)
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_stats_ts ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_stats_ts ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_stats_ts SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_stats_ts ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
SELECT create_hypertable('dlob_stats_ts', 'ts', if_not_exists => TRUE, migrate_data => TRUE);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_stats_ts_market_ts_desc_idx
|
||||
ON public.dlob_stats_ts (market_name, ts DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_stats_ts_source_market_ts_desc_idx
|
||||
ON public.dlob_stats_ts (source, market_name, ts DESC);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_depth_bps_ts (
|
||||
ts TIMESTAMPTZ NOT NULL,
|
||||
id BIGSERIAL NOT NULL,
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
band_bps INTEGER NOT NULL,
|
||||
market_type TEXT NOT NULL DEFAULT 'perp',
|
||||
@@ -512,14 +705,24 @@ CREATE TABLE IF NOT EXISTS public.dlob_depth_bps_ts (
|
||||
PRIMARY KEY (ts, id)
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_depth_bps_ts ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_depth_bps_ts ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_depth_bps_ts SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_depth_bps_ts ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
SELECT create_hypertable('dlob_depth_bps_ts', 'ts', if_not_exists => TRUE, migrate_data => TRUE);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_depth_bps_ts_market_ts_desc_idx
|
||||
ON public.dlob_depth_bps_ts (market_name, ts DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_depth_bps_ts_source_market_ts_desc_idx
|
||||
ON public.dlob_depth_bps_ts (source, market_name, ts DESC);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_slippage_ts (
|
||||
ts TIMESTAMPTZ NOT NULL,
|
||||
id BIGSERIAL NOT NULL,
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
side TEXT NOT NULL,
|
||||
size_usd INTEGER NOT NULL,
|
||||
@@ -539,14 +742,24 @@ CREATE TABLE IF NOT EXISTS public.dlob_slippage_ts (
|
||||
PRIMARY KEY (ts, id)
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_slippage_ts ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_slippage_ts ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_slippage_ts SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_slippage_ts ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
SELECT create_hypertable('dlob_slippage_ts', 'ts', if_not_exists => TRUE, migrate_data => TRUE);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_ts_market_ts_desc_idx
|
||||
ON public.dlob_slippage_ts (market_name, ts DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_ts_source_market_ts_desc_idx
|
||||
ON public.dlob_slippage_ts (source, market_name, ts DESC);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.dlob_slippage_ts_v2 (
|
||||
ts TIMESTAMPTZ NOT NULL,
|
||||
id BIGSERIAL NOT NULL,
|
||||
source TEXT NOT NULL DEFAULT 'mevnode',
|
||||
market_name TEXT NOT NULL,
|
||||
side TEXT NOT NULL,
|
||||
size_usd NUMERIC NOT NULL,
|
||||
@@ -566,11 +779,19 @@ CREATE TABLE IF NOT EXISTS public.dlob_slippage_ts_v2 (
|
||||
PRIMARY KEY (ts, id)
|
||||
);
|
||||
|
||||
-- Schema upgrades (idempotent for existing volumes)
|
||||
ALTER TABLE public.dlob_slippage_ts_v2 ADD COLUMN IF NOT EXISTS source TEXT;
|
||||
ALTER TABLE public.dlob_slippage_ts_v2 ALTER COLUMN source SET DEFAULT 'mevnode';
|
||||
UPDATE public.dlob_slippage_ts_v2 SET source = 'mevnode' WHERE source IS NULL;
|
||||
ALTER TABLE public.dlob_slippage_ts_v2 ALTER COLUMN source SET NOT NULL;
|
||||
|
||||
SELECT create_hypertable('dlob_slippage_ts_v2', 'ts', if_not_exists => TRUE, migrate_data => TRUE);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_ts_v2_market_ts_desc_idx
|
||||
ON public.dlob_slippage_ts_v2 (market_name, ts DESC);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS dlob_slippage_ts_v2_source_market_ts_desc_idx
|
||||
ON public.dlob_slippage_ts_v2 (source, market_name, ts DESC);
|
||||
-- Retention policies (best-effort; safe if Timescale is present).
|
||||
DO $$
|
||||
BEGIN
|
||||
|
||||
@@ -19,9 +19,13 @@ resources:
|
||||
- dlob/server-service.yaml
|
||||
- dlob/server-deployment.yaml
|
||||
- dlob-worker/deployment.yaml
|
||||
- dlob-worker/deployment-drift.yaml
|
||||
- dlob-depth-worker/deployment.yaml
|
||||
- dlob-depth-worker/deployment-drift.yaml
|
||||
- dlob-slippage-worker/deployment.yaml
|
||||
- dlob-slippage-worker/deployment-drift.yaml
|
||||
- dlob-ts-archiver/deployment.yaml
|
||||
- dlob-ts-archiver/deployment-drift.yaml
|
||||
- candles-cache-worker/deployment.yaml
|
||||
|
||||
configMapGenerator:
|
||||
|
||||
18
kustomize/overlays/prod/frontend-graphql-proxy-patch.yaml
Normal file
18
kustomize/overlays/prod/frontend-graphql-proxy-patch.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: trade-frontend
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: frontend
|
||||
volumeMounts:
|
||||
- name: frontend-server-script
|
||||
mountPath: /app/services/frontend/server.mjs
|
||||
subPath: frontend-server.mjs
|
||||
readOnly: true
|
||||
volumes:
|
||||
- name: frontend-server-script
|
||||
configMap:
|
||||
name: trade-frontend-server-script
|
||||
813
kustomize/overlays/prod/frontend-server.mjs
Normal file
813
kustomize/overlays/prod/frontend-server.mjs
Normal file
@@ -0,0 +1,813 @@
|
||||
import crypto from 'node:crypto';
|
||||
import { spawnSync } from 'node:child_process';
|
||||
import fs from 'node:fs';
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
import net from 'node:net';
|
||||
import path from 'node:path';
|
||||
import tls from 'node:tls';
|
||||
|
||||
const PORT = Number.parseInt(process.env.PORT || '8081', 10);
|
||||
if (!Number.isInteger(PORT) || PORT <= 0) throw new Error(`Invalid PORT: ${process.env.PORT}`);
|
||||
|
||||
const APP_VERSION = String(process.env.APP_VERSION || 'v1').trim() || 'v1';
|
||||
const BUILD_TIMESTAMP = String(process.env.BUILD_TIMESTAMP || '').trim() || undefined;
|
||||
const STARTED_AT = new Date().toISOString();
|
||||
|
||||
const STATIC_DIR = process.env.STATIC_DIR || '/srv';
|
||||
const BASIC_AUTH_FILE = process.env.BASIC_AUTH_FILE || '/tokens/frontend.json';
|
||||
const API_READ_TOKEN_FILE = process.env.API_READ_TOKEN_FILE || '/tokens/read.json';
|
||||
const API_UPSTREAM = process.env.API_UPSTREAM || process.env.API_URL || 'http://api:8787';
|
||||
const HASURA_UPSTREAM = process.env.HASURA_UPSTREAM || 'http://hasura:8080';
|
||||
const HASURA_GRAPHQL_PATH = process.env.HASURA_GRAPHQL_PATH || '/v1/graphql';
|
||||
const GRAPHQL_CORS_ORIGIN = process.env.GRAPHQL_CORS_ORIGIN || process.env.CORS_ORIGIN || '*';
|
||||
const BASIC_AUTH_MODE = String(process.env.BASIC_AUTH_MODE || 'on')
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
const BASIC_AUTH_ENABLED = !['off', 'false', '0', 'disabled', 'none'].includes(BASIC_AUTH_MODE);
|
||||
const AUTH_USER_HEADER = String(process.env.AUTH_USER_HEADER || 'x-trade-user')
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
const AUTH_MODE = String(process.env.AUTH_MODE || 'session')
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
const HTPASSWD_FILE = String(process.env.HTPASSWD_FILE || '/auth/users').trim();
|
||||
const AUTH_SESSION_SECRET_FILE = String(process.env.AUTH_SESSION_SECRET_FILE || '').trim() || null;
|
||||
const AUTH_SESSION_COOKIE = String(process.env.AUTH_SESSION_COOKIE || 'trade_session')
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
const AUTH_SESSION_TTL_SECONDS = Number.parseInt(process.env.AUTH_SESSION_TTL_SECONDS || '43200', 10); // 12h
|
||||
const DLOB_SOURCE_COOKIE = String(process.env.DLOB_SOURCE_COOKIE || 'trade_dlob_source').trim() || 'trade_dlob_source';
|
||||
const DLOB_SOURCE_DEFAULT = String(process.env.DLOB_SOURCE_DEFAULT || 'mevnode').trim() || 'mevnode';
|
||||
|
||||
function readJson(filePath) {
|
||||
const raw = fs.readFileSync(filePath, 'utf8');
|
||||
return JSON.parse(raw);
|
||||
}
|
||||
|
||||
function readText(filePath) {
|
||||
return fs.readFileSync(filePath, 'utf8');
|
||||
}
|
||||
|
||||
function timingSafeEqualStr(a, b) {
|
||||
const aa = Buffer.from(String(a), 'utf8');
|
||||
const bb = Buffer.from(String(b), 'utf8');
|
||||
if (aa.length !== bb.length) return false;
|
||||
return crypto.timingSafeEqual(aa, bb);
|
||||
}
|
||||
|
||||
function timingSafeEqualBuf(a, b) {
|
||||
if (!(a instanceof Uint8Array) || !(b instanceof Uint8Array)) return false;
|
||||
if (a.length !== b.length) return false;
|
||||
return crypto.timingSafeEqual(Buffer.from(a), Buffer.from(b));
|
||||
}
|
||||
|
||||
function loadBasicAuth() {
|
||||
const j = readJson(BASIC_AUTH_FILE);
|
||||
const username = (j?.username || '').toString();
|
||||
const password = (j?.password || '').toString();
|
||||
if (!username || !password) throw new Error(`Invalid BASIC_AUTH_FILE: ${BASIC_AUTH_FILE}`);
|
||||
return { username, password };
|
||||
}
|
||||
|
||||
function loadApiReadToken() {
|
||||
const j = readJson(API_READ_TOKEN_FILE);
|
||||
const token = (j?.token || '').toString();
|
||||
if (!token) throw new Error(`Invalid API_READ_TOKEN_FILE: ${API_READ_TOKEN_FILE}`);
|
||||
return token;
|
||||
}
|
||||
|
||||
function send(res, status, headers, body) {
|
||||
res.statusCode = status;
|
||||
for (const [k, v] of Object.entries(headers || {})) res.setHeader(k, v);
|
||||
if (body == null) return void res.end();
|
||||
res.end(body);
|
||||
}
|
||||
|
||||
function sendJson(res, status, body) {
|
||||
send(res, status, { 'content-type': 'application/json; charset=utf-8', 'cache-control': 'no-store' }, JSON.stringify(body));
|
||||
}
|
||||
|
||||
function basicAuthRequired(res) {
|
||||
res.setHeader('www-authenticate', 'Basic realm="trade"');
|
||||
send(res, 401, { 'content-type': 'text/plain; charset=utf-8' }, 'unauthorized');
|
||||
}
|
||||
|
||||
function unauthorized(res) {
|
||||
sendJson(res, 401, { ok: false, error: 'unauthorized' });
|
||||
}
|
||||
|
||||
function isAuthorized(req, creds) {
|
||||
const auth = req.headers.authorization || '';
|
||||
const m = String(auth).match(/^Basic\s+(.+)$/i);
|
||||
if (!m?.[1]) return false;
|
||||
let decoded;
|
||||
try {
|
||||
decoded = Buffer.from(m[1], 'base64').toString('utf8');
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
const idx = decoded.indexOf(':');
|
||||
if (idx < 0) return false;
|
||||
const u = decoded.slice(0, idx);
|
||||
const p = decoded.slice(idx + 1);
|
||||
return timingSafeEqualStr(u, creds.username) && timingSafeEqualStr(p, creds.password);
|
||||
}
|
||||
|
||||
const MIME = {
|
||||
'.html': 'text/html; charset=utf-8',
|
||||
'.css': 'text/css; charset=utf-8',
|
||||
'.js': 'application/javascript; charset=utf-8',
|
||||
'.mjs': 'application/javascript; charset=utf-8',
|
||||
'.json': 'application/json; charset=utf-8',
|
||||
'.svg': 'image/svg+xml',
|
||||
'.png': 'image/png',
|
||||
'.jpg': 'image/jpeg',
|
||||
'.jpeg': 'image/jpeg',
|
||||
'.gif': 'image/gif',
|
||||
'.ico': 'image/x-icon',
|
||||
'.txt': 'text/plain; charset=utf-8',
|
||||
'.map': 'application/json; charset=utf-8',
|
||||
};
|
||||
|
||||
function contentTypeFor(filePath) {
|
||||
return MIME[path.extname(filePath).toLowerCase()] || 'application/octet-stream';
|
||||
}
|
||||
|
||||
function safePathFromUrlPath(urlPath) {
|
||||
const decoded = decodeURIComponent(urlPath);
|
||||
const cleaned = decoded.replace(/\0/g, '');
|
||||
// strip leading slash so join() doesn't ignore STATIC_DIR
|
||||
const rel = cleaned.replace(/^\/+/, '');
|
||||
const normalized = path.normalize(rel);
|
||||
// prevent traversal
|
||||
if (normalized.startsWith('..') || path.isAbsolute(normalized)) return null;
|
||||
return normalized;
|
||||
}
|
||||
|
||||
function injectIndexHtml(html, { dlobSource, redirectPath }) {
|
||||
const src = normalizeDlobSource(dlobSource) || 'mevnode';
|
||||
const redirect = safeRedirectPath(redirectPath);
|
||||
const hrefBase = `/prefs/dlob-source?redirect=${encodeURIComponent(redirect)}&set=`;
|
||||
|
||||
const styleActive = 'font-weight:700;text-decoration:underline;';
|
||||
const styleInactive = 'font-weight:400;text-decoration:none;';
|
||||
|
||||
const snippet = `
|
||||
<!-- trade: dlob source switch -->
|
||||
<div style="position:fixed;right:12px;bottom:12px;z-index:2147483647;background:rgba(0,0,0,0.72);color:#fff;padding:8px 10px;border-radius:10px;font:12px/1.2 system-ui,-apple-system,Segoe UI,Roboto,sans-serif;backdrop-filter:blur(6px);">
|
||||
<span style="opacity:0.85;margin-right:6px;">DLOB</span>
|
||||
<a href="${hrefBase}mevnode" style="color:#fff;${src === 'mevnode' ? styleActive : styleInactive}">mevnode</a>
|
||||
<span style="opacity:0.6;margin:0 6px;">|</span>
|
||||
<a href="${hrefBase}drift" style="color:#fff;${src === 'drift' ? styleActive : styleInactive}">drift</a>
|
||||
</div>
|
||||
`;
|
||||
|
||||
const bodyClose = /<\/body>/i;
|
||||
if (bodyClose.test(html)) return html.replace(bodyClose, `${snippet}</body>`);
|
||||
return `${html}\n${snippet}\n`;
|
||||
}
|
||||
|
||||
function serveStatic(req, res) {
|
||||
if (req.method !== 'GET' && req.method !== 'HEAD') {
|
||||
send(res, 405, { 'content-type': 'text/plain; charset=utf-8' }, 'method_not_allowed');
|
||||
return;
|
||||
}
|
||||
|
||||
const url = new URL(req.url || '/', `http://${req.headers.host || 'localhost'}`);
|
||||
const rel = safePathFromUrlPath(url.pathname);
|
||||
if (rel == null) {
|
||||
send(res, 400, { 'content-type': 'text/plain; charset=utf-8' }, 'bad_request');
|
||||
return;
|
||||
}
|
||||
|
||||
const root = path.resolve(STATIC_DIR);
|
||||
const fileCandidate = path.resolve(root, rel);
|
||||
if (!fileCandidate.startsWith(root)) {
|
||||
send(res, 400, { 'content-type': 'text/plain; charset=utf-8' }, 'bad_request');
|
||||
return;
|
||||
}
|
||||
|
||||
const trySend = (filePath) => {
|
||||
try {
|
||||
const st = fs.statSync(filePath);
|
||||
if (st.isDirectory()) return trySend(path.join(filePath, 'index.html'));
|
||||
res.statusCode = 200;
|
||||
res.setHeader('content-type', contentTypeFor(filePath));
|
||||
res.setHeader('cache-control', filePath.endsWith('index.html') ? 'no-cache' : 'public, max-age=31536000');
|
||||
if (req.method === 'HEAD') return void res.end();
|
||||
if (filePath.endsWith('index.html')) {
|
||||
const html = fs.readFileSync(filePath, 'utf8');
|
||||
const injected = injectIndexHtml(html, {
|
||||
dlobSource: resolveDlobSource(req),
|
||||
redirectPath: url.pathname + url.search,
|
||||
});
|
||||
res.end(injected);
|
||||
return true;
|
||||
}
|
||||
fs.createReadStream(filePath).pipe(res);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
// exact file, otherwise SPA fallback
|
||||
if (trySend(fileCandidate)) return;
|
||||
const indexPath = path.join(root, 'index.html');
|
||||
if (trySend(indexPath)) return;
|
||||
|
||||
send(res, 404, { 'content-type': 'text/plain; charset=utf-8' }, 'not_found');
|
||||
}
|
||||
|
||||
function stripHopByHopHeaders(headers) {
|
||||
const hop = new Set([
|
||||
'connection',
|
||||
'keep-alive',
|
||||
'proxy-authenticate',
|
||||
'proxy-authorization',
|
||||
'te',
|
||||
'trailer',
|
||||
'transfer-encoding',
|
||||
'upgrade',
|
||||
]);
|
||||
const out = {};
|
||||
for (const [k, v] of Object.entries(headers || {})) {
|
||||
if (hop.has(k.toLowerCase())) continue;
|
||||
out[k] = v;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function readHeader(req, name) {
|
||||
const v = req.headers[String(name).toLowerCase()];
|
||||
return Array.isArray(v) ? v[0] : v;
|
||||
}
|
||||
|
||||
function readCookie(req, name) {
|
||||
const raw = typeof req.headers.cookie === 'string' ? req.headers.cookie : '';
|
||||
if (!raw) return null;
|
||||
const needle = `${name}=`;
|
||||
for (const part of raw.split(';')) {
|
||||
const t = part.trim();
|
||||
if (!t.startsWith(needle)) continue;
|
||||
return t.slice(needle.length) || null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function normalizeDlobSource(value) {
|
||||
const v = String(value ?? '')
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
if (v === 'mevnode') return 'mevnode';
|
||||
if (v === 'drift') return 'drift';
|
||||
return null;
|
||||
}
|
||||
|
||||
function resolveDlobSource(req) {
|
||||
const fromCookie = normalizeDlobSource(readCookie(req, DLOB_SOURCE_COOKIE));
|
||||
if (fromCookie) return fromCookie;
|
||||
return normalizeDlobSource(DLOB_SOURCE_DEFAULT) || 'mevnode';
|
||||
}
|
||||
|
||||
function safeRedirectPath(value) {
|
||||
const s = String(value ?? '').trim();
|
||||
if (!s.startsWith('/')) return '/';
|
||||
if (s.startsWith('//')) return '/';
|
||||
return s.replace(/\r|\n/g, '');
|
||||
}
|
||||
|
||||
function setDlobSourceCookie(res, { secure, dlobSource }) {
|
||||
const src = normalizeDlobSource(dlobSource);
|
||||
if (!src) return false;
|
||||
const parts = [
|
||||
`${DLOB_SOURCE_COOKIE}=${src}`,
|
||||
'Path=/',
|
||||
'SameSite=Lax',
|
||||
'HttpOnly',
|
||||
'Max-Age=31536000',
|
||||
];
|
||||
if (secure) parts.push('Secure');
|
||||
res.setHeader('set-cookie', parts.join('; '));
|
||||
return true;
|
||||
}
|
||||
|
||||
function resolveAuthUser(req) {
|
||||
const user = readHeader(req, AUTH_USER_HEADER) || readHeader(req, 'x-webauth-user');
|
||||
const value = typeof user === 'string' ? user.trim() : '';
|
||||
return value || null;
|
||||
}
|
||||
|
||||
function isHttpsRequest(req) {
|
||||
const xf = readHeader(req, 'x-forwarded-proto');
|
||||
if (typeof xf === 'string' && xf.toLowerCase() === 'https') return true;
|
||||
return Boolean(req.socket && req.socket.encrypted);
|
||||
}
|
||||
|
||||
function base64urlEncode(buf) {
|
||||
return Buffer.from(buf)
|
||||
.toString('base64')
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=+$/g, '');
|
||||
}
|
||||
|
||||
function base64urlDecode(str) {
|
||||
const cleaned = String(str).replace(/-/g, '+').replace(/_/g, '/');
|
||||
const pad = cleaned.length % 4 === 0 ? '' : '='.repeat(4 - (cleaned.length % 4));
|
||||
return Buffer.from(cleaned + pad, 'base64');
|
||||
}
|
||||
|
||||
function loadSessionSecret() {
|
||||
if (process.env.AUTH_SESSION_SECRET && String(process.env.AUTH_SESSION_SECRET).trim()) {
|
||||
return Buffer.from(String(process.env.AUTH_SESSION_SECRET).trim(), 'utf8');
|
||||
}
|
||||
if (AUTH_SESSION_SECRET_FILE) {
|
||||
try {
|
||||
const txt = readText(AUTH_SESSION_SECRET_FILE).trim();
|
||||
if (txt) return Buffer.from(txt, 'utf8');
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
return crypto.randomBytes(32);
|
||||
}
|
||||
|
||||
const SESSION_SECRET = loadSessionSecret();
|
||||
|
||||
function signSessionPayload(payloadB64) {
|
||||
return crypto.createHmac('sha256', SESSION_SECRET).update(payloadB64).digest();
|
||||
}
|
||||
|
||||
function makeSessionCookieValue(username) {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const exp = now + (Number.isFinite(AUTH_SESSION_TTL_SECONDS) && AUTH_SESSION_TTL_SECONDS > 0 ? AUTH_SESSION_TTL_SECONDS : 43200);
|
||||
const payload = JSON.stringify({ u: String(username), exp });
|
||||
const payloadB64 = base64urlEncode(Buffer.from(payload, 'utf8'));
|
||||
const sigB64 = base64urlEncode(signSessionPayload(payloadB64));
|
||||
return `${payloadB64}.${sigB64}`;
|
||||
}
|
||||
|
||||
function getSessionUser(req) {
|
||||
const raw = readCookie(req, AUTH_SESSION_COOKIE);
|
||||
if (!raw) return null;
|
||||
const parts = raw.split('.');
|
||||
if (parts.length !== 2) return null;
|
||||
const [payloadB64, sigB64] = parts;
|
||||
if (!payloadB64 || !sigB64) return null;
|
||||
|
||||
let payload;
|
||||
try {
|
||||
payload = JSON.parse(base64urlDecode(payloadB64).toString('utf8'));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
const u = typeof payload?.u === 'string' ? payload.u.trim() : '';
|
||||
const exp = Number(payload?.exp);
|
||||
if (!u || !Number.isFinite(exp)) return null;
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
if (now >= exp) return null;
|
||||
|
||||
const expected = signSessionPayload(payloadB64);
|
||||
let got;
|
||||
try {
|
||||
got = base64urlDecode(sigB64);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
if (!timingSafeEqualBuf(expected, got)) return null;
|
||||
|
||||
return u;
|
||||
}
|
||||
|
||||
function resolveAuthenticatedUser(req) {
|
||||
const sessionUser = getSessionUser(req);
|
||||
if (sessionUser) return sessionUser;
|
||||
const headerUser = resolveAuthUser(req);
|
||||
if (headerUser) return headerUser;
|
||||
if (AUTH_MODE === 'off' || AUTH_MODE === 'none' || AUTH_MODE === 'disabled') return 'anonymous';
|
||||
return null;
|
||||
}
|
||||
|
||||
function clearSessionCookie(res, secure) {
|
||||
const parts = [`${AUTH_SESSION_COOKIE}=`, 'Path=/', 'Max-Age=0', 'HttpOnly', 'SameSite=Lax'];
|
||||
if (secure) parts.push('Secure');
|
||||
res.setHeader('set-cookie', parts.join('; '));
|
||||
}
|
||||
|
||||
function setSessionCookie(res, secure, username) {
|
||||
const value = makeSessionCookieValue(username);
|
||||
const parts = [
|
||||
`${AUTH_SESSION_COOKIE}=${value}`,
|
||||
'Path=/',
|
||||
`Max-Age=${Number.isFinite(AUTH_SESSION_TTL_SECONDS) ? AUTH_SESSION_TTL_SECONDS : 43200}`,
|
||||
'HttpOnly',
|
||||
'SameSite=Lax',
|
||||
];
|
||||
if (secure) parts.push('Secure');
|
||||
res.setHeader('set-cookie', parts.join('; '));
|
||||
}
|
||||
|
||||
function verifyWithHtpasswd(username, password) {
|
||||
try {
|
||||
const r = spawnSync('htpasswd', ['-vb', HTPASSWD_FILE, String(username), String(password)], {
|
||||
stdio: 'ignore',
|
||||
timeout: 3000,
|
||||
});
|
||||
return r.status === 0;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function readBody(req, limitBytes = 1024 * 16) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let total = 0;
|
||||
const chunks = [];
|
||||
req.on('data', (chunk) => {
|
||||
total += chunk.length;
|
||||
if (total > limitBytes) {
|
||||
reject(new Error('payload_too_large'));
|
||||
req.destroy();
|
||||
return;
|
||||
}
|
||||
chunks.push(chunk);
|
||||
});
|
||||
req.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
|
||||
req.on('error', reject);
|
||||
});
|
||||
}
|
||||
|
||||
function proxyApi(req, res, apiReadToken) {
|
||||
const upstreamBase = new URL(API_UPSTREAM);
|
||||
const inUrl = new URL(req.url || '/', `http://${req.headers.host || 'localhost'}`);
|
||||
|
||||
const prefix = '/api';
|
||||
const strippedPath = inUrl.pathname === prefix ? '/' : inUrl.pathname.startsWith(prefix + '/') ? inUrl.pathname.slice(prefix.length) : null;
|
||||
if (strippedPath == null) {
|
||||
send(res, 404, { 'content-type': 'text/plain; charset=utf-8' }, 'not_found');
|
||||
return;
|
||||
}
|
||||
|
||||
const target = new URL(upstreamBase.toString());
|
||||
target.pathname = strippedPath || '/';
|
||||
target.search = inUrl.search;
|
||||
|
||||
const isHttps = target.protocol === 'https:';
|
||||
const lib = isHttps ? https : http;
|
||||
|
||||
const headers = stripHopByHopHeaders(req.headers);
|
||||
delete headers.authorization; // basic auth from client must not leak upstream
|
||||
headers.host = target.host;
|
||||
headers.authorization = `Bearer ${apiReadToken}`;
|
||||
|
||||
const upstreamReq = lib.request(
|
||||
{
|
||||
protocol: target.protocol,
|
||||
hostname: target.hostname,
|
||||
port: target.port || (isHttps ? 443 : 80),
|
||||
method: req.method,
|
||||
path: target.pathname + target.search,
|
||||
headers,
|
||||
},
|
||||
(upstreamRes) => {
|
||||
const outHeaders = stripHopByHopHeaders(upstreamRes.headers);
|
||||
res.writeHead(upstreamRes.statusCode || 502, outHeaders);
|
||||
upstreamRes.pipe(res);
|
||||
}
|
||||
);
|
||||
|
||||
upstreamReq.on('error', (err) => {
|
||||
if (!res.headersSent) {
|
||||
send(res, 502, { 'content-type': 'text/plain; charset=utf-8' }, `bad_gateway: ${err?.message || err}`);
|
||||
} else {
|
||||
res.destroy();
|
||||
}
|
||||
});
|
||||
|
||||
req.pipe(upstreamReq);
|
||||
}
|
||||
|
||||
function withCors(res) {
|
||||
res.setHeader('access-control-allow-origin', GRAPHQL_CORS_ORIGIN);
|
||||
res.setHeader('access-control-allow-methods', 'GET,POST,OPTIONS');
|
||||
res.setHeader(
|
||||
'access-control-allow-headers',
|
||||
'content-type, authorization, x-hasura-admin-secret, x-hasura-role, x-hasura-user-id, x-hasura-dlob-source'
|
||||
);
|
||||
}
|
||||
|
||||
// Forward an HTTP GraphQL request to the Hasura upstream and stream the
// response back. The upstream path is forced to HASURA_GRAPHQL_PATH while the
// client's query string is preserved. Hop-by-hop headers are stripped in both
// directions, and the x-hasura-dlob-source header is always set server-side
// (any client-supplied value is discarded).
function proxyGraphqlHttp(req, res) {
  const upstreamBase = new URL(HASURA_UPSTREAM);
  const inUrl = new URL(req.url || '/', `http://${req.headers.host || 'localhost'}`);

  // Clone the upstream base, then rewrite path + query for this request.
  const target = new URL(upstreamBase.toString());
  target.pathname = HASURA_GRAPHQL_PATH;
  target.search = inUrl.search;

  const isHttps = target.protocol === 'https:';
  const lib = isHttps ? https : http;

  const headers = stripHopByHopHeaders(req.headers);
  headers.host = target.host;
  // Never trust the client's DLOB source header; derive it from cookie/config.
  delete headers['x-hasura-dlob-source'];
  headers['x-hasura-dlob-source'] = resolveDlobSource(req);

  const upstreamReq = lib.request(
    {
      protocol: target.protocol,
      hostname: target.hostname,
      port: target.port || (isHttps ? 443 : 80),
      method: req.method,
      path: target.pathname + target.search,
      headers,
    },
    (upstreamRes) => {
      const outHeaders = stripHopByHopHeaders(upstreamRes.headers);
      // CORS must be attached before writeHead; headers are frozen after that.
      withCors(res);
      res.writeHead(upstreamRes.statusCode || 502, outHeaders);
      upstreamRes.pipe(res);
    }
  );

  upstreamReq.on('error', (err) => {
    if (!res.headersSent) {
      withCors(res);
      send(res, 502, { 'content-type': 'text/plain; charset=utf-8' }, `bad_gateway: ${err?.message || err}`);
    } else {
      // Mid-stream failure: nothing sensible to send, drop the connection.
      res.destroy();
    }
  });

  // Stream the request body through without buffering.
  req.pipe(upstreamReq);
}
|
||||
|
||||
// True when the request path is one of the two GraphQL endpoints this server
// proxies (plain HTTP and the WebSocket variant).
function isGraphqlPath(pathname) {
  return ['/graphql', '/graphql-ws'].includes(pathname);
}
|
||||
|
||||
// Tunnel a WebSocket upgrade for the GraphQL endpoint to the Hasura upstream.
// The upgrade handshake is re-serialized by hand over a raw TCP (or TLS)
// socket, after which the client and upstream sockets are piped to each other
// in both directions until either side drops.
function proxyGraphqlWs(req, socket, head) {
  const upstreamBase = new URL(HASURA_UPSTREAM);
  const inUrl = new URL(req.url || '/', `http://${req.headers.host || 'localhost'}`);

  // Rewrite the path to the upstream GraphQL endpoint, keep the query string.
  const target = new URL(upstreamBase.toString());
  target.pathname = HASURA_GRAPHQL_PATH;
  target.search = inUrl.search;

  const port = Number(target.port || (target.protocol === 'https:' ? 443 : 80));
  const host = target.hostname;

  // SNI servername is required for TLS upstreams behind virtual hosting.
  const connect =
    target.protocol === 'https:'
      ? () => tls.connect({ host, port, servername: host })
      : () => net.connect({ host, port });

  const upstream = connect();
  // Disable Nagle on both legs: subscription frames are small and latency-sensitive.
  upstream.setNoDelay(true);
  socket.setNoDelay(true);

  // For WebSocket upgrades we must forward `connection`/`upgrade` and related headers.
  const headers = { ...req.headers };
  delete headers['content-length'];
  delete headers['content-type'];
  headers.host = target.host;
  // Never trust a client-supplied DLOB source header; always set it server-side.
  delete headers['x-hasura-dlob-source'];
  headers['x-hasura-dlob-source'] = resolveDlobSource(req);

  // Serialize the upgrade request manually: request line, header lines,
  // then the mandatory blank line (hence the two trailing '' entries).
  const lines = [];
  lines.push(`GET ${target.pathname + target.search} HTTP/1.1`);
  for (const [k, v] of Object.entries(headers)) {
    if (v == null) continue;
    if (Array.isArray(v)) {
      // Multi-valued headers (e.g. repeated cookies) become repeated lines.
      for (const vv of v) lines.push(`${k}: ${vv}`);
    } else {
      lines.push(`${k}: ${v}`);
    }
  }
  lines.push('', '');
  upstream.write(lines.join('\r\n'));

  // Replay any bytes the client sent after its upgrade request.
  if (head?.length) upstream.write(head);

  // On error on either leg, tear down the opposite side; failures are not
  // reported to the client beyond the dropped connection.
  upstream.on('error', () => {
    try {
      socket.destroy();
    } catch {
      // ignore
    }
  });
  socket.on('error', () => {
    try {
      upstream.destroy();
    } catch {
      // ignore
    }
  });

  upstream.pipe(socket);
  socket.pipe(upstream);
}
|
||||
|
||||
// Main request router. Order matters: health check first (always open), then
// GraphQL proxying (with its own auth + CORS), then auth/preference endpoints,
// then the optional basic-auth gate, the /api proxy, and finally static files.
async function handler(req, res) {
  // Unauthenticated liveness probe with build metadata.
  if (req.method === 'GET' && (req.url === '/healthz' || req.url?.startsWith('/healthz?'))) {
    send(
      res,
      200,
      { 'content-type': 'application/json; charset=utf-8' },
      JSON.stringify({ ok: true, version: APP_VERSION, buildTimestamp: BUILD_TIMESTAMP, startedAt: STARTED_AT })
    );
    return;
  }

  const url = new URL(req.url || '/', `http://${req.headers.host || 'localhost'}`);
  if (isGraphqlPath(url.pathname)) {
    // CORS preflight never requires authentication.
    if (req.method === 'OPTIONS') {
      withCors(res);
      res.statusCode = 204;
      res.end();
      return;
    }
    // Enforce session auth unless auth is explicitly disabled.
    if (AUTH_MODE !== 'off' && AUTH_MODE !== 'none' && AUTH_MODE !== 'disabled') {
      const user = resolveAuthenticatedUser(req);
      if (!user) {
        withCors(res);
        unauthorized(res);
        return;
      }
    }
    withCors(res);
    proxyGraphqlHttp(req, res);
    return;
  }
  // Debug endpoint: report the resolved user and the active auth mode.
  if (req.method === 'GET' && url.pathname === '/whoami') {
    sendJson(res, 200, { ok: true, user: resolveAuthenticatedUser(req), mode: AUTH_MODE });
    return;
  }

  // DLOB source preference: GET without ?set reads the current value;
  // with ?set=... it stores a cookie and redirects back.
  if (req.method === 'GET' && url.pathname === '/prefs/dlob-source') {
    const set = url.searchParams.get('set');
    if (!set) {
      sendJson(res, 200, { ok: true, dlobSource: resolveDlobSource(req) });
      return;
    }

    const ok = setDlobSourceCookie(res, { secure: isHttpsRequest(req), dlobSource: set });
    if (!ok) {
      sendJson(res, 400, { ok: false, error: 'invalid_dlob_source' });
      return;
    }

    res.statusCode = 302;
    // Redirect target is sanitized to a same-origin path.
    res.setHeader('location', safeRedirectPath(url.searchParams.get('redirect') || '/'));
    res.end();
    return;
  }

  if (req.method === 'POST' && url.pathname === '/auth/login') {
    if (AUTH_MODE === 'off' || AUTH_MODE === 'none' || AUTH_MODE === 'disabled') {
      sendJson(res, 400, { ok: false, error: 'auth_disabled' });
      return;
    }

    // Accept both JSON bodies and classic form posts.
    const raw = await readBody(req);
    const ct = String(req.headers['content-type'] || '').toLowerCase();
    let username = '';
    let password = '';
    if (ct.includes('application/json')) {
      let json;
      try {
        json = JSON.parse(raw);
      } catch {
        sendJson(res, 400, { ok: false, error: 'bad_json' });
        return;
      }
      username = typeof json?.username === 'string' ? json.username.trim() : '';
      password = typeof json?.password === 'string' ? json.password : '';
    } else {
      const params = new URLSearchParams(raw);
      username = String(params.get('username') || '').trim();
      password = String(params.get('password') || '');
    }

    // Reject empty or oversized credentials before touching the htpasswd file.
    if (!username || !password || username.length > 64 || password.length > 200) {
      sendJson(res, 400, { ok: false, error: 'invalid_input' });
      return;
    }

    const ok = verifyWithHtpasswd(username, password);
    if (!ok) {
      unauthorized(res);
      return;
    }

    // Mark the cookie Secure only when the request itself came in over HTTPS.
    const secure = isHttpsRequest(req);
    setSessionCookie(res, secure, username);
    sendJson(res, 200, { ok: true, user: username });
    return;
  }

  // Logout: GET redirects home (browser-friendly), POST returns JSON.
  if ((req.method === 'POST' || req.method === 'GET') && (url.pathname === '/auth/logout' || url.pathname === '/logout')) {
    clearSessionCookie(res, isHttpsRequest(req));
    if (req.method === 'GET') {
      res.statusCode = 302;
      res.setHeader('location', '/');
      res.end();
      return;
    }
    sendJson(res, 200, { ok: true });
    return;
  }

  // Optional basic-auth gate in front of everything below this point.
  if (BASIC_AUTH_ENABLED) {
    let creds;
    try {
      creds = loadBasicAuth();
    } catch (e) {
      send(res, 500, { 'content-type': 'text/plain; charset=utf-8' }, String(e?.message || e));
      return;
    }

    if (!isAuthorized(req, creds)) {
      basicAuthRequired(res);
      return;
    }
  }

  // /api and /api/... are proxied upstream with a server-held read token.
  if (req.url?.startsWith('/api') && (req.url === '/api' || req.url.startsWith('/api/'))) {
    if (AUTH_MODE !== 'off' && AUTH_MODE !== 'none' && AUTH_MODE !== 'disabled') {
      const user = resolveAuthenticatedUser(req);
      if (!user) {
        unauthorized(res);
        return;
      }
    }

    let token;
    try {
      token = loadApiReadToken();
    } catch (e) {
      send(res, 500, { 'content-type': 'text/plain; charset=utf-8' }, String(e?.message || e));
      return;
    }
    proxyApi(req, res, token);
    return;
  }

  // Fallback: serve the static frontend bundle.
  serveStatic(req, res);
}
|
||||
|
||||
// Top-level request entry point: delegate to handler() and convert any
// unexpected rejection into a 500 — or drop the socket when headers have
// already been sent and a clean error response is no longer possible.
const server = http.createServer((req, res) => {
  handler(req, res).catch((err) => {
    if (res.headersSent) return void res.destroy();
    send(res, 500, { 'content-type': 'text/plain; charset=utf-8' }, String(err?.message || err));
  });
});
|
||||
// WebSocket upgrade entry point. Only the GraphQL paths may upgrade; when
// auth is enabled an unauthenticated client gets a best-effort 401 before the
// socket is dropped. Any unexpected error also just drops the socket.
server.on('upgrade', (req, socket, head) => {
  try {
    const requestUrl = new URL(req.url || '/', `http://${req.headers.host || 'localhost'}`);
    if (!isGraphqlPath(requestUrl.pathname)) {
      socket.destroy();
      return;
    }
    const authRequired = AUTH_MODE !== 'off' && AUTH_MODE !== 'none' && AUTH_MODE !== 'disabled';
    if (authRequired && !resolveAuthenticatedUser(req)) {
      try {
        socket.write('HTTP/1.1 401 Unauthorized\r\n\r\n');
      } catch {
        // best effort: the client may already be gone
      }
      socket.destroy();
      return;
    }
    proxyGraphqlWs(req, socket, head);
  } catch {
    socket.destroy();
  }
});
|
||||
// Start listening and log the effective configuration once, as pretty JSON,
// so a fresh deployment's settings are visible in the pod logs.
server.listen(PORT, () => {
  const startupInfo = {
    service: 'trade-frontend',
    port: PORT,
    staticDir: STATIC_DIR,
    apiUpstream: API_UPSTREAM,
    hasuraUpstream: HASURA_UPSTREAM,
    basicAuthFile: BASIC_AUTH_FILE,
    basicAuthMode: BASIC_AUTH_MODE,
    apiReadTokenFile: API_READ_TOKEN_FILE,
    authUserHeader: AUTH_USER_HEADER,
    authMode: AUTH_MODE,
    htpasswdFile: HTPASSWD_FILE,
  };
  console.log(JSON.stringify(startupInfo, null, 2));
});
|
||||
@@ -6,5 +6,16 @@ namespace: trade-prod
|
||||
resources:
|
||||
- ../../base
|
||||
|
||||
patchesStrategicMerge:
|
||||
- frontend-graphql-proxy-patch.yaml
|
||||
|
||||
configMapGenerator:
|
||||
- name: trade-frontend-server-script
|
||||
files:
|
||||
- frontend-server.mjs
|
||||
|
||||
generatorOptions:
|
||||
disableNameSuffixHash: true
|
||||
|
||||
commonLabels:
|
||||
env: prod
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: dlob-worker
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
hostNetwork: true
|
||||
dnsPolicy: ClusterFirstWithHostNet
|
||||
containers:
|
||||
- name: worker
|
||||
env:
|
||||
- name: DLOB_HTTP_URL
|
||||
value: https://dlob.drift.trade
|
||||
- name: DLOB_FORCE_IPV6
|
||||
value: "true"
|
||||
@@ -37,6 +37,8 @@ const AUTH_SESSION_COOKIE = String(process.env.AUTH_SESSION_COOKIE || 'trade_ses
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
const AUTH_SESSION_TTL_SECONDS = Number.parseInt(process.env.AUTH_SESSION_TTL_SECONDS || '43200', 10); // 12h
|
||||
const DLOB_SOURCE_COOKIE = String(process.env.DLOB_SOURCE_COOKIE || 'trade_dlob_source').trim() || 'trade_dlob_source';
|
||||
const DLOB_SOURCE_DEFAULT = String(process.env.DLOB_SOURCE_DEFAULT || 'mevnode').trim() || 'mevnode';
|
||||
|
||||
function readJson(filePath) {
|
||||
const raw = fs.readFileSync(filePath, 'utf8');
|
||||
@@ -143,6 +145,29 @@ function safePathFromUrlPath(urlPath) {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
// Inject a small fixed-position "DLOB source" switcher into index.html.
// The currently active source is rendered bold/underlined; clicking a link
// hits /prefs/dlob-source which sets the cookie and redirects back here.
function injectIndexHtml(html, { dlobSource, redirectPath }) {
  const active = normalizeDlobSource(dlobSource) || 'mevnode';
  const hrefBase = `/prefs/dlob-source?redirect=${encodeURIComponent(safeRedirectPath(redirectPath))}&set=`;

  const linkStyle = (name) =>
    name === active ? 'font-weight:700;text-decoration:underline;' : 'font-weight:400;text-decoration:none;';

  const snippet = `
<!-- trade: dlob source switch -->
<div style="position:fixed;right:12px;bottom:12px;z-index:2147483647;background:rgba(0,0,0,0.72);color:#fff;padding:8px 10px;border-radius:10px;font:12px/1.2 system-ui,-apple-system,Segoe UI,Roboto,sans-serif;backdrop-filter:blur(6px);">
  <span style="opacity:0.85;margin-right:6px;">DLOB</span>
  <a href="${hrefBase}mevnode" style="color:#fff;${linkStyle('mevnode')}">mevnode</a>
  <span style="opacity:0.6;margin:0 6px;">|</span>
  <a href="${hrefBase}drift" style="color:#fff;${linkStyle('drift')}">drift</a>
</div>
`;

  // Prefer splicing before </body>; append at the end when no body tag exists.
  const bodyClose = /<\/body>/i;
  return bodyClose.test(html) ? html.replace(bodyClose, `${snippet}</body>`) : `${html}\n${snippet}\n`;
}
|
||||
|
||||
function serveStatic(req, res) {
|
||||
if (req.method !== 'GET' && req.method !== 'HEAD') {
|
||||
send(res, 405, { 'content-type': 'text/plain; charset=utf-8' }, 'method_not_allowed');
|
||||
@@ -171,6 +196,15 @@ function serveStatic(req, res) {
|
||||
res.setHeader('content-type', contentTypeFor(filePath));
|
||||
res.setHeader('cache-control', filePath.endsWith('index.html') ? 'no-cache' : 'public, max-age=31536000');
|
||||
if (req.method === 'HEAD') return void res.end();
|
||||
if (filePath.endsWith('index.html')) {
|
||||
const html = fs.readFileSync(filePath, 'utf8');
|
||||
const injected = injectIndexHtml(html, {
|
||||
dlobSource: resolveDlobSource(req),
|
||||
redirectPath: url.pathname + url.search,
|
||||
});
|
||||
res.end(injected);
|
||||
return true;
|
||||
}
|
||||
fs.createReadStream(filePath).pipe(res);
|
||||
return true;
|
||||
} catch {
|
||||
@@ -222,6 +256,43 @@ function readCookie(req, name) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Canonicalize a DLOB source value: case-insensitive, whitespace-tolerant.
// Returns 'mevnode' or 'drift', or null for anything else (including
// null/undefined input).
function normalizeDlobSource(value) {
  const candidate = String(value ?? '')
    .trim()
    .toLowerCase();
  return candidate === 'mevnode' || candidate === 'drift' ? candidate : null;
}
|
||||
|
||||
// Determine the effective DLOB source for a request: the user's cookie wins,
// then the configured default, then the hard fallback 'mevnode'.
function resolveDlobSource(req) {
  return (
    normalizeDlobSource(readCookie(req, DLOB_SOURCE_COOKIE)) ??
    normalizeDlobSource(DLOB_SOURCE_DEFAULT) ??
    'mevnode'
  );
}
|
||||
|
||||
/**
 * Sanitize a user-supplied redirect target so it can only be a same-origin
 * absolute path. Anything else collapses to '/'.
 *
 * Fix: CR/LF are stripped BEFORE validation. The previous order validated
 * first and stripped afterwards, so an input like "/\n//evil.com" passed the
 * '//' prefix check and then collapsed to "///evil.com" — which some browsers
 * treat as scheme-relative (open redirect / header-injection vector).
 * A leading "/\" is rejected too, since browsers normalize '\' to '/'.
 *
 * @param {*} value - Raw redirect parameter (any type; coerced to string).
 * @returns {string} A safe same-origin path, or '/' when input is unsafe.
 */
function safeRedirectPath(value) {
  // Remove CR/LF first so a newline cannot hide a dangerous prefix.
  const s = String(value ?? '')
    .replace(/[\r\n]/g, '')
    .trim();
  if (!s.startsWith('/')) return '/';
  // '//host' is protocol-relative; '/\' is treated like '//' by browsers.
  if (s.startsWith('//') || s.startsWith('/\\')) return '/';
  return s;
}
|
||||
|
||||
/**
 * Persist the user's DLOB source preference in a long-lived (1 year),
 * HttpOnly, SameSite=Lax cookie. The value is validated via
 * normalizeDlobSource; invalid input sets nothing and returns false.
 *
 * Fix: append to any Set-Cookie headers already queued on the response
 * instead of overwriting them — a plain setHeader('set-cookie', ...) would
 * silently clobber e.g. a session cookie written earlier on the same
 * response.
 *
 * @param {import('http').ServerResponse} res - Response to attach the cookie to.
 * @param {{secure: boolean, dlobSource: *}} opts - Secure flag (HTTPS) and raw value.
 * @returns {boolean} true when a valid cookie was set, false otherwise.
 */
function setDlobSourceCookie(res, { secure, dlobSource }) {
  const src = normalizeDlobSource(dlobSource);
  if (!src) return false;
  const parts = [
    `${DLOB_SOURCE_COOKIE}=${src}`,
    'Path=/',
    'SameSite=Lax',
    'HttpOnly',
    'Max-Age=31536000',
  ];
  if (secure) parts.push('Secure');
  // Preserve previously queued Set-Cookie headers (string or array form).
  const prev = res.getHeader('set-cookie');
  const existing = prev == null ? [] : Array.isArray(prev) ? prev : [prev];
  res.setHeader('set-cookie', [...existing, parts.join('; ')]);
  return true;
}
|
||||
|
||||
function resolveAuthUser(req) {
|
||||
const user = readHeader(req, AUTH_USER_HEADER) || readHeader(req, 'x-webauth-user');
|
||||
const value = typeof user === 'string' ? user.trim() : '';
|
||||
@@ -423,7 +494,7 @@ function withCors(res) {
|
||||
res.setHeader('access-control-allow-methods', 'GET,POST,OPTIONS');
|
||||
res.setHeader(
|
||||
'access-control-allow-headers',
|
||||
'content-type, authorization, x-hasura-admin-secret, x-hasura-role, x-hasura-user-id'
|
||||
'content-type, authorization, x-hasura-admin-secret, x-hasura-role, x-hasura-user-id, x-hasura-dlob-source'
|
||||
);
|
||||
}
|
||||
|
||||
@@ -440,6 +511,8 @@ function proxyGraphqlHttp(req, res) {
|
||||
|
||||
const headers = stripHopByHopHeaders(req.headers);
|
||||
headers.host = target.host;
|
||||
delete headers['x-hasura-dlob-source'];
|
||||
headers['x-hasura-dlob-source'] = resolveDlobSource(req);
|
||||
|
||||
const upstreamReq = lib.request(
|
||||
{
|
||||
@@ -499,6 +572,8 @@ function proxyGraphqlWs(req, socket, head) {
|
||||
delete headers['content-length'];
|
||||
delete headers['content-type'];
|
||||
headers.host = target.host;
|
||||
delete headers['x-hasura-dlob-source'];
|
||||
headers['x-hasura-dlob-source'] = resolveDlobSource(req);
|
||||
|
||||
const lines = [];
|
||||
lines.push(`GET ${target.pathname + target.search} HTTP/1.1`);
|
||||
@@ -570,6 +645,25 @@ async function handler(req, res) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (req.method === 'GET' && url.pathname === '/prefs/dlob-source') {
|
||||
const set = url.searchParams.get('set');
|
||||
if (!set) {
|
||||
sendJson(res, 200, { ok: true, dlobSource: resolveDlobSource(req) });
|
||||
return;
|
||||
}
|
||||
|
||||
const ok = setDlobSourceCookie(res, { secure: isHttpsRequest(req), dlobSource: set });
|
||||
if (!ok) {
|
||||
sendJson(res, 400, { ok: false, error: 'invalid_dlob_source' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.statusCode = 302;
|
||||
res.setHeader('location', safeRedirectPath(url.searchParams.get('redirect') || '/'));
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
if (req.method === 'POST' && url.pathname === '/auth/login') {
|
||||
if (AUTH_MODE === 'off' || AUTH_MODE === 'none' || AUTH_MODE === 'disabled') {
|
||||
sendJson(res, 400, { ok: false, error: 'auth_disabled' });
|
||||
|
||||
@@ -14,7 +14,6 @@ patchesStrategicMerge:
|
||||
- frontend-auth-patch.yaml
|
||||
- frontend-graphql-proxy-patch.yaml
|
||||
- ingestor-dlob-patch.yaml
|
||||
- dlob-worker-patch.yaml
|
||||
|
||||
configMapGenerator:
|
||||
- name: trade-dlob-ingestor-script
|
||||
|
||||
Reference in New Issue
Block a user