Split tracing middleware into two
mairatma committed Oct 2, 2023
Parent: 68d26b2 · Commit: 8d7d6a9
Showing 3 changed files with 73 additions and 44 deletions.
70 changes: 70 additions & 0 deletions src/service/metrics/requestMetricsMiddleware.ts
@@ -0,0 +1,70 @@
import { finished as onStreamFinished } from 'stream'
import { hrToMillisFloat } from '../../utils'
import { ServiceContext } from '../worker/runtime/typings'
import {
createConcurrentRequestsInstrument,
createRequestsResponseSizesInstrument,
createRequestsTimingsInstrument,
createTotalAbortedRequestsInstrument,
createTotalRequestsInstrument,
RequestsMetricLabels,
} from '../tracing/metrics/instruments'


export const addRequestMetricsMiddleware = () => {
const concurrentRequests = createConcurrentRequestsInstrument()
const requestTimings = createRequestsTimingsInstrument()
const totalRequests = createTotalRequestsInstrument()
const responseSizes = createRequestsResponseSizesInstrument()
const abortedRequests = createTotalAbortedRequestsInstrument()

return async function addRequestMetrics(ctx: ServiceContext, next: () => Promise<void>) {
const start = process.hrtime()
concurrentRequests.inc(1)

ctx.req.once('aborted', () =>
abortedRequests.inc({ [RequestsMetricLabels.REQUEST_HANDLER]: ctx.requestHandlerName }, 1)
)

let responseClosed = false
ctx.res.once('close', () => (responseClosed = true))

try {
await next()
} finally {
const responseLength = ctx.response.length
if (responseLength) {
responseSizes.observe(
{ [RequestsMetricLabels.REQUEST_HANDLER]: ctx.requestHandlerName },
responseLength
)
}

totalRequests.inc(
{
[RequestsMetricLabels.REQUEST_HANDLER]: ctx.requestHandlerName,
[RequestsMetricLabels.STATUS_CODE]: ctx.response.status,
},
1
)

const onResFinished = () => {
requestTimings.observe(
{
[RequestsMetricLabels.REQUEST_HANDLER]: ctx.requestHandlerName,
},
hrToMillisFloat(process.hrtime(start))
)

concurrentRequests.dec(1)
}

if (responseClosed) {
onResFinished()
} else {
onStreamFinished(ctx.res, onResFinished)
}
}
}
}
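
The instrument factories and the timing helper imported at the top of this new file live in src/service/tracing/metrics/instruments.ts and src/utils, which this commit does not modify. A minimal, hypothetical sketch of their shape, inferred only from how addRequestMetrics calls them and assuming prom-client (the metric names below are placeholders, not the real ones):

import { Counter, Gauge, Histogram } from 'prom-client'

// Hypothetical sketch only -- not part of this commit. Shapes and names are
// inferred from how addRequestMetrics uses them; the real implementations are
// in src/service/tracing/metrics/instruments.ts and src/utils.
export enum RequestsMetricLabels {
  REQUEST_HANDLER = 'request_handler',
  STATUS_CODE = 'status_code',
}

// Gauge of in-flight requests: inc(1) on entry, dec(1) once the response finishes.
export const createConcurrentRequestsInstrument = () =>
  new Gauge({ name: 'runtime_http_requests_current', help: 'Requests currently being handled' })

// Histogram of request durations, observed in fractional milliseconds per handler.
export const createRequestsTimingsInstrument = () =>
  new Histogram({
    name: 'runtime_http_request_duration_milliseconds',
    help: 'Request duration in milliseconds',
    labelNames: [RequestsMetricLabels.REQUEST_HANDLER],
  })

// Counter of finished requests, labeled by handler and status code.
export const createTotalRequestsInstrument = () =>
  new Counter({
    name: 'runtime_http_requests_total',
    help: 'Total requests handled',
    labelNames: [RequestsMetricLabels.REQUEST_HANDLER, RequestsMetricLabels.STATUS_CODE],
  })

// Histogram of response body sizes (ctx.response.length), per handler.
export const createRequestsResponseSizesInstrument = () =>
  new Histogram({
    name: 'runtime_http_response_size_bytes',
    help: 'Response size in bytes',
    labelNames: [RequestsMetricLabels.REQUEST_HANDLER],
  })

// Counter of requests aborted by the client before the response completed.
export const createTotalAbortedRequestsInstrument = () =>
  new Counter({
    name: 'runtime_http_aborted_requests_total',
    help: 'Requests aborted by the client',
    labelNames: [RequestsMetricLabels.REQUEST_HANDLER],
  })

// process.hrtime(start) yields [seconds, nanoseconds]; convert to fractional milliseconds.
export const hrToMillisFloat = ([seconds, nanos]: [number, number]): number =>
  seconds * 1e3 + nanos / 1e6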

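The finally block in the new middleware above records totals and response sizes as soon as await next() settles, but it defers the timing observation and the concurrent-requests decrement until the response stream has actually finished, since next() can resolve while the body is still streaming to the client. A standalone sketch of that Node pattern on a plain http server (illustrative only, not part of this commit):

import { createServer } from 'http'
import { Readable, finished as onStreamFinished } from 'stream'

const server = createServer((_req, res) => {
  const start = process.hrtime()

  // Headers go out immediately, but the body keeps streaming after this handler returns.
  Readable.from(['chunk-1\n', 'chunk-2\n']).pipe(res)

  // Fires only once the response has fully flushed (or errored/closed),
  // which is when a duration measurement is actually meaningful.
  onStreamFinished(res, (err) => {
    const [seconds, nanos] = process.hrtime(start)
    const millis = seconds * 1e3 + nanos / 1e6
    console.log(`response finished after ${millis.toFixed(2)}ms`, err ? `(${err.message})` : '')
  })
})

server.listen(3000)
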
45 changes: 1 addition & 44 deletions src/service/tracing/tracingMiddlewares.ts
@@ -7,35 +7,19 @@ import { RuntimeLogFields } from '../../tracing/LogFields'
import { CustomHttpTags, OpentracingTags, VTEXIncomingRequestTags } from '../../tracing/Tags'
import { UserLandTracer } from '../../tracing/UserLandTracer'
import { cloneAndSanitizeHeaders } from '../../tracing/utils'
import { hrToMillis, hrToMillisFloat } from '../../utils'
import { hrToMillis } from '../../utils'
import { ServiceContext } from '../worker/runtime/typings'
import {
createConcurrentRequestsInstrument,
createRequestsResponseSizesInstrument,
createRequestsTimingsInstrument,
createTotalAbortedRequestsInstrument,
createTotalRequestsInstrument,
RequestsMetricLabels,
} from './metrics/instruments'

const PATHS_BLACKLISTED_FOR_TRACING = ['/_status', '/healthcheck']

export const addTracingMiddleware = (tracer: Tracer) => {
const concurrentRequests = createConcurrentRequestsInstrument()
const requestTimings = createRequestsTimingsInstrument()
const totalRequests = createTotalRequestsInstrument()
const responseSizes = createRequestsResponseSizesInstrument()
const abortedRequests = createTotalAbortedRequestsInstrument()

return async function addTracing(ctx: ServiceContext, next: () => Promise<void>) {
const start = process.hrtime()
concurrentRequests.inc(1)
const rootSpan = tracer.extract(FORMAT_HTTP_HEADERS, ctx.request.headers) as undefined | SpanContext
ctx.tracing = { tracer, currentSpan: undefined}

if (!shouldTrace(ctx, rootSpan)) {
await next()
concurrentRequests.dec(1)
return
}

@@ -47,9 +31,6 @@ export const addTracingMiddleware = (tracer: Tracer) => {
const initialSamplingDecision = getTraceInfo(currentSpan).isSampled

ctx.tracing = { currentSpan, tracer }
ctx.req.once('aborted', () =>
abortedRequests.inc({ [RequestsMetricLabels.REQUEST_HANDLER]: ctx.requestHandlerName }, 1)
)

let responseClosed = false
ctx.res.once('close', () => (responseClosed = true))
@@ -60,22 +41,6 @@
ErrorReport.create({ originalError: err }).injectOnSpan(currentSpan, ctx.vtex?.logger)
throw err
} finally {
const responseLength = ctx.response.length
if (responseLength) {
responseSizes.observe(
{ [RequestsMetricLabels.REQUEST_HANDLER]: ctx.requestHandlerName },
responseLength
)
}

totalRequests.inc(
{
[RequestsMetricLabels.REQUEST_HANDLER]: ctx.requestHandlerName,
[RequestsMetricLabels.STATUS_CODE]: ctx.response.status,
},
1
)

const traceInfo = getTraceInfo(currentSpan)
if (traceInfo?.isSampled) {
if (!initialSamplingDecision) {
@@ -98,14 +63,6 @@
}

const onResFinished = () => {
requestTimings.observe(
{
[RequestsMetricLabels.REQUEST_HANDLER]: ctx.requestHandlerName,
},
hrToMillisFloat(process.hrtime(start))
)

concurrentRequests.dec(1)
currentSpan?.finish()
}

2 changes: 2 additions & 0 deletions src/service/worker/index.ts
@@ -14,6 +14,7 @@ import { logOnceToDevConsole } from '../logger/console'
import { LogLevel } from '../logger/logger'
import { TracerSingleton } from '../tracing/TracerSingleton'
import { addTracingMiddleware } from '../tracing/tracingMiddlewares'
import { addRequestMetricsMiddleware } from '../metrics/requestMetricsMiddleware'
import { addProcessListeners, logger } from './listeners'
import {
healthcheckHandler,
@@ -221,6 +222,7 @@ export const startWorker = (serviceJSON: ServiceJSON) => {
.use(error)
.use(prometheusLoggerMiddleware())
.use(addTracingMiddleware(tracer))
.use(addRequestMetricsMiddleware())
.use(addMetricsLoggerMiddleware())
.use(concurrentRateLimiter(serviceJSON?.rateLimitPerReplica?.concurrent))
.use(compress())

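Registration order in startWorker is also nesting order: addTracingMiddleware wraps addRequestMetricsMiddleware, which in turn wraps everything registered after it, so the metrics middleware times the rest of the stack but not the tracing middleware itself. A minimal, hypothetical koa-compose sketch of that unwinding behavior (not from this commit):

import compose from 'koa-compose'

// Hypothetical demonstration: Koa middlewares run in registration order on the
// way in and unwind in reverse on the way out.
const logStep = (name: string) => async (_ctx: {}, next: () => Promise<any>) => {
  console.log(`enter ${name}`)
  await next()
  console.log(`leave ${name}`)
}

const stack = compose([logStep('addTracing'), logStep('addRequestMetrics'), logStep('handler')])

// Prints: enter addTracing, enter addRequestMetrics, enter handler,
//         leave handler, leave addRequestMetrics, leave addTracing
stack({}).catch(console.error)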