OpenTelemetry
Last updated
Was this helpful?
Last updated
Was this helpful?
An open-source observability framework for generating, collecting, and exporting telemetry data (traces, metrics, logs).
Vendor-neutral: Works with any backend (e.g., Jaeger, Prometheus, Datadog, Tempo).
OTLP (the OpenTelemetry Protocol) is the standard protocol for transmitting telemetry data (traces, metrics, logs) between:
Instrumented applications → Observability backends.
Applications → OpenTelemetry Collectors.
Default protocol for OpenTelemetry, designed for high performance and interoperability.
npm i @opentelemetry/api \
@opentelemetry/auto-instrumentations-node \
@opentelemetry/core \
@opentelemetry/exporter-trace-otlp-http \
@opentelemetry/resources \
@opentelemetry/sdk-metrics \
@opentelemetry/sdk-node \
@opentelemetry/sdk-trace-node
import { NodeSDK } from '@opentelemetry/sdk-node';
import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
import { resourceFromAttributes } from '@opentelemetry/resources';
import {
ATTR_SERVICE_NAME,
ATTR_SERVICE_VERSION,
} from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
// Ship spans over OTLP/HTTP to the Grafana Tempo collector.
const traceExporter = new OTLPTraceExporter({
  url: 'http://tempo:4318/v1/traces',
});

// Assemble the Node SDK: service identity, the Tempo exporter, and
// auto-instrumentation so each incoming HTTP/Express request gets a span.
const sdk = new NodeSDK({
  // Identity attached to every exported span (service.name / service.version).
  resource: resourceFromAttributes({
    [ATTR_SERVICE_NAME]: 'promtail-test',
    [ATTR_SERVICE_VERSION]: '1.0',
  }),
  traceExporter,
  // Automatically create a span for each http request / express route.
  instrumentations: [
    getNodeAutoInstrumentations({
      '@opentelemetry/instrumentation-http': { enabled: true },
      '@opentelemetry/instrumentation-express': { enabled: true },
    }),
  ],
});

sdk.start();
import {
createStream,
Options as RotateOption,
Generator,
} from 'rotating-file-stream';
// Suffix appended to rotated (indexed) log files; configurable via env.
const LOG_FILE_ROTATE_SUFFIX = process.env.LOG_FILE_ROTATE_SUFFIX ?? '.txt';

// File-name generator for rotating-file-stream: the active file is
// "promtail-test.log"; rotated copies get "-<index>" plus the suffix.
const generator: Generator = (time: number | Date, index?: number): string => {
  const base = 'promtail-test';
  return index ? `${base}-${index}${LOG_FILE_ROTATE_SUFFIX}` : `${base}.log`;
};
// Factory consumed by pino's transport loader: creates the rotating write
// stream whose file names come from `generator` above.
// NOTE(review): this file mixes an ES `import` with `module.exports`; that
// only works when the project compiles to CommonJS — verify tsconfig.
module.exports = function (option: RotateOption) {
  return createStream(generator, option);
};
import { Options } from 'pino-http';
import pino from 'pino';
import { trace } from '@opentelemetry/api';
// File-logging destination and rotation policy, overridable via env vars.
const LOG_FILE_BASE_PATH = process.env.LOG_FILE_BASE_PATH ?? '/tmp';
const LOG_FILE_ROTATE_SIZE = process.env.LOG_FILE_ROTATE_SIZE ?? '50M';
// NOTE(review): this is a string when read from the environment and a number
// otherwise; consumers must convert (configTransport applies Number()).
const LOG_FILE_MAX_FILE_HISTORY = process.env.LOG_FILE_MAX_FILE_HISTORY ?? 5;

// Any of the transport option shapes pino accepts (single/multi/pipeline).
type TransportOptions =
  | pino.TransportSingleOptions<Record<string, any>>
  | pino.TransportMultiOptions<Record<string, any>>
  | pino.TransportPipelineOptions<Record<string, any>>;

// pino-pretty display settings shared by the console target.
const targetOptions = {
  translateTime: 'SYS:yyyy-mm-dd HH:MM:ss.l o',
  ignore: 'res,context,filename',
  singleLine: true,
};
// get back the span id and trace id from the request
// append to the logging
// Read the currently-active OpenTelemetry span (if any) and surface its
// trace/span ids so they can be merged into every log line.
const getTraceContext = () => {
  const span = trace.getActiveSpan();
  if (!span) return {};
  const { traceId, spanId } = span.spanContext();
  return traceId ? { traceId, spanId } : {};
};
/**
 * Build the pino-http options consumed by nestjs-pino.
 *
 * Level comes from BOT_BUILDER_BACKEND_LOG_LEVEL (default "info"); every
 * record gains an upper-cased level label plus the active trace context.
 */
export function pinoOptionConfig(): Options {
  const logLevel = process.env.BOT_BUILDER_BACKEND_LOG_LEVEL || 'info';
  return {
    level: logLevel,
    // Merge extra fields into each record: readable level + trace ids.
    mixin: (_mergeObject, level) => ({
      'level-label': pino.levels.labels[level].toUpperCase(),
      ...getTraceContext(),
    }),
    timestamp: pino.stdTimeFunctions.isoTime,
    autoLogging: true,
    quietReqLogger: false,
    transport: configTransport(logLevel),
    serializers: { req: formatRequestLog },
  };
}
// Serialize an incoming request into a compact one-line summary, attaching
// the Gravitee gateway correlation headers when the request carried them.
const formatRequestLog = (req: pino.SerializedRequest) => {
  const gravitee = req.headers['x-gravitee-request-id']
    ? {
        'x-gravitee-request-id': req.headers['x-gravitee-request-id'],
        'x-gravitee-transaction-id': req.headers['x-gravitee-transaction-id'],
      }
    : null;
  return {
    api: `[${req.id}] [${req.method}] [${req.url}]`,
    gravitee,
  };
};
// Two sinks at the same level: pretty console output, and a size-rotated
// file handled by the custom transport in ./pino-transport-file-rotating.
const configTransport = (logLevel: string): TransportOptions => {
  const prettyConsoleTarget = {
    target: 'pino-pretty',
    level: logLevel,
    options: { ...targetOptions },
  };
  const rotatingFileTarget = {
    target: require.resolve('./pino-transport-file-rotating'),
    level: logLevel,
    options: {
      size: LOG_FILE_ROTATE_SIZE,
      maxFiles: Number(LOG_FILE_MAX_FILE_HISTORY),
      path: LOG_FILE_BASE_PATH,
    },
  };
  return { targets: [prettyConsoleTarget, rotatingFileTarget] };
};
// IMPORTANT: initialize OpenTelemetry FIRST. Auto-instrumentation can only
// patch modules (http, express via Nest, ...) loaded AFTER sdk.start() runs,
// and module side effects execute in import-statement order — so this
// side-effect import must precede the Nest/application imports.
import './tracing';
import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module';
import { Logger } from 'nestjs-pino';

/**
 * Create the Nest application, replace the default logger with the
 * pino-backed one, and start listening on PORT (default 3000).
 */
async function bootstrap() {
  const app = await NestFactory.create(AppModule);
  // Route Nest's internal logging through nestjs-pino.
  app.useLogger(app.get(Logger));
  await app.listen(process.env.PORT ?? 3000);
}

bootstrap();
import { Module } from '@nestjs/common';
import { LoggerModule } from 'nestjs-pino';
import { pinoOptionConfig } from './configs/pino.config';
import { TestModule } from './test/test.module';

// Root module: registers the pino HTTP logger application-wide and mounts
// the demo feature module.
@Module({
  imports: [
    // nestjs-pino: logs every HTTP request and exposes Logger for DI.
    LoggerModule.forRoot({
      pinoHttp: pinoOptionConfig(),
    }),
    TestModule,
  ],
  controllers: [],
  providers: [],
})
export class AppModule {}
uv add opentelemetry-api \
opentelemetry-sdk \
opentelemetry-instrumentation-fastapi \
opentelemetry-exporter-otlp-proto-http
import logging
import logging.config

from fastapi import FastAPI
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.sdk.resources import Resource, SERVICE_NAME, SERVICE_VERSION, DEPLOYMENT_ENVIRONMENT
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

import config
app = FastAPI()

# Register the tracer provider with this service's identity; these resource
# attributes are attached to every exported span.
trace.set_tracer_provider(TracerProvider(
    resource=Resource(attributes={
        SERVICE_NAME: "python-opentelemetry",
        SERVICE_VERSION: "1.0.0",
        DEPLOYMENT_ENVIRONMENT: "development",
    })
))

# Batch finished spans and export them over OTLP/HTTP to Tempo.
trace.get_tracer_provider().add_span_processor(
    BatchSpanProcessor(
        OTLPSpanExporter(
            endpoint="http://tempo:4318/v1/traces"  # Tempo OTLP/HTTP endpoint
        )
    )
)

# Automatically add a trace and span to each FastAPI route.
FastAPIInstrumentor.instrument_app(app)

# NOTE(review): logging.config is a submodule; `import logging` alone does not
# guarantee it is loaded — make sure `import logging.config` is present.
logging.config.dictConfig(config.LOGGING_CONFIG)
logger = logging.getLogger(__name__)
logging.getLogger().setLevel(logging.INFO)


@app.get("/test")
def test():
    # Emits a log line that carries the current trace/span ids (added by the
    # custom JSON formatter in config.py).
    logger.info("Hello World")
    return {"message": "Hello World 123 "}
from os import mkdir
from decouple import config
from os.path import exists
from pythonjsonlogger import jsonlogger
from opentelemetry import trace
from collections import OrderedDict
# Base directory for log files (overridable via LOG_FILE_BASE_PATH).
logFileBasePath = config('LOG_FILE_BASE_PATH', cast=str, default='/tmp')

# Create the log directory up front. Tolerating FileExistsError avoids the
# TOCTOU race of checking exists() and then calling mkdir() separately.
try:
    mkdir(logFileBasePath)
except FileExistsError:
    pass
class CustomJsonFormatter(jsonlogger.JsonFormatter):
    """JSON log formatter that renames standard fields and injects the
    active OpenTelemetry trace/span ids into every record."""

    def add_fields(self, log_record: OrderedDict, record, message_dict):
        super().add_fields(log_record, record, message_dict)
        # Rename asctime -> time and levelname -> level.
        log_record.update({'time': log_record.pop('asctime', None)})
        log_record.update({'level': log_record.pop('levelname', None)})
        # Correlate the log line with the current trace, but only while a
        # span is actually recording. NOTE(review): get_current_span()
        # normally returns an invalid (non-recording) span rather than None.
        span = trace.get_current_span()
        if span is not None and span.is_recording():
            span_ctx = span.get_span_context()
            log_record['trace_id'] = format(span_ctx.trace_id, "032x")
            log_record['span_id'] = format(span_ctx.span_id, "016x")
# formatting the log
# dictConfig schema: JSON-formatted log lines flow both to stdout and to a
# size-rotated file under logFileBasePath.
LOGGING_CONFIG = {
    'version': 1,
    # Keep loggers created before dictConfig() (uvicorn, fastapi, ...) alive.
    'disable_existing_loggers': False,
    'formatters': {
        # Plain-text formatter (not referenced by the handlers below).
        'default_formatter': {
            'format': '[%(asctime)s - %(name)s] %(levelname)s : %(funcName)s:%(lineno)d — %(message)s',
            'datefmt': '%Y-%m-%dT%H:%M:%SZ',
        },
        # JSON formatter that also injects trace_id/span_id
        # (see CustomJsonFormatter above).
        'JsonLoggerFormatter': {
            '()': CustomJsonFormatter,
            'format': '[%(asctime)s - %(name)s] %(levelname)s : %(funcName)s:%(lineno)d — %(message)s',
            'datefmt': '%Y-%m-%dT%H:%M:%SZ',
        }
    },
    'handlers': {
        'stream_handler': {
            'class': 'logging.StreamHandler',
            'formatter': 'JsonLoggerFormatter',
        },
        'rotating_file_handler': {
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'JsonLoggerFormatter',
            'filename': logFileBasePath + '/python-opentelemetry.log',
            # Rotate at ~51.2 MB; keep 5 historical files by default.
            "maxBytes": config('LOG_FILE_ROTATE_BYTES_SIZE', cast=int, default=51200000),
            "backupCount": config('LOG_FILE_MAX_FILE_HISTORY', cast=int, default=5),
        }
    },
    'loggers': {
        # Root logger: everything propagates here and fans out to both sinks.
        '': {
            'handlers': ['stream_handler', 'rotating_file_handler'],
            'level': config('LOG_LEVEL', cast=str, default='INFO'),
            'propagate': True
        }
    }
}