Skip to content
14 changes: 14 additions & 0 deletions docs/config-schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -630,6 +630,10 @@
"$ref": "#/$defs/GeminiConfig",
"description": "Google AI Studio (Gemini) payload extraction and parsing"
},
"mcp": {
"$ref": "#/$defs/MCPConfig",
"description": "Model Context Protocol (MCP) payload extraction and parsing"
},
"openai": {
"$ref": "#/$defs/OpenAIConfig",
"description": "OpenAI payload extraction and parsing"
Expand Down Expand Up @@ -1245,6 +1249,16 @@
},
"type": "object"
},
"MCPConfig": {
"properties": {
"enabled": {
"type": "boolean",
"description": "Enable Model Context Protocol (MCP) payload extraction and parsing",
"x-env-var": "OTEL_EBPF_HTTP_MCP_ENABLED"
}
},
"type": "object"
},
"MapsConfig": {
"properties": {
"global_scale_factor": {
Expand Down
4 changes: 4 additions & 0 deletions internal/test/integration/components/pythonmcp/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Digest-pinned Python base image for reproducible integration-test builds.
FROM python:3.14@sha256:61346539f7b26521a230e72c11da5ebd872924745074b19736e7d65ba748c366
# Port the MCP test server listens on (see main.py).
EXPOSE 8080
COPY main.py /main.py
CMD ["python", "main.py"]
163 changes: 163 additions & 0 deletions internal/test/integration/components/pythonmcp/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,163 @@
"""
MCP (Model Context Protocol) server for integration testing.

Implements a subset of MCP methods over JSON-RPC 2.0 / HTTP
using only the Python standard library.
"""

import json
import uuid
from http.server import BaseHTTPRequestHandler, HTTPServer

# Single session id generated once per process; echoed back in the
# Mcp-Session-Id response header on every reply (see Handler._send_json).
SESSION_ID = str(uuid.uuid4())

# Tool name -> canned text returned by the "tools/call" handler.
KNOWN_TOOLS = {
    "get-weather": "Sunny, 72\u00b0F in the requested location",
    "calculator": "42",
}

# Resource URI -> resource descriptor returned by "resources/read".
KNOWN_RESOURCES = {
    "file:///home/user/documents/report.pdf": {
        "uri": "file:///home/user/documents/report.pdf",
        "mimeType": "application/pdf",
        "text": "Sample report content",
    },
}

# Prompt name -> prompt payload returned by "prompts/get".
KNOWN_PROMPTS = {
    "analyze-code": {
        "description": "Analyzes code for potential issues",
        "messages": [
            {
                "role": "user",
                "content": {"type": "text", "text": "Analyze this code"},
            }
        ],
    },
}


def make_response(result, req_id):
    """Build a JSON-RPC 2.0 success envelope carrying `result` for `req_id`."""
    envelope = {"jsonrpc": "2.0"}
    envelope["result"] = result
    envelope["id"] = req_id
    return envelope


def make_error(code, message, req_id):
    """Build a JSON-RPC 2.0 error envelope with the given code and message."""
    error_obj = {"code": code, "message": message}
    return {"jsonrpc": "2.0", "error": error_obj, "id": req_id}


def handle_initialize(params, req_id):
    """Answer the MCP 'initialize' handshake with static server metadata."""
    capabilities = {"tools": {}, "resources": {}, "prompts": {}}
    server_info = {"name": "test-mcp-server", "version": "1.0"}
    result = {
        "protocolVersion": "2025-03-26",
        "capabilities": capabilities,
        "serverInfo": server_info,
    }
    return make_response(result, req_id)


def handle_tools_list(_params, req_id):
    """Return the names of all registered tools."""
    listing = []
    for tool_name in KNOWN_TOOLS:
        listing.append({"name": tool_name})
    return make_response({"tools": listing}, req_id)


def handle_tools_call(params, req_id):
    """Invoke a known tool and return its canned text content.

    Unknown tool names produce a JSON-RPC invalid-params (-32602) error.
    """
    name = (params or {}).get("name", "")
    tool_output = KNOWN_TOOLS.get(name)
    if tool_output is None:
        return make_error(-32602, f"Unknown tool: {name}", req_id)
    return make_response({"content": [{"type": "text", "text": tool_output}]}, req_id)


def handle_resources_read(params, req_id):
    """Return the stored contents for a known resource URI.

    Unknown URIs produce a JSON-RPC invalid-params (-32602) error.
    """
    uri = (params or {}).get("uri", "")
    resource = KNOWN_RESOURCES.get(uri)
    if resource is None:
        return make_error(-32602, f"Unknown resource: {uri}", req_id)
    return make_response({"contents": [resource]}, req_id)


def handle_prompts_get(params, req_id):
    """Return the stored prompt definition for a known prompt name.

    Unknown names produce a JSON-RPC invalid-params (-32602) error.
    """
    name = (params or {}).get("name", "")
    if name in KNOWN_PROMPTS:
        return make_response(KNOWN_PROMPTS[name], req_id)
    return make_error(-32602, f"Unknown prompt: {name}", req_id)


def handle_ping(_params, req_id):
    """Answer 'ping' with an empty result object."""
    empty_result = {}
    return make_response(empty_result, req_id)


# JSON-RPC method name -> handler function. Every handler takes
# (params, req_id) and returns a complete JSON-RPC response dict.
DISPATCH = {
    "initialize": handle_initialize,
    "tools/list": handle_tools_list,
    "tools/call": handle_tools_call,
    "resources/read": handle_resources_read,
    "prompts/get": handle_prompts_get,
    "ping": handle_ping,
}


class Handler(BaseHTTPRequestHandler):
    """HTTP handler exposing the test MCP server.

    POST carries JSON-RPC 2.0 requests dispatched via DISPATCH;
    GET /smoke is a liveness probe for the test harness.
    """

    def do_POST(self):
        # Read the request body; a missing Content-Length yields an empty body.
        content_length = int(self.headers.get("Content-Length", 0))
        body = self.rfile.read(content_length).decode("utf-8")

        try:
            req = json.loads(body)
        except json.JSONDecodeError:
            # JSON-RPC parse error: id is null because it could not be read.
            self._send_json(
                make_error(-32700, "Parse error", None), 200
            )
            return

        # Reject anything not explicitly declaring JSON-RPC 2.0.
        if req.get("jsonrpc") != "2.0":
            self._send_json(
                make_error(-32600, "Invalid Request: missing jsonrpc 2.0",
                           req.get("id")),
                200,
            )
            return

        method = req.get("method", "")
        params = req.get("params")
        req_id = req.get("id")

        # Look up the method handler; unknown methods get -32601.
        handler = DISPATCH.get(method)
        if handler is None:
            self._send_json(
                make_error(-32601, f"Method not found: {method}", req_id), 200
            )
            return

        resp = handler(params, req_id)
        self._send_json(resp, 200)

    def _send_json(self, obj, status):
        # Serialize `obj`, then emit status line and headers before the body.
        # Every response carries the process-wide Mcp-Session-Id header.
        resp_body = json.dumps(obj).encode("utf-8")
        self.send_response(status)
        self.send_header("Content-Type", "application/json")
        self.send_header("Content-Length", str(len(resp_body)))
        self.send_header("Mcp-Session-Id", SESSION_ID)
        self.end_headers()
        self.wfile.write(resp_body)

    def do_GET(self):
        # /smoke lets the test harness confirm the server is reachable.
        if self.path == "/smoke":
            self.send_response(200)
            self.end_headers()
        else:
            self.send_response(404)
            self.end_headers()

    def log_message(self, format, *args):
        # Route BaseHTTPRequestHandler's access logging to stdout with a tag.
        print(f"[mcp-server] {format % args}")


# Entry point: serve MCP JSON-RPC requests on all interfaces, port 8080,
# until the process is terminated.
if __name__ == "__main__":
    server = HTTPServer(("0.0.0.0", 8080), Handler)
    print("MCP server running on port 8080")
    server.serve_forever()
93 changes: 93 additions & 0 deletions internal/test/integration/docker-compose-python-mcp.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
version: "3.8"

services:
testserver:
build:
context: ../../../internal/test/integration/components/pythonmcp/
dockerfile: Dockerfile
image: hatest-testserver-python-mcp
ports:
- "${TEST_SERVICE_PORTS}"
depends_on:
otelcol:
condition: service_started

obi:
build:
context: ../../..
dockerfile: ./internal/test/integration/components/obi/Dockerfile
volumes:
- ./configs/:/configs
- ./system/sys/kernel/security:/sys/kernel/security
- ../../../testoutput:/coverage
- ../../../testoutput/run-python-mcp:/var/run/obi
image: hatest-obi
privileged: true
network_mode: "service:testserver"
pid: "service:testserver"
environment:
OTEL_EBPF_CONFIG_PATH: "/configs/obi-config.yml"
GOCOVERDIR: "/coverage"
OTEL_EBPF_TRACE_PRINTER: "text"
OTEL_EBPF_OPEN_PORT: "${OTEL_EBPF_OPEN_PORT}"
OTEL_EBPF_DISCOVERY_POLL_INTERVAL: 500ms
OTEL_EBPF_EXECUTABLE_PATH: "${OTEL_EBPF_EXECUTABLE_PATH}"
OTEL_EBPF_SERVICE_NAMESPACE: "integration-test"
OTEL_EBPF_METRICS_INTERVAL: "10ms"
OTEL_EBPF_BPF_BATCH_TIMEOUT: "10ms"
OTEL_EBPF_OTLP_TRACES_BATCH_TIMEOUT: "1ms"
OTEL_EBPF_LOG_LEVEL: "DEBUG"
OTEL_EBPF_BPF_DEBUG: "TRUE"
OTEL_EBPF_HOSTNAME: "obi"
OTEL_EBPF_BPF_HTTP_REQUEST_TIMEOUT: "5s"
OTEL_EBPF_PROCESSES_INTERVAL: "100ms"
OTEL_EBPF_METRICS_FEATURES: "application"
OTEL_EBPF_BPF_BUFFER_SIZE_HTTP: 1024
OTEL_EBPF_HTTP_MCP_ENABLED: "true"
depends_on:
testserver:
condition: service_started

# OpenTelemetry Collector
otelcol:
image: otel/opentelemetry-collector-contrib:0.150.1@sha256:a516c26968aa1feb5e5fc0562e3338ea13755cb4f373603226bcc4e276374ad0
container_name: otel-col
deploy:
resources:
limits:
memory: 125M
restart: unless-stopped
command: ["--config=/etc/otelcol-config/otelcol-config.yml"]
volumes:
- ./configs/:/etc/otelcol-config
ports:
- "4317" # OTLP over gRPC receiver
- "4318:4318" # OTLP over HTTP receiver
- "9464" # Prometheus exporter
- "8888" # metrics endpoint
depends_on:
prometheus:
condition: service_started

# Prometheus
prometheus:
image: quay.io/prometheus/prometheus:v3.11.0@sha256:131bf4c9d8a0337782ea8b753249f4903afac01379f3cced87ceaf8ca82ab9f3
container_name: prometheus
command:
- --config.file=/etc/prometheus/prometheus-config.yml
- --web.enable-lifecycle
- --web.route-prefix=/
volumes:
- ./configs/:/etc/prometheus
ports:
- "9090:9090"

jaeger:
image: jaegertracing/all-in-one:1.60@sha256:4fd2d70fa347d6a47e79fcb06b1c177e6079f92cba88b083153d56263082135e
ports:
- "16686:16686" # Query frontend
- "4317" # OTEL GRPC traces collector
- "4318" # OTEL HTTP traces collector
environment:
- COLLECTOR_OTLP_ENABLED=true
- LOG_LEVEL=debug
Loading