diff --git a/dd-trace-api/src/main/java/datadog/trace/api/config/TraceInstrumentationConfig.java b/dd-trace-api/src/main/java/datadog/trace/api/config/TraceInstrumentationConfig.java index 19d05eb8393..80c294550e6 100644 --- a/dd-trace-api/src/main/java/datadog/trace/api/config/TraceInstrumentationConfig.java +++ b/dd-trace-api/src/main/java/datadog/trace/api/config/TraceInstrumentationConfig.java @@ -196,5 +196,10 @@ public final class TraceInstrumentationConfig { public static final String SQS_BODY_PROPAGATION_ENABLED = "trace.sqs.body.propagation.enabled"; + public static final String TRACE_RESOURCE_RENAMING_ENABLED = "trace.resource.renaming.enabled"; + + public static final String TRACE_RESOURCE_RENAMING_ALWAYS_SIMPLIFIED_ENDPOINT = + "trace.resource.renaming.always.simplified.endpoint"; + private TraceInstrumentationConfig() {} } diff --git a/dd-trace-core/src/main/java/datadog/trace/common/metrics/ConflatingMetricsAggregator.java b/dd-trace-core/src/main/java/datadog/trace/common/metrics/ConflatingMetricsAggregator.java index f027c0801c1..72277bc6817 100644 --- a/dd-trace-core/src/main/java/datadog/trace/common/metrics/ConflatingMetricsAggregator.java +++ b/dd-trace-core/src/main/java/datadog/trace/common/metrics/ConflatingMetricsAggregator.java @@ -3,6 +3,8 @@ import static datadog.communication.ddagent.DDAgentFeaturesDiscovery.V6_METRICS_ENDPOINT; import static datadog.trace.api.DDTags.BASE_SERVICE; import static datadog.trace.api.Functions.UTF8_ENCODE; +import static datadog.trace.bootstrap.instrumentation.api.Tags.HTTP_ENDPOINT; +import static datadog.trace.bootstrap.instrumentation.api.Tags.HTTP_METHOD; import static datadog.trace.bootstrap.instrumentation.api.Tags.SPAN_KIND; import static datadog.trace.bootstrap.instrumentation.api.Tags.SPAN_KIND_CLIENT; import static datadog.trace.bootstrap.instrumentation.api.Tags.SPAN_KIND_CONSUMER; @@ -307,6 +309,12 @@ private boolean spanKindEligible(@Nonnull CharSequence spanKind) { } private boolean publish(CoreSpan span, boolean isTopLevel, CharSequence spanKind) { + // Extract HTTP method and endpoint + Object httpMethodObj = span.unsafeGetTag(HTTP_METHOD); + String httpMethod = httpMethodObj != null ? httpMethodObj.toString() : null; + Object httpEndpointObj = span.unsafeGetTag(HTTP_ENDPOINT); + String httpEndpoint = httpEndpointObj != null ? 
httpEndpointObj.toString() : null; + MetricKey newKey = new MetricKey( span.getResourceName(), @@ -318,7 +326,9 @@ private boolean publish(CoreSpan span, boolean isTopLevel, CharSequence spanK span.getParentId() == 0, SPAN_KINDS.computeIfAbsent( spanKind, UTF8BytesString::create), // save repeated utf8 conversions - getPeerTags(span, spanKind.toString())); + getPeerTags(span, spanKind.toString()), + httpMethod, + httpEndpoint); MetricKey key = keys.putIfAbsent(newKey, newKey); if (null == key) { key = newKey; diff --git a/dd-trace-core/src/main/java/datadog/trace/common/metrics/MetricKey.java b/dd-trace-core/src/main/java/datadog/trace/common/metrics/MetricKey.java index 73aca7d6daf..f3d334ada08 100644 --- a/dd-trace-core/src/main/java/datadog/trace/common/metrics/MetricKey.java +++ b/dd-trace-core/src/main/java/datadog/trace/common/metrics/MetricKey.java @@ -18,6 +18,8 @@ public final class MetricKey { private final boolean isTraceRoot; private final UTF8BytesString spanKind; private final List peerTags; + private final UTF8BytesString httpMethod; + private final UTF8BytesString httpEndpoint; public MetricKey( CharSequence resource, @@ -28,7 +30,9 @@ public MetricKey( boolean synthetics, boolean isTraceRoot, CharSequence spanKind, - List peerTags) { + List peerTags, + CharSequence httpMethod, + CharSequence httpEndpoint) { this.resource = null == resource ? EMPTY : UTF8BytesString.create(resource); this.service = null == service ? EMPTY : UTF8BytesString.create(service); this.operationName = null == operationName ? EMPTY : UTF8BytesString.create(operationName); @@ -38,6 +42,8 @@ public MetricKey( this.isTraceRoot = isTraceRoot; this.spanKind = null == spanKind ? EMPTY : UTF8BytesString.create(spanKind); this.peerTags = peerTags == null ? Collections.emptyList() : peerTags; + this.httpMethod = null == httpMethod ? EMPTY : UTF8BytesString.create(httpMethod); + this.httpEndpoint = null == httpEndpoint ? EMPTY : UTF8BytesString.create(httpEndpoint); // Unrolled polynomial hashcode to avoid varargs allocation // and eliminate data dependency between iterations as in Arrays.hashCode. @@ -47,13 +53,15 @@ public MetricKey( // https://richardstartin.github.io/posts/still-true-in-java-9-handwritten-hash-codes-are-faster this.hash = - -196513505 * Boolean.hashCode(this.isTraceRoot) - + -1807454463 * this.spanKind.hashCode() - + 887_503_681 * this.peerTags.hashCode() // possibly unroll here has well. - + 28_629_151 * this.resource.hashCode() - + 923_521 * this.service.hashCode() - + 29791 * this.operationName.hashCode() - + 961 * this.type.hashCode() + -196_513_505 * Boolean.hashCode(this.isTraceRoot) + + -1_807_454_463 * this.spanKind.hashCode() + + 887_503_681 * this.peerTags.hashCode() + + 28_629_151 * this.httpMethod.hashCode() + + 923_521 * this.httpEndpoint.hashCode() + + 29_791 * this.resource.hashCode() + + 961 * this.service.hashCode() + + 31 * this.operationName.hashCode() + + this.type.hashCode() + 31 * httpStatusCode + (this.synthetics ? 
1 : 0); } @@ -94,6 +102,14 @@ public List getPeerTags() { return peerTags; } + public UTF8BytesString getHttpMethod() { + return httpMethod; + } + + public UTF8BytesString getHttpEndpoint() { + return httpEndpoint; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -110,7 +126,9 @@ public boolean equals(Object o) { && type.equals(metricKey.type) && isTraceRoot == metricKey.isTraceRoot && spanKind.equals(metricKey.spanKind) - && peerTags.equals(metricKey.peerTags); + && peerTags.equals(metricKey.peerTags) + && httpMethod.equals(metricKey.httpMethod) + && httpEndpoint.equals(metricKey.httpEndpoint); } return false; } diff --git a/dd-trace-core/src/main/java/datadog/trace/common/metrics/SerializingMetricWriter.java b/dd-trace-core/src/main/java/datadog/trace/common/metrics/SerializingMetricWriter.java index bdd4e99a178..8903af0418c 100644 --- a/dd-trace-core/src/main/java/datadog/trace/common/metrics/SerializingMetricWriter.java +++ b/dd-trace-core/src/main/java/datadog/trace/common/metrics/SerializingMetricWriter.java @@ -35,6 +35,8 @@ public final class SerializingMetricWriter implements MetricWriter { private static final byte[] IS_TRACE_ROOT = "IsTraceRoot".getBytes(ISO_8859_1); private static final byte[] SPAN_KIND = "SpanKind".getBytes(ISO_8859_1); private static final byte[] PEER_TAGS = "PeerTags".getBytes(ISO_8859_1); + private static final byte[] HTTP_METHOD = "HTTPMethod".getBytes(ISO_8859_1); + private static final byte[] HTTP_ENDPOINT = "HTTPEndpoint".getBytes(ISO_8859_1); // Constant declared here for compile-time folding public static final int TRISTATE_TRUE = TriState.TRUE.serialValue; @@ -104,7 +106,7 @@ public void startBucket(int metricCount, long start, long duration) { @Override public void add(MetricKey key, AggregateMetric aggregate) { - writer.startMap(15); + writer.startMap(17); writer.writeUTF8(NAME); writer.writeUTF8(key.getOperationName()); @@ -138,6 +140,12 @@ public void add(MetricKey key, AggregateMetric aggregate) { writer.writeUTF8(peerTag); } + writer.writeUTF8(HTTP_METHOD); + writer.writeUTF8(key.getHttpMethod()); + + writer.writeUTF8(HTTP_ENDPOINT); + writer.writeUTF8(key.getHttpEndpoint()); + writer.writeUTF8(HITS); writer.writeInt(aggregate.getHitCount()); diff --git a/dd-trace-core/src/main/java/datadog/trace/core/endpoint/EndpointResolver.java b/dd-trace-core/src/main/java/datadog/trace/core/endpoint/EndpointResolver.java new file mode 100644 index 00000000000..9e8ee325b07 --- /dev/null +++ b/dd-trace-core/src/main/java/datadog/trace/core/endpoint/EndpointResolver.java @@ -0,0 +1,142 @@ +package datadog.trace.core.endpoint; + +import static datadog.trace.bootstrap.instrumentation.api.Tags.HTTP_ENDPOINT; + +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Resolves HTTP endpoints for APM trace metrics by determining whether to use http.route or compute + * http.endpoint from the URL. + * + *

This class implements the endpoint inference logic defined in the RFC-1051 for trace resource + * renaming, including route eligibility checks and endpoint computation. + */ +public class EndpointResolver { + private static final Logger log = LoggerFactory.getLogger(EndpointResolver.class); + + private final boolean enabled; + private final boolean alwaysSimplifiedEndpoint; + + /** + * Creates a new EndpointResolver with the given configuration. + * + * @param enabled whether endpoint inference is enabled + * @param alwaysSimplifiedEndpoint whether to always compute endpoint even when route exists + */ + public EndpointResolver(boolean enabled, boolean alwaysSimplifiedEndpoint) { + this.enabled = enabled; + this.alwaysSimplifiedEndpoint = alwaysSimplifiedEndpoint; + } + + /** + * Resolves the endpoint for a span and optionally tags it with http.endpoint. + * + *

Resolution logic:

  1. If disabled, return null
  2. If alwaysSimplifiedEndpoint=true, compute from URL and tag span
  3. If http.route exists and is eligible, use it (no tagging)
  4. Otherwise, compute from URL and tag span with http.endpoint
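A minimal usage sketch of that precedence, added for review context rather than as part of the patch. It assumes the EndpointResolver constructor and resolveEndpoint signature introduced in this file, and that the http.endpoint tag key is written into the map exactly as the method below does.

```java
import datadog.trace.core.endpoint.EndpointResolver;
import java.util.HashMap;
import java.util.Map;

// Sketch only: exercises the precedence described above.
class EndpointResolverSketch {
  public static void main(String[] args) {
    EndpointResolver resolver = new EndpointResolver(true, false);
    Map<String, Object> tags = new HashMap<>();

    // Step 3: an eligible route wins and is returned untouched; no http.endpoint tag is written.
    String fromRoute =
        resolver.resolveEndpoint(tags, "/api/v1/users/{id}", "http://svc/api/v1/users/42");
    System.out.println(fromRoute + " " + tags); // "/api/v1/users/{id} {}"

    // Step 4: a catch-all route is rejected, so the URL is simplified and the span is tagged.
    String fromUrl = resolver.resolveEndpoint(tags, "*", "http://svc/api/v1/users/42");
    System.out.println(fromUrl + " " + tags);
    // "/api/v1/users/{param:int} {http.endpoint=/api/v1/users/{param:int}}"
  }
}
```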
+ * + * @param unsafeTags unsafe tag map. Using at this point, when the span is finished and there + * should not be anymore external interaction, should be considered safe + * @param httpRoute the http.route tag value (may be null) + * @param httpUrl the http.url tag value (may be null) + * @return the resolved endpoint, or null if resolution fails + */ + @Nullable + public String resolveEndpoint( + java.util.Map unsafeTags, + @Nullable String httpRoute, + @Nullable String httpUrl) { + if (!enabled) { + return null; + } + + // If alwaysSimplifiedEndpoint is set, always compute and tag + if (alwaysSimplifiedEndpoint) { + String endpoint = computeEndpoint(httpUrl); + if (endpoint != null) { + unsafeTags.put(HTTP_ENDPOINT, endpoint); + } + return endpoint; + } + + // If route exists and is eligible, use it + if (isRouteEligible(httpRoute)) { + return httpRoute; + } + + // Compute endpoint from URL and tag the span + String endpoint = computeEndpoint(httpUrl); + if (endpoint != null) { + unsafeTags.put(HTTP_ENDPOINT, endpoint); + } + return endpoint; + } + + /** + * Determines if an http.route is eligible for use as an endpoint. + * + *

A route is NOT eligible if it is null, empty, or a catch-all wildcard pattern. Catch-all + * patterns (single or double wildcards) indicate instrumentation problems rather than actual + * routes. Regex fallback patterns are considered eligible. + * + * @param route the http.route value to check + * @return true if the route can be used as an endpoint + */ + public static boolean isRouteEligible(@Nullable String route) { + if (route == null || route.isEmpty()) { + return false; + } + + // Discard catch-all routes that indicate instrumentation problems + if ("*".equals(route) || "*/*".equals(route)) { + return false; + } + + return true; + } + + /** + * Computes an endpoint from a URL using the simplification algorithm. + * + * @param url the http.url tag value + * @return the computed endpoint, or null if URL is null/empty + */ + @Nullable + public static String computeEndpoint(@Nullable String url) { + if (url == null || url.isEmpty()) { + return null; + } + + try { + return EndpointSimplifier.simplifyUrl(url); + } catch (Exception e) { + log.debug("Failed to compute endpoint from URL: {}", url, e); + return null; + } + } + + /** + * Returns whether endpoint inference is enabled. + * + * @return true if enabled + */ + public boolean isEnabled() { + return enabled; + } + + /** + * Returns whether simplified endpoint computation is always used. + * + * @return true if alwaysSimplifiedEndpoint is set + */ + public boolean isAlwaysSimplifiedEndpoint() { + return alwaysSimplifiedEndpoint; + } +} diff --git a/dd-trace-core/src/main/java/datadog/trace/core/endpoint/EndpointSimplifier.java b/dd-trace-core/src/main/java/datadog/trace/core/endpoint/EndpointSimplifier.java new file mode 100644 index 00000000000..93ff6053629 --- /dev/null +++ b/dd-trace-core/src/main/java/datadog/trace/core/endpoint/EndpointSimplifier.java @@ -0,0 +1,189 @@ +package datadog.trace.core.endpoint; + +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Simplifies HTTP URLs to infer endpoints for APM trace metrics. + * + *

This class extracts paths from URLs and simplifies them by replacing common parameter patterns + * (IDs, hex strings, etc.) with standardized placeholders. This reduces cardinality while + * maintaining endpoint recognizability. + * + *

Example:

  /users/123/orders/abc-def-456 → /users/{param:int}/orders/{param:hex_id}
+ */ +public class EndpointSimplifier { + private static final Logger log = LoggerFactory.getLogger(EndpointSimplifier.class); + + /** + * Regex to extract path from URL. Captures the path component between optional scheme+host and + * optional query string. Group "path" contains the extracted path. + */ + private static final Pattern URL_PATH_PATTERN = + Pattern.compile("^(?:[a-z]+://(?:[^?/]+))?(?/[^?]*)(?:(\\?).*)?$"); + + /** + * Maximum number of path segments to keep after simplification. Prevents cardinality explosion + * from very deep URLs. + */ + private static final int MAX_SEGMENTS = 8; + + /** Default endpoint when path is empty or cannot be processed. */ + private static final String DEFAULT_ENDPOINT = "/"; + + /** + * Simplifies a full URL to an inferred endpoint. + * + *

Process:

  1. Extract path from URL
  2. Split path into segments
  3. Keep only first 8 non-empty segments
  4. Simplify each segment using pattern matching
  5. Reconstruct path with simplified segments
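For review context only, a sketch of how the steps above transform one concrete URL. It assumes the static simplifyUrl method defined just below; the expected output mirrors the examples in this file's tests.

```java
import datadog.trace.core.endpoint.EndpointSimplifier;

// Sketch: walking one URL through the steps listed above (not part of the patch).
class SimplifyUrlSketch {
  public static void main(String[] args) {
    String url = "https://shop.example.com/users/123/orders/abc-def-456?expand=items";
    // 1. extract path          -> "/users/123/orders/abc-def-456"
    // 2. split into segments   -> ["users", "123", "orders", "abc-def-456"]
    // 3. already under the 8-segment cap, nothing dropped
    // 4. simplify each segment -> ["users", "{param:int}", "orders", "{param:hex_id}"]
    // 5. reconstruct the path
    String endpoint = EndpointSimplifier.simplifyUrl(url);
    System.out.println(endpoint); // "/users/{param:int}/orders/{param:hex_id}"
  }
}
```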
+ * + * @param url the full URL (e.g., "https://example.com/users/123?foo=bar") + * @return the simplified endpoint (e.g., "/users/{param:int}") + */ + public static String simplifyUrl(@Nullable String url) { + if (url == null || url.isEmpty()) { + return DEFAULT_ENDPOINT; + } + + String path = extractPath(url); + if (path == null || path.isEmpty()) { + return DEFAULT_ENDPOINT; + } + + return simplifyPath(path); + } + + /** + * Extracts the path component from a URL. + * + *

Handles various URL formats:

  • Full URLs: "http://example.com/path" → "/path"
  • Path-only: "/path" → "/path"
  • With query: "/path?query" → "/path"
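The matcher below reads a named group called "path", so the URL pattern needs a (?<path>...) capturing group; the <path> token reads like an HTML tag and is easy to lose in rendering. A self-contained sketch of that extraction follows, with the pattern written out as an assumption reconstructed from the surrounding code and tests, not copied from the patch.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Sketch only: the named group "path" is what matcher.group("path") relies on.
class ExtractPathSketch {
  // Assumed full form of URL_PATH_PATTERN, including the <path> named group.
  private static final Pattern URL_PATH_PATTERN =
      Pattern.compile("^(?:[a-z]+://(?:[^?/]+))?(?<path>/[^?]*)(?:(\\?).*)?$");

  static String extractPath(String url) {
    Matcher matcher = URL_PATH_PATTERN.matcher(url);
    return matcher.matches() ? matcher.group("path") : null;
  }

  public static void main(String[] args) {
    System.out.println(extractPath("http://example.com/users/123")); // /users/123
    System.out.println(extractPath("/users/123?foo=bar"));           // /users/123
    System.out.println(extractPath("not-a-url"));                    // null
  }
}
```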
+ * + * @param url the URL to extract from + * @return the extracted path, or null if extraction fails + */ + @Nullable + static String extractPath(String url) { + try { + Matcher matcher = URL_PATH_PATTERN.matcher(url); + if (matcher.matches()) { + return matcher.group("path"); + } + } catch (Exception e) { + log.debug("Failed to extract path from URL: {}", url, e); + } + return null; + } + + /** + * Simplifies a URL path by replacing segments with patterns. + * + *

Example:

  /users/123/orders/abc-def-456 → /users/{param:int}/orders/{param:hex_id}
+ * + * @param path the URL path to simplify + * @return the simplified path + */ + public static String simplifyPath(String path) { + if (path == null || path.isEmpty()) { + return DEFAULT_ENDPOINT; + } + + // Special case: root path + if (path.equals("/")) { + return DEFAULT_ENDPOINT; + } + + List segments = splitAndLimitSegments(path, MAX_SEGMENTS); + + // If no segments remain after filtering, return root + if (segments.isEmpty()) { + return DEFAULT_ENDPOINT; + } + + // Simplify each segment and reconstruct path + StringBuilder result = new StringBuilder(); + for (String segment : segments) { + result.append('/'); + result.append(SegmentPattern.simplify(segment)); + } + + return result.toString(); + } + + /** + * Splits a path into segments and limits to the first N non-empty segments. + * + *

Example:

  "/users//123/orders" with limit 3 → ["users", "123", "orders"]
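A quick sketch of the split-and-limit behavior described above. It assumes the helper keeps the package-private static signature shown below, so the caller would need to sit in the same package.

```java
import java.util.List;

// Sketch: empty segments are dropped and at most maxSegments are kept.
// Must live in datadog.trace.core.endpoint to reach the package-private helper.
class SplitSegmentsSketch {
  public static void main(String[] args) {
    List<String> segments =
        EndpointSimplifier.splitAndLimitSegments("/users//123/orders/456", 3);
    System.out.println(segments); // [users, 123, orders]
  }
}
```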
+ * + * @param path the path to split + * @param maxSegments maximum number of segments to keep + * @return list of non-empty segments (limited) + */ + static List splitAndLimitSegments(String path, int maxSegments) { + List segments = new ArrayList<>(maxSegments); + + // Manually split on '/' without regex to avoid forbidden API + int start = 0; + int length = path.length(); + + for (int i = 0; i < length; i++) { + if (path.charAt(i) == '/') { + if (i > start) { + // Found a non-empty segment + segments.add(path.substring(start, i)); + if (segments.size() >= maxSegments) { + return segments; + } + } + start = i + 1; + } + } + + // Add final segment if exists + if (start < length) { + segments.add(path.substring(start)); + } + + return segments; + } + + /** + * Simplifies a single path segment using pattern matching. This is a convenience method + * delegating to {@link SegmentPattern#simplify(String)}. + * + * @param segment the segment to simplify + * @return the simplified segment + */ + public static String simplifySegment(String segment) { + return SegmentPattern.simplify(segment); + } +} diff --git a/dd-trace-core/src/main/java/datadog/trace/core/endpoint/SegmentPattern.java b/dd-trace-core/src/main/java/datadog/trace/core/endpoint/SegmentPattern.java new file mode 100644 index 00000000000..1bfb2323703 --- /dev/null +++ b/dd-trace-core/src/main/java/datadog/trace/core/endpoint/SegmentPattern.java @@ -0,0 +1,90 @@ +package datadog.trace.core.endpoint; + +import java.util.regex.Pattern; + +/** + * Defines the patterns used to simplify URL path segments for endpoint inference. These patterns + * replace common parameter types (IDs, hex strings, etc.) with standardized placeholders to reduce + * cardinality in APM metrics. + * + *

Patterns are applied in order, and the first match wins. + */ +public enum SegmentPattern { + /** + * Matches integers of size at least 2 digits. Examples: "123", "9876" Replacement: "{param:int}" + */ + INTEGER(Pattern.compile("^[1-9][0-9]+$"), "{param:int}"), + + /** + * Matches mixed strings with digits and delimiters (at least 3 chars). Must contain at least one + * digit. Examples: "user-123", "order_456", "item.789" Replacement: "{param:int_id}" + */ + INT_ID(Pattern.compile("^(?=.*[0-9].*)[0-9._-]{3,}$"), "{param:int_id}"), + + /** + * Matches hexadecimal strings of size at least 6 chars. Must contain at least one decimal digit + * (0-9). Examples: "abc123", "deadbeef", "A1B2C3" Replacement: "{param:hex}" + */ + HEX(Pattern.compile("^(?=.*[0-9].*)[A-Fa-f0-9]{6,}$"), "{param:hex}"), + + /** + * Matches mixed strings with hex digits and delimiters (at least 6 chars). Must contain at least + * one decimal digit. Examples: "uuid-abc123", "token_def456" Replacement: "{param:hex_id}" + */ + HEX_ID(Pattern.compile("^(?=.*[0-9].*)[A-Fa-f0-9._-]{6,}$"), "{param:hex_id}"), + + /** + * Matches long strings (20+ chars) or strings with special characters. Special chars: % & ' ( ) * + * + , : = @ Examples: "very-long-string-with-many-characters", "param%20value", + * "user@example.com" Replacement: "{param:str}" + */ + STRING(Pattern.compile("^(.{20,}|.*[%&'()*+,:=@].*)$"), "{param:str}"); + + private final Pattern pattern; + private final String replacement; + + SegmentPattern(Pattern pattern, String replacement) { + this.pattern = pattern; + this.replacement = replacement; + } + + /** + * Tests if the given segment matches this pattern. + * + * @param segment the URL path segment to test + * @return true if the segment matches this pattern + */ + public boolean matches(String segment) { + return pattern.matcher(segment).matches(); + } + + /** + * Gets the replacement string for this pattern. + * + * @return the replacement placeholder + */ + public String getReplacement() { + return replacement; + } + + /** + * Attempts to simplify the given segment by matching against all patterns in order. If no pattern + * matches, returns the original segment unchanged. 
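A short sketch of that ordering guarantee: because values() is walked top to bottom, a segment that several patterns could describe gets the first replacement. It assumes the public static simplify method declared just below.

```java
import datadog.trace.core.endpoint.SegmentPattern;

// Sketch: first match wins, so declaration order matters.
class SegmentPatternSketch {
  public static void main(String[] args) {
    System.out.println(SegmentPattern.simplify("123456"));      // {param:int} (INTEGER wins before HEX)
    System.out.println(SegmentPattern.simplify("deadbeef1"));   // {param:hex}
    System.out.println(SegmentPattern.simplify("abc-123-def")); // {param:hex_id}
    System.out.println(SegmentPattern.simplify("users"));       // users (no match, unchanged)
  }
}
```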
+ * + * @param segment the URL path segment to simplify + * @return the simplified segment or the original if no pattern matches + */ + public static String simplify(String segment) { + if (segment == null || segment.isEmpty()) { + return segment; + } + + for (SegmentPattern pattern : values()) { + if (pattern.matches(segment)) { + return pattern.getReplacement(); + } + } + + return segment; + } +} diff --git a/dd-trace-core/src/main/java/datadog/trace/core/tagprocessor/HttpEndpointPostProcessor.java b/dd-trace-core/src/main/java/datadog/trace/core/tagprocessor/HttpEndpointPostProcessor.java new file mode 100644 index 00000000000..66fe07e3d62 --- /dev/null +++ b/dd-trace-core/src/main/java/datadog/trace/core/tagprocessor/HttpEndpointPostProcessor.java @@ -0,0 +1,109 @@ +package datadog.trace.core.tagprocessor; + +import static datadog.trace.bootstrap.instrumentation.api.Tags.HTTP_METHOD; +import static datadog.trace.bootstrap.instrumentation.api.Tags.HTTP_ROUTE; +import static datadog.trace.bootstrap.instrumentation.api.Tags.HTTP_URL; + +import datadog.trace.api.TagMap; +import datadog.trace.bootstrap.instrumentation.api.AgentSpanLink; +import datadog.trace.bootstrap.instrumentation.api.ResourceNamePriorities; +import datadog.trace.core.DDSpanContext; +import datadog.trace.core.endpoint.EndpointResolver; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Post-processes HTTP spans to update resource names based on inferred endpoints. + * + *

This processor implements the trace resource renaming feature by:

  • Using EndpointResolver to determine the best endpoint (route or simplified URL)
  • Combining HTTP method with the endpoint to create a resource name (e.g., "GET /users/{param:int}")
  • Updating the span's resource name only when an endpoint is available
+ * + *

The processor respects the endpoint resolution logic:

  • If alwaysSimplifiedEndpoint=true, always compute from URL
  • If http.route exists and is eligible, use it
  • Otherwise, compute simplified endpoint from URL
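For illustration, a sketch of the resulting resource name when no usable route is present. It assumes renaming is enabled and reuses the simplifier added in this change; the processor itself performs the equivalent via resolveEndpoint and setResourceName with the HTTP_SERVER_RESOURCE_RENAMING priority shown below.

```java
import datadog.trace.core.endpoint.EndpointSimplifier;

// Sketch: how the method + endpoint combination produces a resource name
// (assumes renaming is enabled and http.route is absent or ineligible).
class ResourceRenamingSketch {
  public static void main(String[] args) {
    String httpMethod = "GET";
    String endpoint = EndpointSimplifier.simplifyUrl("http://svc.example.com/users/123/profile");
    String resourceName = httpMethod + " " + endpoint;
    System.out.println(resourceName); // "GET /users/{param:int}/profile"
  }
}
```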
+ */ +public class HttpEndpointPostProcessor extends TagsPostProcessor { + private static final Logger log = LoggerFactory.getLogger(HttpEndpointPostProcessor.class); + + private final EndpointResolver endpointResolver; + + /** Creates a new HttpEndpointPostProcessor using the global config. */ + public HttpEndpointPostProcessor() { + this( + new EndpointResolver( + datadog.trace.api.Config.get().isTraceResourceRenamingEnabled(), + datadog.trace.api.Config.get().isTraceResourceRenamingAlwaysSimplifiedEndpoint())); + } + + /** + * Creates a new HttpEndpointPostProcessor with the given endpoint resolver. + * + *

Visible for testing. + * + * @param endpointResolver the resolver to use for endpoint inference + */ + HttpEndpointPostProcessor(EndpointResolver endpointResolver) { + this.endpointResolver = endpointResolver; + } + + @Override + public void processTags( + TagMap unsafeTags, DDSpanContext spanContext, List spanLinks) { + if (!endpointResolver.isEnabled()) { + log.debug("EndpointResolver is not enabled, skipping HTTP endpoint post processing"); + return; + } + + // Extract HTTP tags + Object httpMethodObj = unsafeTags.get(HTTP_METHOD); + Object httpRouteObj = unsafeTags.get(HTTP_ROUTE); + Object httpUrlObj = unsafeTags.get(HTTP_URL); + + log.debug( + "Processing tags for span {}: httpMethod={}, httpRoute={}, httpUrl={}", + spanContext.getSpanId(), + httpMethodObj, + httpRouteObj, + httpUrlObj); + + if (httpMethodObj == null) { + // Not an HTTP span, skip processing + log.debug("No HTTP method found, skipping HTTP endpoint post processing"); + return; + } + + String httpMethod = httpMethodObj.toString(); + String httpRoute = httpRouteObj != null ? httpRouteObj.toString() : null; + String httpUrl = httpUrlObj != null ? httpUrlObj.toString() : null; + + // Resolve endpoint using EndpointResolver + // Pass unsafeTags directly - it's safe to use at this point since span is finished + String endpoint = endpointResolver.resolveEndpoint(unsafeTags, httpRoute, httpUrl); + + if (endpoint != null && !endpoint.isEmpty()) { + // Combine method and endpoint into resource name + String resourceName = httpMethod + " " + endpoint; + spanContext.setResourceName( + resourceName, ResourceNamePriorities.HTTP_SERVER_RESOURCE_RENAMING); + + log.debug( + "Updated resource name to '{}' for span {} (method={}, route={}, url={})", + resourceName, + spanContext.getSpanId(), + httpMethod, + httpRoute, + httpUrl); + } + } +} diff --git a/dd-trace-core/src/main/java/datadog/trace/core/tagprocessor/TagsPostProcessorFactory.java b/dd-trace-core/src/main/java/datadog/trace/core/tagprocessor/TagsPostProcessorFactory.java index 2687211e5b2..679fe25a932 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/tagprocessor/TagsPostProcessorFactory.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/tagprocessor/TagsPostProcessorFactory.java @@ -13,11 +13,16 @@ private static class Lazy { private static TagsPostProcessor lazyProcessor = createLazyChain(); private static TagsPostProcessor createEagerChain() { - final List processors = new ArrayList<>(2); + final List processors = new ArrayList<>(3); processors.add(new PeerServiceCalculator()); if (addBaseService) { processors.add(new BaseServiceAdder(Config.get().getServiceName())); } + // Add HTTP endpoint post processor for resource renaming + // This must run BEFORE metrics aggregation so the correct resource name is used in metrics + if (Config.get().isTraceResourceRenamingEnabled()) { + processors.add(new HttpEndpointPostProcessor()); + } return new PostProcessorChain(processors.toArray(new TagsPostProcessor[0])); } diff --git a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/AggregateMetricTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/AggregateMetricTest.groovy index 3c7a247cae3..baf0e635b90 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/AggregateMetricTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/AggregateMetricTest.groovy @@ -52,7 +52,7 @@ class AggregateMetricTest extends DDSpecification { given: AggregateMetric aggregate = new AggregateMetric().recordDurations(3, new 
AtomicLongArray(0L, 0L, 0L | ERROR_TAG | TOP_LEVEL_TAG)) - Batch batch = new Batch().reset(new MetricKey("foo", "bar", "qux", "type", 0, false, true, "corge", [UTF8BytesString.create("grault:quux")])) + Batch batch = new Batch().reset(new MetricKey("foo", "bar", "qux", "type", 0, false, true, "corge", [UTF8BytesString.create("grault:quux")], null, null)) batch.add(0L, 10) batch.add(0L, 10) batch.add(0L, 10) @@ -127,7 +127,7 @@ class AggregateMetricTest extends DDSpecification { def "consistent under concurrent attempts to read and write"() { given: AggregateMetric aggregate = new AggregateMetric() - MetricKey key = new MetricKey("foo", "bar", "qux", "type", 0, false, true, "corge", [UTF8BytesString.create("grault:quux")]) + MetricKey key = new MetricKey("foo", "bar", "qux", "type", 0, false, true, "corge", [UTF8BytesString.create("grault:quux")], null, null) BlockingDeque queue = new LinkedBlockingDeque<>(1000) ExecutorService reader = Executors.newSingleThreadExecutor() int writerCount = 10 diff --git a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy index 34f666a61e4..4f4c6aac22f 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy @@ -127,7 +127,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getTopLevelCount() == 1 && value.getDuration() == 100 } @@ -169,7 +171,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getTopLevelCount() == 1 && value.getDuration() == 100 } @@ -194,6 +198,12 @@ class ConflatingMetricAggregatorTest extends DDSpecification { CountDownLatch latch = new CountDownLatch(1) def span = new SimpleSpan("service", "operation", "resource", "type", false, false, false, 0, 100, HTTP_OK) .setTag(SPAN_KIND, kind) + if (httpMethod != null) { + span.setTag("http.method", httpMethod) + } + if (httpEndpoint != null) { + span.setTag("http.endpoint", httpEndpoint) + } aggregator.publish([span]) aggregator.report() def latchTriggered = latch.await(2, SECONDS) @@ -211,7 +221,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, kind, - [] + [], + httpMethod, + httpEndpoint ), { AggregateMetric aggregateMetric -> aggregateMetric.getHitCount() == 1 && aggregateMetric.getTopLevelCount() == 0 && aggregateMetric.getDuration() == 100 }) @@ -221,13 +233,17 @@ class ConflatingMetricAggregatorTest extends DDSpecification { aggregator.close() where: - kind | statsComputed - "client" | true - "producer" | true - "consumer" | true - UTF8BytesString.create("server") | true - "internal" | false - null | false + kind | httpMethod | httpEndpoint | statsComputed + "client" | null | null | true + "producer" | null | null | true + "consumer" | null | null | true + UTF8BytesString.create("server") | null | null | true + "internal" | null | null | false + null | null | null | false + "server" | "GET" | "/api/users/:id" | true + "server" | "POST" | "/api/orders" | true + "server" | "DELETE" | "/api/products/:id" | true + "client" | "GET" | "/external/api" | true } def "should 
create bucket for each set of peer tags"() { @@ -265,7 +281,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "client", - [UTF8BytesString.create("country:france")] + [UTF8BytesString.create("country:france")], + null, + null ), { AggregateMetric aggregateMetric -> aggregateMetric.getHitCount() == 1 && aggregateMetric.getTopLevelCount() == 0 && aggregateMetric.getDuration() == 100 }) @@ -279,7 +297,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "client", - [UTF8BytesString.create("country:france"), UTF8BytesString.create("georegion:europe")] + [UTF8BytesString.create("country:france"), UTF8BytesString.create("georegion:europe")], + null, + null ), { AggregateMetric aggregateMetric -> aggregateMetric.getHitCount() == 1 && aggregateMetric.getTopLevelCount() == 0 && aggregateMetric.getDuration() == 100 }) @@ -322,7 +342,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, kind, - expectedPeerTags + expectedPeerTags, + null, + null ), { AggregateMetric aggregateMetric -> aggregateMetric.getHitCount() == 1 && aggregateMetric.getTopLevelCount() == 0 && aggregateMetric.getDuration() == 100 }) @@ -370,7 +392,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), { AggregateMetric value -> value.getHitCount() == 1 && value.getTopLevelCount() == topLevelCount && value.getDuration() == 100 }) @@ -425,7 +449,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), { AggregateMetric value -> value.getHitCount() == count && value.getDuration() == count * duration }) @@ -438,7 +464,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), { AggregateMetric value -> value.getHitCount() == count && value.getDuration() == count * duration * 2 }) @@ -450,6 +478,298 @@ class ConflatingMetricAggregatorTest extends DDSpecification { count << [10, 100] } + def "aggregate spans with same HTTP endpoint together, separate different endpoints"() { + setup: + MetricWriter writer = Mock(MetricWriter) + Sink sink = Stub(Sink) + DDAgentFeaturesDiscovery features = Mock(DDAgentFeaturesDiscovery) + features.supportsMetrics() >> true + features.peerTags() >> [] + ConflatingMetricsAggregator aggregator = new ConflatingMetricsAggregator(empty, + features, HealthMetrics.NO_OP, sink, writer, 10, queueSize, reportingInterval, SECONDS) + aggregator.start() + + when: "publish multiple spans with same endpoint" + CountDownLatch latch = new CountDownLatch(1) + int count = 5 + long duration = 100 + for (int i = 0; i < count; ++i) { + aggregator.publish([ + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration, HTTP_OK) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "GET") + .setTag("http.endpoint", "/api/users/:id") + ]) + } + aggregator.report() + def latchTriggered = latch.await(2, SECONDS) + + then: "should aggregate into single metric" + latchTriggered + 1 * writer.startBucket(1, _, _) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + HTTP_OK, + false, + false, + "server", + [], + "GET", + "/api/users/:id" + ), { AggregateMetric value -> + value.getHitCount() == count && value.getDuration() == count * duration + }) + 1 * writer.finishBucket() >> { latch.countDown() } + + when: "publish spans with different endpoints" + CountDownLatch latch2 = new CountDownLatch(1) + 
aggregator.publish([ + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration, HTTP_OK) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "GET") + .setTag("http.endpoint", "/api/users/:id"), + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration * 2, HTTP_OK) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "GET") + .setTag("http.endpoint", "/api/orders/:id"), + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration * 3, HTTP_OK) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "POST") + .setTag("http.endpoint", "/api/users/:id") + ]) + aggregator.report() + def latchTriggered2 = latch2.await(2, SECONDS) + + then: "should create separate metrics for each endpoint/method combination" + latchTriggered2 + 1 * writer.startBucket(3, _, _) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + HTTP_OK, + false, + false, + "server", + [], + "GET", + "/api/users/:id" + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration + }) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + HTTP_OK, + false, + false, + "server", + [], + "GET", + "/api/orders/:id" + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration * 2 + }) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + HTTP_OK, + false, + false, + "server", + [], + "POST", + "/api/users/:id" + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration * 3 + }) + 1 * writer.finishBucket() >> { latch2.countDown() } + + cleanup: + aggregator.close() + } + + def "create separate metrics for different HTTP method/endpoint/status combinations"() { + setup: + MetricWriter writer = Mock(MetricWriter) + Sink sink = Stub(Sink) + DDAgentFeaturesDiscovery features = Mock(DDAgentFeaturesDiscovery) + features.supportsMetrics() >> true + features.peerTags() >> [] + ConflatingMetricsAggregator aggregator = new ConflatingMetricsAggregator(empty, + features, HealthMetrics.NO_OP, sink, writer, 10, queueSize, reportingInterval, SECONDS) + aggregator.start() + + when: "publish spans with different combinations" + CountDownLatch latch = new CountDownLatch(1) + long duration = 100 + aggregator.publish([ + // Same endpoint, different methods + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration, 200) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "GET") + .setTag("http.endpoint", "/api/users/:id"), + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration * 2, 200) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "POST") + .setTag("http.endpoint", "/api/users/:id"), + // Same method/endpoint, different status + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration * 3, 404) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "GET") + .setTag("http.endpoint", "/api/users/:id"), + // Different endpoint + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration * 4, 200) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "GET") + .setTag("http.endpoint", "/api/orders/:id") + ]) + aggregator.report() + def latchTriggered = latch.await(2, SECONDS) + + then: "should create 4 separate metrics" + latchTriggered + 1 * writer.startBucket(4, _, _) + 1 * writer.add(new MetricKey( + 
"resource", + "service", + "operation", + "type", + 200, + false, + false, + "server", + [], + "GET", + "/api/users/:id" + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration + }) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + 200, + false, + false, + "server", + [], + "POST", + "/api/users/:id" + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration * 2 + }) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + 404, + false, + false, + "server", + [], + "GET", + "/api/users/:id" + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration * 3 + }) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + 200, + false, + false, + "server", + [], + "GET", + "/api/orders/:id" + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration * 4 + }) + 1 * writer.finishBucket() >> { latch.countDown() } + + cleanup: + aggregator.close() + } + + def "handle spans without HTTP endpoint tags for backward compatibility"() { + setup: + MetricWriter writer = Mock(MetricWriter) + Sink sink = Stub(Sink) + DDAgentFeaturesDiscovery features = Mock(DDAgentFeaturesDiscovery) + features.supportsMetrics() >> true + features.peerTags() >> [] + ConflatingMetricsAggregator aggregator = new ConflatingMetricsAggregator(empty, + features, HealthMetrics.NO_OP, sink, writer, 10, queueSize, reportingInterval, SECONDS) + aggregator.start() + + when: "publish spans with and without HTTP tags" + CountDownLatch latch = new CountDownLatch(1) + long duration = 100 + aggregator.publish([ + // Span without HTTP tags (legacy behavior) + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration, 200) + .setTag(SPAN_KIND, "server"), + // Span with HTTP tags (new behavior) + new SimpleSpan("service", "operation", "resource", "type", true, false, false, 0, duration * 2, 200) + .setTag(SPAN_KIND, "server") + .setTag("http.method", "GET") + .setTag("http.endpoint", "/api/users/:id") + ]) + aggregator.report() + def latchTriggered = latch.await(2, SECONDS) + + then: "should create separate metric keys for spans with and without HTTP tags" + latchTriggered + 1 * writer.startBucket(2, _, _) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + 200, + false, + false, + "server", + [], + null, + null + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration + }) + 1 * writer.add(new MetricKey( + "resource", + "service", + "operation", + "type", + 200, + false, + false, + "server", + [], + "GET", + "/api/users/:id" + ), { AggregateMetric value -> + value.getHitCount() == 1 && value.getDuration() == duration * 2 + }) + 1 * writer.finishBucket() >> { latch.countDown() } + + cleanup: + aggregator.close() + } + def "test least recently written to aggregate flushed when size limit exceeded"() { setup: int maxAggregates = 10 @@ -487,7 +807,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getDuration() == duration } @@ -501,7 +823,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), _) 1 * writer.finishBucket() >> { latch.countDown() } @@ -546,7 +870,9 @@ class ConflatingMetricAggregatorTest extends 
DDSpecification { false, false, "baz", - [] + [], + null, + null ), { AggregateMetric value -> value.getHitCount() == 1 && value.getDuration() == duration }) @@ -577,7 +903,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), { AggregateMetric value -> value.getHitCount() == 1 && value.getDuration() == duration }) @@ -591,7 +919,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "baz", - [] + [], + null, + null ), _) 1 * writer.finishBucket() >> { latch.countDown() } @@ -636,7 +966,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, false, "quux", - [] + [], + null, + null ), { AggregateMetric value -> value.getHitCount() == 1 && value.getDuration() == duration }) @@ -691,7 +1023,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, true, "garply", - [] + [], + null, + null ), { AggregateMetric value -> value.getHitCount() == 1 && value.getDuration() == duration }) @@ -854,7 +1188,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { false, true, "", - [] + [], + null, + null ), { AggregateMetric aggregateMetric -> aggregateMetric.getHitCount() == 1 && aggregateMetric.getTopLevelCount() == 1 && aggregateMetric.getDuration() == 100 }) diff --git a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/SerializingMetricWriterTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/SerializingMetricWriterTest.groovy index 88ef1cbc66a..d5c55104787 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/SerializingMetricWriterTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/SerializingMetricWriterTest.groovy @@ -57,7 +57,9 @@ class SerializingMetricWriterTest extends DDSpecification { UTF8BytesString.create("country:canada"), UTF8BytesString.create("georegion:amer"), UTF8BytesString.create("peer.service:remote-service") - ] + ], + null, + null ), new AggregateMetric().recordDurations(10, new AtomicLongArray(1L)) ), @@ -76,8 +78,26 @@ class SerializingMetricWriterTest extends DDSpecification { UTF8BytesString.create("georegion:amer"), UTF8BytesString.create("peer.service:remote-service") ], + null, + null ), new AggregateMetric().recordDurations(9, new AtomicLongArray(1L)) + ), + Pair.of( + new MetricKey( + "GET /api/users/:id", + "web-service", + "http.request", + "web", + 200, + false, + true, + "server", + [], + "GET", + "/api/users/:id" + ), + new AggregateMetric().recordDurations(5, new AtomicLongArray(1L)) ) ], (0..10000).collect({ i -> @@ -91,7 +111,9 @@ class SerializingMetricWriterTest extends DDSpecification { false, false, "producer", - [UTF8BytesString.create("messaging.destination:dest" + i)] + [UTF8BytesString.create("messaging.destination:dest" + i)], + null, + null ), new AggregateMetric().recordDurations(10, new AtomicLongArray(1L)) ) @@ -157,7 +179,7 @@ class SerializingMetricWriterTest extends DDSpecification { MetricKey key = pair.getLeft() AggregateMetric value = pair.getRight() int metricMapSize = unpacker.unpackMapHeader() - assert metricMapSize == 15 + assert metricMapSize == 17 int elementCount = 0 assert unpacker.unpackString() == "Name" assert unpacker.unpackString() == key.getOperationName() as String @@ -191,6 +213,12 @@ class SerializingMetricWriterTest extends DDSpecification { assert unpackedPeerTag == key.getPeerTags()[i].toString() } ++elementCount + assert unpacker.unpackString() == "HTTPMethod" + assert unpacker.unpackString() == 
(key.getHttpMethod() as String ?: "") + ++elementCount + assert unpacker.unpackString() == "HTTPEndpoint" + assert unpacker.unpackString() == (key.getHttpEndpoint() as String ?: "") + ++elementCount assert unpacker.unpackString() == "Hits" assert unpacker.unpackInt() == value.getHitCount() ++elementCount diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/EndpointResolverTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/EndpointResolverTest.groovy new file mode 100644 index 00000000000..07525fdafb0 --- /dev/null +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/EndpointResolverTest.groovy @@ -0,0 +1,187 @@ +package datadog.trace.core.endpoint + +import spock.lang.Specification + +class EndpointResolverTest extends Specification { + + def "isRouteEligible identifies valid routes"() { + expect: + EndpointResolver.isRouteEligible(input) == expected + + where: + input | expected + // Valid routes + "/users" | true + "/api/v1/users" | true + "/orders/123" | true + "/.*/" | true // Regex fallback is valid + "/users/*" | true // Partial wildcard is valid + // Invalid routes (instrumentation problems) + "*" | false // Catch-all + "*/*" | false // Double catch-all + null | false // Missing + "" | false // Empty + } + + def "computeEndpoint delegates to EndpointSimplifier"() { + expect: + EndpointResolver.computeEndpoint(input) == expected + + where: + input | expected + "/users/123/orders/456" | "/users/{param:int}/orders/{param:int}" + "http://example.com/users/123" | "/users/{param:int}" + null | null + "" | null + } + + def "disabled resolver returns null"() { + given: + def resolver = new EndpointResolver(false, false) + def unsafeTags = [:] + + when: + def result = resolver.resolveEndpoint(unsafeTags, "/users", "http://example.com/users/123") + + then: + result == null + unsafeTags.isEmpty() // No tagging + } + + def "alwaysSimplifiedEndpoint computes from URL even with valid route"() { + given: + def resolver = new EndpointResolver(true, true) + def unsafeTags = [:] + + when: + def result = resolver.resolveEndpoint(unsafeTags, "/users", "http://example.com/users/123") + + then: + result == "/users/{param:int}" + unsafeTags["http.endpoint"] == "/users/{param:int}" + } + + def "uses eligible route without tagging"() { + given: + def resolver = new EndpointResolver(true, false) + def unsafeTags = [:] + + when: + def result = resolver.resolveEndpoint(unsafeTags, "/api/v1/users", "http://example.com/users/123") + + then: + result == "/api/v1/users" + unsafeTags.isEmpty() // No tagging when using route + } + + def "computes endpoint and tags when route is ineligible"() { + given: + def resolver = new EndpointResolver(true, false) + def unsafeTags = [:] + + when: + def result = resolver.resolveEndpoint(unsafeTags, ineligibleRoute, url) + + then: + result == expectedEndpoint + unsafeTags["http.endpoint"] == expectedEndpoint + + where: + ineligibleRoute | url | expectedEndpoint + "*" | "http://example.com/users/123" | "/users/{param:int}" + "*/*" | "http://example.com/orders/456" | "/orders/{param:int}" + null | "http://example.com/api/v1" | "/api/v1" + "" | "http://example.com/search" | "/search" + } + + def "isEnabled returns configuration value"() { + expect: + new EndpointResolver(true, false).isEnabled() == true + new EndpointResolver(false, false).isEnabled() == false + } + + def "isAlwaysSimplifiedEndpoint returns configuration value"() { + expect: + new EndpointResolver(true, true).isAlwaysSimplifiedEndpoint() == true + new 
EndpointResolver(true, false).isAlwaysSimplifiedEndpoint() == false + } + + def "real-world scenario: service with route"() { + given: + def resolver = new EndpointResolver(true, false) + def unsafeTags = [:] + + when: "framework provides accurate route" + def result = resolver.resolveEndpoint(unsafeTags, "/api/v2/customers/{id}/orders", "http://api.example.com/api/v2/customers/123/orders") + + then: + result == "/api/v2/customers/{id}/orders" + unsafeTags.isEmpty() // Uses route, no endpoint tag + } + + def "real-world scenario: proxy without route"() { + given: + def resolver = new EndpointResolver(true, false) + def unsafeTags = [:] + + when: "no route available (tracer in proxy)" + def result = resolver.resolveEndpoint(unsafeTags, null, "http://api.example.com/api/v2/customers/123/orders") + + then: + result == "/api/v2/customers/{param:int}/orders" + unsafeTags["http.endpoint"] == "/api/v2/customers/{param:int}/orders" + } + + def "real-world scenario: bad instrumentation with catch-all"() { + given: + def resolver = new EndpointResolver(true, false) + def unsafeTags = [:] + + when: "instrumentation provides unhelpful catch-all route" + def result = resolver.resolveEndpoint(unsafeTags, "*/*", "http://api.example.com/users/abc-123/profile") + + then: + result == "/users/{param:hex_id}/profile" + unsafeTags["http.endpoint"] == "/users/{param:hex_id}/profile" + } + + def "real-world scenario: testing with always simplified"() { + given: + def resolver = new EndpointResolver(true, true) + def unsafeTags = [:] + + when: "testing mode to compare with backend inference" + def result = resolver.resolveEndpoint(unsafeTags, "/api/v1/users/{userId}", "http://api.example.com/api/v1/users/123") + + then: + result == "/api/v1/users/{param:int}" + unsafeTags["http.endpoint"] == "/api/v1/users/{param:int}" + } + + def "edge case: malformed URL returns default endpoint"() { + given: + def resolver = new EndpointResolver(true, false) + def unsafeTags = [:] + + when: + def result = resolver.resolveEndpoint(unsafeTags, null, "not-a-url") + + then: + // EndpointSimplifier returns "/" for malformed URLs + result == "/" + unsafeTags["http.endpoint"] == "/" + } + + def "edge case: both route and URL missing"() { + given: + def resolver = new EndpointResolver(true, false) + def unsafeTags = [:] + + when: + def result = resolver.resolveEndpoint(unsafeTags, null, null) + + then: + result == null + unsafeTags.isEmpty() + } +} diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/EndpointSimplifierTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/EndpointSimplifierTest.groovy new file mode 100644 index 00000000000..3bc8e808280 --- /dev/null +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/EndpointSimplifierTest.groovy @@ -0,0 +1,257 @@ +package datadog.trace.core.endpoint + +import spock.lang.Specification + +class EndpointSimplifierTest extends Specification { + + def "extractPath handles various URL formats"() { + expect: + EndpointSimplifier.extractPath(input) == expected + + where: + input | expected + // Full URLs + "http://example.com/users/123" | "/users/123" + "https://api.example.com/v1/orders" | "/v1/orders" + "http://localhost:8080/path" | "/path" + // Path only + "/users/123" | "/users/123" + "/api/v1/orders" | "/api/v1/orders" + "/" | "/" + // With query strings + "/users/123?foo=bar" | "/users/123" + "http://example.com/path?query=value&x=y" | "/path" + "/search?q=test" | "/search" + // Edge cases + "/path?query" | "/path" + "/path?" 
| "/path" + "http://example.com/" | "/" + // Invalid/malformed - should return null or handle gracefully + "" | null + "not-a-url" | null + "http://" | null + } + + def "splitAndLimitSegments splits and limits correctly"() { + expect: + EndpointSimplifier.splitAndLimitSegments(input, limit) == expected + + where: + input | limit | expected + "/users/123/orders/456" | 8 | ["users", "123", "orders", "456"] + "/a/b/c/d/e/f/g/h/i/j" | 8 | ["a", "b", "c", "d", "e", "f", "g", "h"] + "/users//orders///items" | 8 | ["users", "orders", "items"] // empty segments removed + "/" | 8 | [] + "/single" | 8 | ["single"] + "/a/b/c" | 2 | ["a", "b"] // limited to 2 + "" | 8 | [] + } + + def "simplifySegment delegates to SegmentPattern"() { + expect: + EndpointSimplifier.simplifySegment(input) == expected + + where: + input | expected + "123" | "{param:int}" + "1-2-3" | "{param:int_id}" + "abc123" | "{param:hex}" + "abc-123" | "{param:hex_id}" + "very-long-string-with-many-characters" | "{param:str}" + "users" | "users" + "api" | "api" + "user-123" | "user-123" // has letters, not simplified + } + + def "simplifyPath handles complete paths"() { + expect: + EndpointSimplifier.simplifyPath(input) == expected + + where: + input | expected + // Simple paths + "/users" | "/users" + "/api/v1/users" | "/api/v1/users" + // Paths with IDs + "/users/123" | "/users/{param:int}" + "/users/123/orders/456" | "/users/{param:int}/orders/{param:int}" + // Mixed parameters + "/users/abc-123/orders" | "/users/{param:hex_id}/orders" + "/api/v1/users/123/profile" | "/api/v1/users/{param:int}/profile" + // Long paths (8 segment limit) + "/a/b/c/d/e/f/g/h/i/j" | "/a/b/c/d/e/f/g/h" + // Complex real-world examples + "/orders/abc-123-def/items/456" | "/orders/{param:hex_id}/items/{param:int}" + "/v1/users/123/orders/def-456/status" | "/v1/users/{param:int}/orders/{param:hex_id}/status" + // Edge cases + "/" | "/" + "" | "/" + null | "/" + // Empty segments + "/users//orders" | "/users/orders" + "///users///123///" | "/users/{param:int}" + } + + def "simplifyUrl handles complete URLs"() { + expect: + EndpointSimplifier.simplifyUrl(input) == expected + + where: + input | expected + // Full URLs + "http://example.com/users/123" | "/users/{param:int}" + "https://api.example.com/v1/orders/456/items" | "/v1/orders/{param:int}/items" + // URLs with query strings + "http://example.com/users/123?foo=bar" | "/users/{param:int}" + "https://api.example.com/search?q=test&page=1" | "/search" + // Path only + "/users/123/orders/456" | "/users/{param:int}/orders/{param:int}" + "/api/v1/users/abc123def" | "/api/v1/users/{param:hex}" + // Edge cases + "/" | "/" + "" | "/" + null | "/" + "http://example.com/" | "/" + // Complex real-world scenarios + "https://api.example.com/v2/customers/abc-123-def/orders/999/items/very-long-item-identifier-here" | + "/v2/customers/{param:hex_id}/orders/{param:int}/items/{param:str}" + } + + def "cardinality test - ensures bounded output"() { + given: + def uniqueEndpoints = [] as Set + + when: "process many URLs with different IDs (starting from 10 to ensure 2+ digits)" + 1000.times { i -> + def id1 = i + 10 // Ensure at least 2 digits for INTEGER pattern + def id2 = (i + 10) * 2 + uniqueEndpoints.add(EndpointSimplifier.simplifyUrl("/users/${id1}/orders/${id2}")) + } + + then: "all map to same endpoint" + uniqueEndpoints.size() == 1 + uniqueEndpoints.first() == "/users/{param:int}/orders/{param:int}" + } + + def "cardinality test - different patterns create different endpoints"() { + given: + def inputs = [ + 
"/users/123", + "/users/abc123", + "/users/abc-123", + "/users/very-long-identifier-here", + "/orders/456", + "/api/v1/users/123" + ] + + when: + def endpoints = inputs.collect { EndpointSimplifier.simplifyUrl(it) } as Set + + then: "different path structures create different endpoints" + endpoints == [ + "/users/{param:int}", + "/users/{param:hex}", + "/users/{param:hex_id}", + "/users/{param:str}", + "/orders/{param:int}", + "/api/v1/users/{param:int}" + ] as Set + } + + def "stress test - handles malformed URLs gracefully"() { + expect: + EndpointSimplifier.simplifyUrl(input) != null + + where: + input << [ + "not a url", + "://broken", + "http://", + "?query=only", + "##fragments", + "user:pass@host/path", + "/path with spaces", + "/path/with/../dots", + "/path/with/./dots" + ] + } + + def "real-world URL examples"() { + expect: + EndpointSimplifier.simplifyUrl(input) == expected + + where: + input | expected + // REST APIs + "https://api.github.com/repos/owner/repo/pulls/123" | "/repos/owner/repo/pulls/{param:int}" + "https://api.stripe.com/v1/customers/abc123def/cards" | "/v1/customers/{param:hex}/cards" + // E-commerce + "https://shop.example.com/products/abc-12345/reviews" | "/products/{param:hex_id}/reviews" + "https://shop.example.com/cart/abc-123-def/checkout" | "/cart/{param:hex_id}/checkout" + // Microservices + "/api/v2/tenants/123-456/services/abc-def/metrics" | "/api/v2/tenants/{param:int_id}/services/abc-def/metrics" + "/internal/health/check/abc-123/status" | "/internal/health/check/{param:hex_id}/status" + // With query parameters (should be stripped) + "/api/users/123?include=orders&sort=date" | "/api/users/{param:int}" + "/search?q=test&page=1&limit=10" | "/search" + } + + def "special characters in path segments"() { + expect: + EndpointSimplifier.simplifyUrl(input) == expected + + where: + input | expected + "/users/user@example.com/profile" | "/users/{param:str}/profile" + "/search/query%20with%20spaces" | "/search/{param:str}" + "/api/v1/items/item+tag" | "/api/v1/items/{param:str}" + "/path/segment(with)parens" | "/path/{param:str}" + } + + def "preserves static segments"() { + expect: + EndpointSimplifier.simplifyUrl(input) == expected + + where: + input | expected + "/api/v1/users" | "/api/v1/users" + "/health/check" | "/health/check" + "/metrics/prometheus" | "/metrics/prometheus" + "/admin/dashboard" | "/admin/dashboard" + } + + def "handles UUID-like patterns"() { + expect: + EndpointSimplifier.simplifyUrl(input) == expected + + where: + input | expected + "/users/0-1-2-3-4" | "/users/{param:int_id}" + "/orders/abc-def-123" | "/orders/{param:hex_id}" + "/items/ABC123DEF456" | "/items/{param:hex}" + "/items/abc-123-def" | "/items/{param:hex_id}" + } + + def "segment limit prevents cardinality explosion"() { + given: + def deepPath = (1..20).collect { "segment$it" }.join("/") + def url = "/$deepPath" + + when: + def simplified = EndpointSimplifier.simplifyUrl(url) + + then: "only first 8 segments are kept" + simplified == "/segment1/segment2/segment3/segment4/segment5/segment6/segment7/segment8" + } + + def "handles trailing slashes"() { + expect: + EndpointSimplifier.simplifyUrl(input) == expected + + where: + input | expected + "/users/" | "/users" + "/users/123/" | "/users/{param:int}" + "/api/v1/orders/" | "/api/v1/orders" + } +} diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/SegmentPatternTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/SegmentPatternTest.groovy new file mode 100644 index 
00000000000..08ba5127db6 --- /dev/null +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/endpoint/SegmentPatternTest.groovy @@ -0,0 +1,180 @@ +package datadog.trace.core.endpoint + +import spock.lang.Specification + +class SegmentPatternTest extends Specification { + + def "INTEGER pattern matches integers with 2+ digits"() { + expect: + SegmentPattern.INTEGER.matches(input) == expected + + where: + input | expected + "123" | true + "9876" | true + "10" | true + "99" | true + // Edge cases - should NOT match + "1" | false // single digit + "0" | false // zero + "01" | false // leading zero + "00123" | false // leading zeros + "123abc" | false // contains letters + "abc" | false // no digits + "" | false // empty + } + + def "INT_ID pattern matches mixed strings with digits and delimiters"() { + expect: + SegmentPattern.INT_ID.matches(input) == expected + + where: + input | expected + "123" | true + "1-2-3" | true + "999_888" | true + "12.34.56" | true + "1_2_3_4" | true + // Edge cases - should NOT match + "12" | false // too short (< 3 chars) + "abc" | false // no digits + "---" | false // no digits + "user-123" | false // contains letters + "order_456" | false // contains letters + "item.789" | false // contains letters + "a1b" | false // contains letters + "123abc" | false // contains letters + "" | false // empty + } + + def "HEX pattern matches hexadecimal strings with at least one decimal digit"() { + expect: + SegmentPattern.HEX.matches(input) == expected + + where: + input | expected + "abc123" | true + "123ABC" | true + "deadbeef0" | true + "A1B2C3" | true + "0123456789" | true + "abcdef0" | true + // Edge cases - should NOT match + "ABCDEF" | false // no decimal digit (0-9) + "abcdef" | false // no decimal digit + "12345" | false // too short (< 6 chars) + "abc12" | false // too short + "xyz123" | false // contains non-hex letters + "abc-123" | false // contains delimiter + "" | false // empty + } + + def "HEX_ID pattern matches hex+delimiter strings with at least one decimal digit"() { + expect: + SegmentPattern.HEX_ID.matches(input) == expected + + where: + input | expected + "abc-123" | true + "def_456" | true + "A1B2C3" | true + "abc.123.def" | true + "0-1-2-3-4-5" | true + "aaa-bbb-111" | true + // Edge cases - should NOT match + "abc12" | false // too short (< 6 chars) + "ABCDEF" | false // no decimal digit + "uuid_def456" | false // contains non-hex letter 'u' + "abc-xyz" | false // contains non-hex letters + "123xyz" | false // contains non-hex letters + "" | false // empty + } + + def "STRING pattern matches long strings or strings with special characters"() { + expect: + SegmentPattern.STRING.matches(input) == expected + + where: + input | expected + // Long strings (20+ chars) + "a" * 20 | true + "very-long-string-with-many-characters" | true + "12345678901234567890" | true + // Special characters + "param%20value" | true + "user&admin" | true + "it's" | true + "func(arg)" | true + "val*2" | true + "a+b" | true + "a,b" | true + "key:value" | true + "a=b" | true + "user@example" | true + // Edge cases - should NOT match + "short" | false // < 20 chars, no special chars + "123" | false + "abc-def" | false // dash is not special + "" | false // empty + } + + def "simplify applies patterns in correct order"() { + expect: + SegmentPattern.simplify(input) == expected + + where: + input | expected + // INTEGER (highest priority) + "123" | "{param:int}" + "9876" | "{param:int}" + // INT_ID + "1-2-3" | "{param:int_id}" + "999_888" | "{param:int_id}" + // HEX + "abc123def" 
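/* no delimiter here, so HEX takes priority over HEX_ID */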
| "{param:hex}" + "DEAD0BEEF" | "{param:hex}" + // HEX_ID + "abc-123" | "{param:hex_id}" + "def_456" | "{param:hex_id}" + // STRING + "very-long-string-with-many-characters" | "{param:str}" + "param%20value" | "{param:str}" + // No match - keep original + "users" | "users" + "orders" | "orders" + "v1" | "v1" + "api" | "api" + "user-123" | "user-123" // has letters + "uuid-abc123" | "uuid-abc123" // has 'u' which is not hex + // Edge cases + "" | "" + null | null + } + + def "pattern priority ensures correct replacement"() { + expect: + // "123" could match INT_ID but INTEGER has priority + SegmentPattern.simplify("123") == "{param:int}" + + // "abc123" could match HEX_ID but HEX has priority (no delimiters) + SegmentPattern.simplify("abc123") == "{param:hex}" + + // "abc-123" matches HEX_ID (has delimiter) + SegmentPattern.simplify("abc-123") == "{param:hex_id}" + + // Very long string matches STRING (length priority) + SegmentPattern.simplify("a" * 20) == "{param:str}" + + // Numeric string with delimiters matches INT_ID + SegmentPattern.simplify("1-2-3") == "{param:int_id}" + } + + def "all enum values have valid patterns and replacements"() { + expect: + SegmentPattern.values().each { pattern -> + assert pattern.getReplacement() != null + assert pattern.getReplacement().startsWith("{param:") + assert pattern.getReplacement().endsWith("}") + } + } +} diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/tagprocessor/HttpEndpointPostProcessorTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/tagprocessor/HttpEndpointPostProcessorTest.groovy new file mode 100644 index 00000000000..e2b3d648f28 --- /dev/null +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/tagprocessor/HttpEndpointPostProcessorTest.groovy @@ -0,0 +1,97 @@ +package datadog.trace.core.tagprocessor + +import datadog.trace.bootstrap.instrumentation.api.ResourceNamePriorities +import datadog.trace.bootstrap.instrumentation.api.Tags +import datadog.trace.core.DDSpanContext +import datadog.trace.core.endpoint.EndpointResolver +import spock.lang.Specification + +class HttpEndpointPostProcessorTest extends Specification { + + def "should update resource name with endpoint when enabled"() { + given: + def endpointResolver = new EndpointResolver(true, false) + def processor = new HttpEndpointPostProcessor(endpointResolver) + def mockContext = Mock(DDSpanContext) + def tags = [ + (Tags.HTTP_METHOD): "GET", + (Tags.HTTP_ROUTE): "/greeting", + (Tags.HTTP_URL): "http://localhost:8080/greeting" + ] + + when: + processor.processTags(tags, mockContext, []) + + then: + 1 * mockContext.setResourceName("GET /greeting", ResourceNamePriorities.HTTP_SERVER_RESOURCE_RENAMING) + } + + def "should compute simplified endpoint from URL when route is invalid"() { + given: + def endpointResolver = new EndpointResolver(true, false) + def processor = new HttpEndpointPostProcessor(endpointResolver) + def mockContext = Mock(DDSpanContext) + def tags = [ + (Tags.HTTP_METHOD): "GET", + (Tags.HTTP_ROUTE): "*", // Invalid route + (Tags.HTTP_URL): "http://localhost:8080/users/123/orders/456" + ] + + when: + processor.processTags(tags, mockContext, []) + + then: + 1 * mockContext.setResourceName("GET /users/{param:int}/orders/{param:int}", ResourceNamePriorities.HTTP_SERVER_RESOURCE_RENAMING) + } + + def "should skip non-HTTP spans"() { + given: + def endpointResolver = new EndpointResolver(true, false) + def processor = new HttpEndpointPostProcessor(endpointResolver) + def mockContext = Mock(DDSpanContext) + def tags = [ + "db.statement": 
"SELECT * FROM users" + ] + + when: + processor.processTags(tags, mockContext, []) + + then: + 0 * mockContext.setResourceName(_, _) + } + + def "should not process when resource renaming is disabled"() { + given: + def endpointResolver = new EndpointResolver(false, false) + def processor = new HttpEndpointPostProcessor(endpointResolver) + def mockContext = Mock(DDSpanContext) + def tags = [ + (Tags.HTTP_METHOD): "GET", + (Tags.HTTP_ROUTE): "/greeting" + ] + + when: + processor.processTags(tags, mockContext, []) + + then: + 0 * mockContext.setResourceName(_, _) + } + + def "should use simplified endpoint when alwaysSimplified is true"() { + given: + def endpointResolver = new EndpointResolver(true, true) + def processor = new HttpEndpointPostProcessor(endpointResolver) + def mockContext = Mock(DDSpanContext) + def tags = [ + (Tags.HTTP_METHOD): "GET", + (Tags.HTTP_ROUTE): "/greeting", + (Tags.HTTP_URL): "http://localhost:8080/users/123" + ] + + when: + processor.processTags(tags, mockContext, []) + + then: + 1 * mockContext.setResourceName("GET /users/{param:int}", ResourceNamePriorities.HTTP_SERVER_RESOURCE_RENAMING) + } +} diff --git a/dd-trace-core/src/traceAgentTest/groovy/MetricsIntegrationTest.groovy b/dd-trace-core/src/traceAgentTest/groovy/MetricsIntegrationTest.groovy index 9984b9700d0..2f7ccdfd6cb 100644 --- a/dd-trace-core/src/traceAgentTest/groovy/MetricsIntegrationTest.groovy +++ b/dd-trace-core/src/traceAgentTest/groovy/MetricsIntegrationTest.groovy @@ -35,11 +35,11 @@ class MetricsIntegrationTest extends AbstractTraceAgentTest { ) writer.startBucket(2, System.nanoTime(), SECONDS.toNanos(10)) writer.add( - new MetricKey("resource1", "service1", "operation1", "sql", 0, false, true, "xyzzy", [UTF8BytesString.create("grault:quux")]), + new MetricKey("resource1", "service1", "operation1", "sql", 0, false, true, "xyzzy", [UTF8BytesString.create("grault:quux")], null, null), new AggregateMetric().recordDurations(5, new AtomicLongArray(2, 1, 2, 250, 4, 5)) ) writer.add( - new MetricKey("resource2", "service2", "operation2", "web", 200, false, true, "xyzzy", [UTF8BytesString.create("grault:quux")]), + new MetricKey("resource2", "service2", "operation2", "web", 200, false, true, "xyzzy", [UTF8BytesString.create("grault:quux")], null, null), new AggregateMetric().recordDurations(10, new AtomicLongArray(1, 1, 200, 2, 3, 4, 5, 6, 7, 8, 9)) ) writer.finishBucket() diff --git a/internal-api/src/main/java/datadog/trace/api/Config.java b/internal-api/src/main/java/datadog/trace/api/Config.java index 9b080e6bff4..d2db93b520c 100644 --- a/internal-api/src/main/java/datadog/trace/api/Config.java +++ b/internal-api/src/main/java/datadog/trace/api/Config.java @@ -580,6 +580,8 @@ import static datadog.trace.api.config.TraceInstrumentationConfig.SQS_BODY_PROPAGATION_ENABLED; import static datadog.trace.api.config.TraceInstrumentationConfig.TRACE_128_BIT_TRACEID_LOGGING_ENABLED; import static datadog.trace.api.config.TraceInstrumentationConfig.TRACE_HTTP_CLIENT_TAG_QUERY_STRING; +import static datadog.trace.api.config.TraceInstrumentationConfig.TRACE_RESOURCE_RENAMING_ALWAYS_SIMPLIFIED_ENDPOINT; +import static datadog.trace.api.config.TraceInstrumentationConfig.TRACE_RESOURCE_RENAMING_ENABLED; import static datadog.trace.api.config.TraceInstrumentationConfig.TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING; import static datadog.trace.api.config.TraceInstrumentationConfig.TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES; import static 
datadog.trace.api.config.TraceInstrumentationConfig.TRACE_WEBSOCKET_TAG_SESSION_ID; @@ -1003,6 +1005,9 @@ public static String getHostName() { private final int apiSecurityMaxDownstreamRequestBodyAnalysis; private final double apiSecurityDownstreamRequestBodyAnalysisSampleRate; + private final boolean traceResourceRenamingEnabled; + private final boolean traceResourceRenamingAlwaysSimplifiedEndpoint; + private final IastDetectionMode iastDetectionMode; private final int iastMaxConcurrentRequests; private final int iastVulnerabilitiesPerRequest; @@ -2253,6 +2258,19 @@ PROFILING_DATADOG_PROFILER_ENABLED, isDatadogProfilerSafeInCurrentEnvironment()) DEFAULT_API_SECURITY_DOWNSTREAM_REQUEST_BODY_ANALYSIS_SAMPLE_RATE, API_SECURITY_DOWNSTREAM_REQUEST_ANALYSIS_SAMPLE_RATE); + // Trace Resource Renaming (Endpoint Inference) configuration + // Default: enabled if AppSec is enabled, otherwise disabled + // Can be explicitly overridden by setting DD_TRACE_RESOURCE_RENAMING_ENABLED + Boolean traceResourceRenamingExplicit = + configProvider.getBoolean(TRACE_RESOURCE_RENAMING_ENABLED); + this.traceResourceRenamingEnabled = + traceResourceRenamingExplicit != null + ? traceResourceRenamingExplicit + : instrumenterConfig.getAppSecActivation() != ProductActivation.FULLY_DISABLED; + + this.traceResourceRenamingAlwaysSimplifiedEndpoint = + configProvider.getBoolean(TRACE_RESOURCE_RENAMING_ALWAYS_SIMPLIFIED_ENDPOINT, false); + iastDebugEnabled = configProvider.getBoolean(IAST_DEBUG_ENABLED, DEFAULT_IAST_DEBUG_ENABLED); iastContextMode = @@ -3828,6 +3846,14 @@ public boolean isApiSecurityEndpointCollectionEnabled() { return instrumenterConfig.isApiSecurityEndpointCollectionEnabled(); } + public boolean isTraceResourceRenamingEnabled() { + return traceResourceRenamingEnabled; + } + + public boolean isTraceResourceRenamingAlwaysSimplifiedEndpoint() { + return traceResourceRenamingAlwaysSimplifiedEndpoint; + } + public ProductActivation getIastActivation() { return instrumenterConfig.getIastActivation(); } diff --git a/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/ResourceNamePriorities.java b/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/ResourceNamePriorities.java index dddeabaf0e4..65a7bf56dbf 100644 --- a/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/ResourceNamePriorities.java +++ b/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/ResourceNamePriorities.java @@ -6,6 +6,7 @@ public class ResourceNamePriorities { public static final byte HTTP_404 = 2; public static final byte HTTP_FRAMEWORK_ROUTE = 3; public static final byte RPC_COMMAND_NAME = 3; + public static final byte HTTP_SERVER_RESOURCE_RENAMING = 4; public static final byte HTTP_SERVER_CONFIG_PATTERN_MATCH = 4; public static final byte HTTP_CLIENT_CONFIG_PATTERN_MATCH = 4; public static final byte TAG_INTERCEPTOR = 5; diff --git a/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/Tags.java b/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/Tags.java index 3eaa1e292cc..5f4cd800b44 100644 --- a/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/Tags.java +++ b/internal-api/src/main/java/datadog/trace/bootstrap/instrumentation/api/Tags.java @@ -19,6 +19,7 @@ public class Tags { public static final String HTTP_ROUTE = "http.route"; public static final String HTTP_STATUS = "http.status_code"; public static final String HTTP_METHOD = "http.method"; + public static final String HTTP_ENDPOINT = 
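/* endpoint path for the request, e.g. a simplified pattern such as "/users/{param:int}" */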
"http.endpoint"; public static final String HTTP_FORWARDED = "http.forwarded"; public static final String HTTP_FORWARDED_PROTO = "http.forwarded.proto"; public static final String HTTP_FORWARDED_HOST = "http.forwarded.host"; diff --git a/metadata/supported-configurations.json b/metadata/supported-configurations.json index c270af1c7d3..f0f9509dbbd 100644 --- a/metadata/supported-configurations.json +++ b/metadata/supported-configurations.json @@ -8801,6 +8801,22 @@ "aliases": [] } ], + "DD_TRACE_RESOURCE_RENAMING_ALWAYS_SIMPLIFIED_ENDPOINT": [ + { + "version": "A", + "type": "boolean", + "default": "false", + "aliases": [] + } + ], + "DD_TRACE_RESOURCE_RENAMING_ENABLED": [ + { + "version": "A", + "type": "boolean", + "default": "false", + "aliases": [] + } + ], "DD_TRACE_RESOURCENAMERULE_ENABLED": [ { "version": "A",