diff --git a/dd-trace-core/src/main/java/datadog/trace/common/writer/ddagent/TraceMapperV0_5.java b/dd-trace-core/src/main/java/datadog/trace/common/writer/ddagent/TraceMapperV0_5.java index 288b08fc542..b8d1c77afdc 100644 --- a/dd-trace-core/src/main/java/datadog/trace/common/writer/ddagent/TraceMapperV0_5.java +++ b/dd-trace-core/src/main/java/datadog/trace/common/writer/ddagent/TraceMapperV0_5.java @@ -125,6 +125,16 @@ public String endpoint() { return "v0.5"; } + // Visible for tests + Map getEncoding() { + return encoding; + } + + // Visible for tests + GrowableBuffer getDictionary() { + return dictionary; + } + private static class DictionaryMapper implements Mapper { @Override diff --git a/dd-trace-core/src/main/java/datadog/trace/core/PendingTrace.java b/dd-trace-core/src/main/java/datadog/trace/core/PendingTrace.java index 47e60f6310a..776c30870dc 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/PendingTrace.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/PendingTrace.java @@ -233,6 +233,11 @@ void setLongRunningTrackedState(int state) { LONG_RUNNING_STATE.set(this, state); } + // @VisibleForTesting + int getPendingReferenceCount() { + return pendingReferenceCount; + } + boolean empty() { return 0 >= COMPLETED_SPAN_COUNT.get(this) + PENDING_REFERENCE_COUNT.get(this); } diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanContextTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanContextTest.groovy deleted file mode 100644 index fc7d4ce93a4..00000000000 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanContextTest.groovy +++ /dev/null @@ -1,469 +0,0 @@ -package datadog.trace.core - -import datadog.trace.api.DDTags -import datadog.trace.api.DDTraceId -import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext -import datadog.trace.bootstrap.instrumentation.api.ErrorPriorities -import datadog.trace.bootstrap.instrumentation.api.ProfilingContextIntegration -import 
datadog.trace.bootstrap.instrumentation.api.ServiceNameSources -import datadog.trace.bootstrap.instrumentation.api.Tags -import datadog.trace.common.writer.ListWriter -import datadog.trace.core.propagation.ExtractedContext -import datadog.trace.core.test.DDCoreSpecification - -import static datadog.trace.api.TracePropagationStyle.DATADOG -import static datadog.trace.api.sampling.PrioritySampling.* -import static datadog.trace.api.sampling.SamplingMechanism.* -import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_MECHANISM_TAG -import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_RULE_RATE_TAG -import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_MAX_PER_SECOND_TAG - -class DDSpanContextTest extends DDCoreSpecification { - - def writer - CoreTracer tracer - def profilingContextIntegration - - def setup() { - writer = new ListWriter() - profilingContextIntegration = Mock(ProfilingContextIntegration) - tracer = tracerBuilder().writer(writer) - .profilingContextIntegration(profilingContextIntegration).build() - } - - def cleanup() { - tracer.close() - } - - def "null values for tags delete existing tags"() { - setup: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .withSpanType("fakeType") - .start() - def context = span.context() - - when: - context.setTag("some.tag", "asdf") - context.setTag(name, null) - context.setErrorFlag(true, ErrorPriorities.DEFAULT) - span.finish() - - writer.waitForTraces(1) - - then: - assertTagmap(context.getTags(), tags) - context.serviceName == "fakeService" - context.resourceName.toString() == "fakeResource" - context.spanType == "fakeType" - - where: - name | tags - DDTags.SERVICE_NAME | ["some.tag": "asdf", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id, (DDTags.DD_SVC_SRC): ServiceNameSources.MANUAL] - DDTags.RESOURCE_NAME | ["some.tag": "asdf", (DDTags.THREAD_NAME): 
Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id, (DDTags.DD_SVC_SRC): ServiceNameSources.MANUAL] - DDTags.SPAN_TYPE | ["some.tag": "asdf", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id, (DDTags.DD_SVC_SRC): ServiceNameSources.MANUAL] - "some.tag" | [(DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id, (DDTags.DD_SVC_SRC): ServiceNameSources.MANUAL] - } - - def "special tags set certain values"() { - setup: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .withSpanType("fakeType") - .start() - def context = span.context() - - when: - context.setTag(name, value) - span.finish() - writer.waitForTraces(1) - - then: - def thread = Thread.currentThread() - assertTagmap(context.getTags(), [(DDTags.THREAD_NAME): thread.name, (DDTags.THREAD_ID): thread.id, (DDTags.DD_SVC_SRC): ServiceNameSources.MANUAL]) - context."$method" == value - - where: - name | value | method | details - DDTags.SERVICE_NAME | "different service" | "serviceName" | "different service/fakeOperation/fakeResource" - DDTags.RESOURCE_NAME | "different resource" | "resourceName" | "fakeService/fakeOperation/different resource" - DDTags.SPAN_TYPE | "different type" | "spanType" | "fakeService/fakeOperation/fakeResource" - } - - def "tags can be added to the context"() { - setup: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .withSpanType("fakeType") - .start() - def context = span.context() - - when: - context.setTag(name, value) - span.finish() - writer.waitForTraces(1) - def thread = Thread.currentThread() - - then: - assertTagmap(context.getTags(), [ - (name) : value, - (DDTags.THREAD_NAME) : thread.name, - (DDTags.THREAD_ID) : thread.id, - (DDTags.DD_SVC_SRC): ServiceNameSources.MANUAL - ]) - - where: - name | value - "tag.name" | "some value" 
- "tag with int" | 1234 - "tag-with-bool" | false - "tag_with_float" | 0.321 - } - - def "metrics use the expected types"() { - // floats should be converted to doubles. - setup: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .start() - def context = span.context() - - when: - context.setMetric("test", (Number)value) - - then: - type.isInstance(context.getTag("test")) - - where: - type | value - Integer | 0 - Integer | Integer.MAX_VALUE - Integer | Integer.MIN_VALUE - Short | Short.MAX_VALUE - Short | Short.MIN_VALUE - Float | Float.MAX_VALUE - Float | Float.MIN_VALUE - Double | Double.MAX_VALUE - Double | Double.MIN_VALUE - Float | 1f - Double | 1d - Float | 0.5f - Double | 0.5d - Integer | 0x55 - } - - def "force keep really keeps the trace"() { - setup: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .start() - def context = span.context() - when: - context.setSamplingPriority(SAMPLER_DROP, DEFAULT) - then: "priority should be set" - context.getSamplingPriority() == SAMPLER_DROP - - when: "sampling priority locked" - context.lockSamplingPriority() - then: "override ignored" - !context.setSamplingPriority(USER_DROP, MANUAL) - context.getSamplingPriority() == SAMPLER_DROP - - when: - context.forceKeep() - then: "lock is bypassed and priority set to USER_KEEP" - context.getSamplingPriority() == USER_KEEP - - cleanup: - span.finish() - } - - def "set TraceSegment tags and data on correct span"() { - setup: - def extracted = new ExtractedContext(DDTraceId.from(123), 456, SAMPLER_KEEP, "789", tracer.getPropagationTagsFactory().empty(), DATADOG) - .withRequestContextDataAppSec("dummy") - - def top = tracer.buildSpan("top").asChildOf((AgentSpanContext) extracted).start() - def topC = (DDSpanContext) top.context() - def topTS = top.getRequestContext().getTraceSegment() - def current = 
tracer.buildSpan("current").asChildOf(top).start() - def currentTS = current.getRequestContext().getTraceSegment() - def currentC = (DDSpanContext) current.context() - - when: - currentTS.setDataTop("ctd", "[1]") - currentTS.setTagTop("ctt", "t1") - currentTS.setDataCurrent("ccd", "[2]") - currentTS.setTagCurrent("cct", "t2") - topTS.setDataTop("ttd", "[3]") - topTS.setTagTop("ttt", "t3") - topTS.setDataCurrent("tcd", "[4]") - topTS.setTagCurrent("tct", "t4") - - then: - assertTagmap(topC.getTags(), [(dataTag("ctd")): "[1]", "ctt": "t1", - (dataTag("ttd")): "[3]", "ttt": "t3", - (dataTag("tcd")): "[4]", "tct": "t4"], true) - assertTagmap(currentC.getTags(), [(dataTag("ccd")): "[2]", "cct": "t2"], true) - - cleanup: - current.finish() - top.finish() - } - - def "set single span sampling tags"() { - setup: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .start() - def context = span.context() as DDSpanContext - - expect: - context.getSamplingPriority() == UNSET - - when: - context.setSpanSamplingPriority(rate, limit) - - then: - context.getTag(SPAN_SAMPLING_MECHANISM_TAG) == SPAN_SAMPLING_RATE - context.getTag(SPAN_SAMPLING_RULE_RATE_TAG) == rate - context.getTag(SPAN_SAMPLING_MAX_PER_SECOND_TAG) == (limit == Integer.MAX_VALUE ? 
null : limit) - // single span sampling should not change the trace sampling priority - context.getSamplingPriority() == UNSET - // make sure the `_dd.p.dm` tag has not been set by single span sampling - !context.getPropagationTags().createTagMap().containsKey("_dd.p.dm") - - where: - rate | limit - 1.0 | 10 - 0.5 | 100 - 0.25 | Integer.MAX_VALUE - } - - def "setting resource name to null is ignored"() { - setup: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .start() - - when: - span.setResourceName(null) - - then: - span.resourceName == "fakeResource" - } - - def "setting operation name triggers constant encoding"() { - when: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .start() - - then: "encoded operation name matches operation name" - 1 * profilingContextIntegration.encodeOperationName("fakeOperation") >> 1 - 1 * profilingContextIntegration.encodeResourceName("fakeResource") >> -1 - span.context.encodedOperationName == 1 - span.context.encodedResourceName == -1 - - when: - span.setOperationName("newOperationName") - - then: - 1 * profilingContextIntegration.encodeOperationName("newOperationName") >> 2 - span.context.encodedOperationName == 2 - - when: - span.setResourceName("newResourceName") - - then: - 1 * profilingContextIntegration.encodeResourceName("newResourceName") >> -2 - span.context.encodedResourceName == -2 - } - - private static String dataTag(String tag) { - "_dd.${tag}.json" - } - - def "Span IDs printed as unsigned long"() { - setup: - def parent = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .withSpanId(-987654321) - .start() - - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .withSpanId(-123456789) - .asChildOf(parent.context()) - .start() - - def context = 
span.context() as DDSpanContext - - expect: - // even though span ID and parent ID are setup as negative numbers, they should be printed as their unsigned value - // asserting there is no negative sign after ids is the best I can do. - context.toString().contains("id=-") == false - } - - def "service name source is propagated from parent to child span"() { - setup: - def parent = tracer.buildSpan("parentOperation") - .withServiceName("fakeService") - .start() - - when: - def child = tracer.buildSpan("childOperation") - .asChildOf(parent.context()) - .start() - def childContext = child.context() as DDSpanContext - - then: - childContext.getServiceNameSource() == ServiceNameSources.MANUAL - - cleanup: - child.finish() - parent.finish() - } - - static void assertTagmap(Map source, Map comparison, boolean removeThread = false) { - def sourceWithoutCommonTags = new HashMap(source) - sourceWithoutCommonTags.remove("runtime-id") - sourceWithoutCommonTags.remove("language") - sourceWithoutCommonTags.remove("_dd.agent_psr") - sourceWithoutCommonTags.remove("_sample_rate") - sourceWithoutCommonTags.remove("process_id") - sourceWithoutCommonTags.remove("_dd.trace_span_attribute_schema") - sourceWithoutCommonTags.remove(DDTags.PROFILING_ENABLED) - sourceWithoutCommonTags.remove(DDTags.PROFILING_CONTEXT_ENGINE) - sourceWithoutCommonTags.remove(DDTags.DSM_ENABLED) - sourceWithoutCommonTags.remove(DDTags.DJM_ENABLED) - if (removeThread) { - sourceWithoutCommonTags.remove(DDTags.THREAD_ID) - sourceWithoutCommonTags.remove(DDTags.THREAD_NAME) - } - assert sourceWithoutCommonTags == comparison - } - - def "span kind ordinal constants and SPAN_KIND_VALUES array stay in sync"() { - expect: "SPAN_KIND_VALUES array covers all ordinals" - DDSpanContext.SPAN_KIND_VALUES.length == DDSpanContext.SPAN_KIND_CUSTOM + 1 - - and: "each known ordinal maps to the correct Tags constant" - DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_SERVER] == Tags.SPAN_KIND_SERVER - 
DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_CLIENT] == Tags.SPAN_KIND_CLIENT - DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_PRODUCER] == Tags.SPAN_KIND_PRODUCER - DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_CONSUMER] == Tags.SPAN_KIND_CONSUMER - DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_INTERNAL] == Tags.SPAN_KIND_INTERNAL - DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_BROKER] == Tags.SPAN_KIND_BROKER - - and: "UNSET and CUSTOM map to null" - DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_UNSET] == null - DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_CUSTOM] == null - } - - def "setSpanKindOrdinal round-trips with SPAN_KIND_VALUES for all known kinds"() { - when: - def span = tracer.buildSpan("test", "test").start() - def context = (DDSpanContext) span.context() - context.setSpanKindOrdinal(kindString) - - then: - context.getSpanKindOrdinal() == expectedOrdinal - DDSpanContext.SPAN_KIND_VALUES[expectedOrdinal] == kindString - - cleanup: - span.finish() - - where: - kindString | expectedOrdinal - Tags.SPAN_KIND_SERVER | DDSpanContext.SPAN_KIND_SERVER - Tags.SPAN_KIND_CLIENT | DDSpanContext.SPAN_KIND_CLIENT - Tags.SPAN_KIND_PRODUCER | DDSpanContext.SPAN_KIND_PRODUCER - Tags.SPAN_KIND_CONSUMER | DDSpanContext.SPAN_KIND_CONSUMER - Tags.SPAN_KIND_INTERNAL | DDSpanContext.SPAN_KIND_INTERNAL - Tags.SPAN_KIND_BROKER | DDSpanContext.SPAN_KIND_BROKER - } - - def "setTag and getTag round-trip for span.kind"() { - when: - def span = tracer.buildSpan("test", "test").start() - span.setTag(Tags.SPAN_KIND, kindString) - - then: - span.getTag(Tags.SPAN_KIND) == kindString - - cleanup: - span.finish() - - where: - kindString << [ - Tags.SPAN_KIND_SERVER, - Tags.SPAN_KIND_CLIENT, - Tags.SPAN_KIND_PRODUCER, - Tags.SPAN_KIND_CONSUMER, - Tags.SPAN_KIND_INTERNAL, - Tags.SPAN_KIND_BROKER, - ] - } - - def "getTag returns null when span.kind is not set"() { - when: - def span = tracer.buildSpan("test", "test").start() - 
- then: - span.getTag(Tags.SPAN_KIND) == null - - cleanup: - span.finish() - } - - def "setTag then removeTag clears span.kind"() { - when: - def span = tracer.buildSpan("test", "test").start() - span.setTag(Tags.SPAN_KIND, kindString) - - then: - span.getTag(Tags.SPAN_KIND) == kindString - - when: - ((DDSpan) span).context().removeTag(Tags.SPAN_KIND) - - then: - span.getTag(Tags.SPAN_KIND) == null - - cleanup: - span.finish() - - where: - kindString << [ - Tags.SPAN_KIND_SERVER, - Tags.SPAN_KIND_CLIENT, - Tags.SPAN_KIND_PRODUCER, - Tags.SPAN_KIND_CONSUMER, - Tags.SPAN_KIND_INTERNAL, - Tags.SPAN_KIND_BROKER, - ] - } - - def "setTag with custom span.kind falls back to tag map"() { - when: - def span = tracer.buildSpan("test", "test").start() - span.setTag(Tags.SPAN_KIND, "custom-kind") - - then: - span.getTag(Tags.SPAN_KIND) == "custom-kind" - - cleanup: - span.finish() - } -} diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanSerializationTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanSerializationTest.groovy deleted file mode 100644 index 3613743e9ff..00000000000 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanSerializationTest.groovy +++ /dev/null @@ -1,496 +0,0 @@ -package datadog.trace.core - -import static datadog.trace.api.config.GeneralConfig.EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED - -import datadog.communication.serialization.ByteBufferConsumer -import datadog.communication.serialization.FlushingBuffer -import datadog.communication.serialization.msgpack.MsgPackWriter -import datadog.trace.api.DDSpanId -import datadog.trace.api.DDTraceId -import datadog.trace.api.ProcessTags -import datadog.trace.api.sampling.PrioritySampling -import datadog.trace.api.datastreams.NoopPathwayContext -import datadog.trace.common.writer.ListWriter -import datadog.trace.common.writer.ddagent.TraceMapperV0_4 -import datadog.trace.common.writer.ddagent.TraceMapperV0_5 -import datadog.trace.core.test.DDCoreSpecification 
-import org.msgpack.core.MessageFormat -import org.msgpack.core.MessagePack -import org.msgpack.core.buffer.ArrayBufferInput -import org.msgpack.value.ValueType - -import java.nio.ByteBuffer - -class DDSpanSerializationTest extends DDCoreSpecification { - - def setupSpec() { - //disable process tags since will generate noise on the meta - injectSysConfig(EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED, "false") - ProcessTags.reset() - } - - def cleanupSpec() { - //disable process tags since will generate noise on the meta - injectSysConfig(EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED, "true") - ProcessTags.reset() - } - - def "serialize trace with id #value as int"() { - setup: - def writer = new ListWriter() - def tracer = tracerBuilder().writer(writer).build() - def traceId = DDTraceId.from(value) - def spanId = DDSpanId.from(value) - def context = createContext(spanType, tracer, traceId, spanId) - def span = DDSpan.create("test", 0, context, null) - CaptureBuffer capture = new CaptureBuffer() - def packer = new MsgPackWriter(new FlushingBuffer(1024, capture)) - packer.format(Collections.singletonList(span), new TraceMapperV0_4()) - packer.flush() - def unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)) - int traceCount = capture.messageCount - int spanCount = unpacker.unpackArrayHeader() - int size = unpacker.unpackMapHeader() - - expect: - traceCount == 1 - spanCount == 1 - size == 12 - for (int i = 0; i < size; i++) { - String key = unpacker.unpackString() - - switch (key) { - case "trace_id": - MessageFormat next = unpacker.nextFormat - assert next.valueType == ValueType.INTEGER - if (next == MessageFormat.UINT64) { - assert traceId == DDTraceId.from("${unpacker.unpackBigInteger()}") - } else { - assert traceId == DDTraceId.from(unpacker.unpackLong()) - } - break - case "span_id": - MessageFormat next = unpacker.nextFormat - assert next.valueType == ValueType.INTEGER - if (next == MessageFormat.UINT64) { - assert spanId == 
DDSpanId.from("${unpacker.unpackBigInteger()}") - } else { - assert spanId == unpacker.unpackLong() - } - break - default: - unpacker.unpackValue() - } - } - - cleanup: - tracer.close() - - where: - value | spanType - "0" | null - "1" | "some-type" - "8223372036854775807" | null - "${BigInteger.valueOf(Long.MAX_VALUE).subtract(1G)}" | "some-type" - "${BigInteger.valueOf(Long.MAX_VALUE).add(1G)}" | null - "${2G.pow(64).subtract(1G)}" | "some-type" - } - - def "serialize trace with id #value as int v0.5"() { - setup: - def writer = new ListWriter() - def tracer = tracerBuilder().writer(writer).build() - def traceId = DDTraceId.from(value) - def spanId = DDSpanId.from(value) - def context = createContext(spanType, tracer, traceId, spanId) - def span = DDSpan.create("test", 0, context, null) - CaptureBuffer capture = new CaptureBuffer() - def packer = new MsgPackWriter(new FlushingBuffer(1024, capture)) - def traceMapper = new TraceMapperV0_5() - packer.format(Collections.singletonList(span), traceMapper) - packer.flush() - def dictionaryUnpacker = MessagePack.newDefaultUnpacker(traceMapper.dictionary.slice()) - String[] dictionary = new String[traceMapper.encoding.size()] - for (int i = 0; i < dictionary.length; ++i) { - dictionary[i] = dictionaryUnpacker.unpackString() - } - def unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)) - int traceCount = capture.messageCount - - int spanCount = unpacker.unpackArrayHeader() - int size = unpacker.unpackArrayHeader() - - expect: - traceCount == 1 - spanCount == 1 - size == 12 - for (int i = 0; i < size; i++) { - switch (i) { - case 3: - MessageFormat next = unpacker.nextFormat - assert next.valueType == ValueType.INTEGER - if (next == MessageFormat.UINT64) { - assert traceId == DDTraceId.from("${unpacker.unpackBigInteger()}") - } else { - assert traceId == DDTraceId.from(unpacker.unpackLong()) - } - break - case 4: - MessageFormat next = unpacker.nextFormat - assert next.valueType == 
ValueType.INTEGER - if (next == MessageFormat.UINT64) { - assert spanId == DDSpanId.from("${unpacker.unpackBigInteger()}") - } else { - assert spanId == unpacker.unpackLong() - } - break - default: - unpacker.unpackValue() - } - } - - cleanup: - tracer.close() - - where: - value | spanType - "0" | null - "1" | "some-type" - "8223372036854775807" | null - "${BigInteger.valueOf(Long.MAX_VALUE).subtract(1G)}" | "some-type" - "${BigInteger.valueOf(Long.MAX_VALUE).add(1G)}" | null - "${2G.pow(64).subtract(1G)}" | "some-type" - } - - def "serialize trace with baggage and tags correctly v0.4"() { - setup: - def writer = new ListWriter() - def tracer = tracerBuilder().writer(writer).build() - def context = new DDSpanContext( - DDTraceId.ONE, - 1, - DDSpanId.ZERO, - null, - "fakeService", - "fakeOperation", - "fakeResource", - PrioritySampling.UNSET, - null, - baggage, - false, - null, - tags.size(), - tracer.traceCollectorFactory.create(DDTraceId.ONE), - null, - null, - NoopPathwayContext.INSTANCE, - false, - null, - injectBaggage, - true) - context.setAllTags(tags) - def span = DDSpan.create("test", 0, context, null) - CaptureBuffer capture = new CaptureBuffer() - def packer = new MsgPackWriter(new FlushingBuffer(1024, capture)) - packer.format(Collections.singletonList(span), new TraceMapperV0_4()) - packer.flush() - def unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)) - int traceCount = capture.messageCount - int spanCount = unpacker.unpackArrayHeader() - int size = unpacker.unpackMapHeader() - - expect: - traceCount == 1 - spanCount == 1 - size == 12 - for (int i = 0; i < size; i++) { - String key = unpacker.unpackString() - - switch (key) { - case "meta": - int packedSize = unpacker.unpackMapHeader() - Map unpackedMeta = [:] - for (int j = 0; j < packedSize; j++) { - def k = unpacker.unpackString() - def v = unpacker.unpackString() - if (k != "thread.name" && k != "thread.id") { - unpackedMeta.put(k, v) - } - } - assert unpackedMeta == 
expected - break - default: - unpacker.unpackValue() - } - } - - cleanup: - tracer.close() - - where: - baggage | tags | expected | injectBaggage - [:] | [:] | [:] | true - [foo: "bbar"] | [:] | [foo: "bbar"] | true - [foo: "bbar"] | [bar: "tfoo"] | [foo: "bbar", bar: "tfoo"] | true - [foo: "bbar"] | [foo: "tbar"] | [foo: "tbar"] | true - [:] | [:] | [:] | false - [foo: "bbar"] | [:] | [:] | false - [foo: "bbar"] | [bar: "tfoo"] | [bar: "tfoo"] | false - [foo: "bbar"] | [foo: "tbar"] | [foo: "tbar"] | false - } - - def "serialize trace with baggage and tags correctly v0.5"() { - setup: - def writer = new ListWriter() - def tracer = tracerBuilder().writer(writer).build() - def context = new DDSpanContext( - DDTraceId.ONE, - 1, - DDSpanId.ZERO, - null, - "fakeService", - "fakeOperation", - "fakeResource", - PrioritySampling.UNSET, - null, - baggage, - false, - null, - tags.size(), - tracer.traceCollectorFactory.create(DDTraceId.ONE), - null, - null, - NoopPathwayContext.INSTANCE, - false, - null, - injectBaggage, - true) - context.setAllTags(tags) - def span = DDSpan.create("test", 0, context, null) - CaptureBuffer capture = new CaptureBuffer() - def packer = new MsgPackWriter(new FlushingBuffer(1024, capture)) - def mapper = new TraceMapperV0_5() - packer.format(Collections.singletonList(span), mapper) - packer.flush() - def unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)) - int traceCount = capture.messageCount - int spanCount = unpacker.unpackArrayHeader() - int size = unpacker.unpackArrayHeader() - def dictionaryUnpacker = MessagePack.newDefaultUnpacker(mapper.dictionary.slice()) - String[] dictionary = new String[mapper.encoding.size()] - for (int i = 0; i < dictionary.length; ++i) { - dictionary[i] = dictionaryUnpacker.unpackString() - } - - expect: - traceCount == 1 - spanCount == 1 - size == 12 - for (int i = 0; i < 9; ++i) { - unpacker.skipValue() - } - - int packedSize = unpacker.unpackMapHeader() - Map unpackedMeta = [:] - for 
(int j = 0; j < packedSize; j++) { - def k = dictionary[unpacker.unpackInt()] - def v = dictionary[unpacker.unpackInt()] - if (k != "thread.name" && k != "thread.id") { - unpackedMeta.put(k, v) - } - } - assert unpackedMeta == expected - - cleanup: - tracer.close() - - where: - baggage | tags | expected | injectBaggage - [:] | [:] | [:] | true - [foo: "bbar"] | [:] | [foo: "bbar"] | true - [foo: "bbar"] | [bar: "tfoo"] | [foo: "bbar", bar: "tfoo"] | true - [foo: "bbar"] | [foo: "tbar"] | [foo: "tbar"] | true - [:] | [:] | [:] | false - [foo: "bbar"] | [:] | [:] | false - [foo: "bbar"] | [bar: "tfoo"] | [bar: "tfoo"] | false - [foo: "bbar"] | [foo: "tbar"] | [foo: "tbar"] | false - } - - def "serialize trace with flat map tag v0.4"() { - setup: - def tracer = tracerBuilder().writer(new ListWriter()).build() - def context = new DDSpanContext( - DDTraceId.ONE, - 1, - DDSpanId.ZERO, - null, - "fakeService", - "fakeOperation", - "fakeResource", - PrioritySampling.UNSET, - null, - null, - false, - null, - 0, - tracer.traceCollectorFactory.create(DDTraceId.ONE), - null, - null, - NoopPathwayContext.INSTANCE, - false, - null) - context.setTag('key1', 'value1') - context.setTag('key2', [ - 'sub1': 'v1', - 'sub2': 'v2' - ]) - def span = DDSpan.create("test", 0, context, null) - - CaptureBuffer capture = new CaptureBuffer() - def packer = new MsgPackWriter(new FlushingBuffer(1024, capture)) - packer.format(Collections.singletonList(span), new TraceMapperV0_4()) - packer.flush() - def unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)) - int traceCount = capture.messageCount - int spanCount = unpacker.unpackArrayHeader() - int size = unpacker.unpackMapHeader() - - def expectedMeta = ['key1': 'value1', 'key2.sub1': 'v1', 'key2.sub2': 'v2'] - - expect: - traceCount == 1 - spanCount == 1 - - for (int i = 0; i < size; i++) { - String key = unpacker.unpackString() - - switch (key) { - case "meta": - int packedSize = unpacker.unpackMapHeader() - Map 
unpackedMeta = [:] - for (int j = 0; j < packedSize; j++) { - def k = unpacker.unpackString() - def v = unpacker.unpackString() - if (k != "thread.name" && k != "thread.id") { - unpackedMeta.put(k, v) - } - } - assert unpackedMeta == expectedMeta - break - default: - unpacker.unpackValue() - } - } - - cleanup: - tracer.close() - } - - def "serialize trace with flat map tag v0.5"() { - setup: - def tracer = tracerBuilder().writer(new ListWriter()).build() - def context = new DDSpanContext( - DDTraceId.ONE, - 1, - DDSpanId.ZERO, - null, - "fakeService", - "fakeOperation", - "fakeResource", - PrioritySampling.UNSET, - null, - null, - false, - null, - 0, - tracer.traceCollectorFactory.create(DDTraceId.ONE), - null, - null, - NoopPathwayContext.INSTANCE, - false, - null) - context.setTag('key1', 'value1') - context.setTag('key2', [ - 'sub1': 'v1', - 'sub2': 'v2' - ]) - def span = DDSpan.create("test", 0, context, null) - - CaptureBuffer capture = new CaptureBuffer() - def packer = new MsgPackWriter(new FlushingBuffer(1024, capture)) - def mapper = new TraceMapperV0_5() - packer.format(Collections.singletonList(span), mapper) - packer.flush() - def unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)) - int traceCount = capture.messageCount - int spanCount = unpacker.unpackArrayHeader() - int size = unpacker.unpackArrayHeader() - def dictionaryUnpacker = MessagePack.newDefaultUnpacker(mapper.dictionary.slice()) - String[] dictionary = new String[mapper.encoding.size()] - for (int i = 0; i < dictionary.length; ++i) { - dictionary[i] = dictionaryUnpacker.unpackString() - } - - def expectedMeta = ['key1': 'value1', 'key2.sub1': 'v1', 'key2.sub2': 'v2'] - - expect: - traceCount == 1 - spanCount == 1 - size == 12 - for (int i = 0; i < 9; ++i) { - unpacker.skipValue() - } - - int packedSize = unpacker.unpackMapHeader() - Map unpackedMeta = [:] - for (int j = 0; j < packedSize; j++) { - def k = dictionary[unpacker.unpackInt()] - def v = 
dictionary[unpacker.unpackInt()] - if (k != "thread.name" && k != "thread.id") { - unpackedMeta.put(k, v) - } - } - assert unpackedMeta == expectedMeta - - cleanup: - tracer.close() - } - - private class CaptureBuffer implements ByteBufferConsumer { - - private byte[] bytes - int messageCount - - @Override - void accept(int messageCount, ByteBuffer buffer) { - this.messageCount = messageCount - this.bytes = new byte[buffer.limit() - buffer.position()] - buffer.get(bytes) - } - } - - def createContext(String spanType, CoreTracer tracer, DDTraceId traceId, long spanId) { - DDSpanContext ctx = new DDSpanContext( - traceId, - spanId, - DDSpanId.ZERO, - null, - "fakeService", - "fakeOperation", - "fakeResource", - PrioritySampling.UNSET, - null, - ["a-baggage": "value"], - false, - spanType, - 1, - tracer.traceCollectorFactory.create(DDTraceId.ONE), - null, - null, - NoopPathwayContext.INSTANCE, - false, - null) - ctx.setAllTags(["k1": "v1"]) - return ctx - } -} diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanTest.groovy deleted file mode 100644 index e7882432d9c..00000000000 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/DDSpanTest.groovy +++ /dev/null @@ -1,470 +0,0 @@ -package datadog.trace.core - -import datadog.trace.api.DDSpanId -import datadog.trace.api.DDTags -import datadog.trace.api.DDTraceId -import datadog.trace.api.TagMap -import datadog.trace.api.gateway.RequestContextSlot -import datadog.trace.api.sampling.PrioritySampling -import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext -import datadog.trace.api.datastreams.NoopPathwayContext -import datadog.trace.bootstrap.instrumentation.api.ErrorPriorities -import datadog.trace.bootstrap.instrumentation.api.TagContext -import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString -import datadog.trace.common.sampling.RateByServiceTraceSampler -import datadog.trace.common.writer.ListWriter -import 
datadog.trace.core.propagation.ExtractedContext -import datadog.trace.core.test.DDCoreSpecification -import spock.lang.Shared - -import java.util.concurrent.TimeUnit - -import static datadog.trace.api.TracePropagationStyle.DATADOG -import static datadog.trace.api.sampling.PrioritySampling.UNSET -import static datadog.trace.api.sampling.SamplingMechanism.SPAN_SAMPLING_RATE -import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_MAX_PER_SECOND_TAG -import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_MECHANISM_TAG -import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_RULE_RATE_TAG - -class DDSpanTest extends DDCoreSpecification { - - @Shared def writer = new ListWriter() - @Shared def sampler = new RateByServiceTraceSampler() - @Shared def tracer = tracerBuilder().writer(writer).sampler(sampler).build() - @Shared def propagationTagsFactory = tracer.getPropagationTagsFactory() - - def cleanup() { - tracer?.close() - } - - def "getters and setters"() { - setup: - def span = tracer.buildSpan("fakeOperation") - .withServiceName("fakeService") - .withResourceName("fakeResource") - .withSpanType("fakeType") - .start() - - when: - span.setServiceName("service") - then: - span.getServiceName() == "service" - - when: - span.setOperationName("operation") - then: - span.getOperationName() == "operation" - - when: - span.setResourceName("resource") - then: - span.getResourceName() == "resource" - - when: - span.setSpanType("type") - then: - span.getType() == "type" - - when: - span.setSamplingPriority(PrioritySampling.UNSET) - then: - span.getSamplingPriority() == null - - when: - span.setSamplingPriority(PrioritySampling.SAMPLER_KEEP) - then: - span.getSamplingPriority() == PrioritySampling.SAMPLER_KEEP - - when: - span.context().lockSamplingPriority() - span.setSamplingPriority(PrioritySampling.USER_KEEP) - then: - span.getSamplingPriority() == PrioritySampling.SAMPLER_KEEP - } - - def "resource name equals operation name if null"() { - setup: - final 
String opName = "operationName" - def span - - when: - span = tracer.buildSpan(opName).start() - then: - span.getResourceName() == opName - span.getServiceName() != "" - - when: - final String resourceName = "fake" - final String serviceName = "myService" - span = tracer - .buildSpan(opName) - .withResourceName(resourceName) - .withServiceName(serviceName) - .start() - then: - span.getResourceName() == resourceName - span.getServiceName() == serviceName - } - - def "duration measured in nanoseconds"() { - setup: - def mod = TimeUnit.MILLISECONDS.toNanos(1) - def builder = tracer.buildSpan("test") - def start = System.nanoTime() - def span = builder.start() - def between = System.nanoTime() - def betweenDur = System.nanoTime() - between - span.finish() - def total = System.nanoTime() - start - - expect: - // Generous 5 seconds to execute this test - Math.abs(TimeUnit.NANOSECONDS.toSeconds(span.startTime) - TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis())) < 5 - span.durationNano > betweenDur - span.durationNano < total - span.durationNano % mod > 0 // Very slim chance of a false negative. 
- } - - def "phasedFinish captures duration but doesn't publish immediately"() { - setup: - def mod = TimeUnit.MILLISECONDS.toNanos(1) - def builder = tracer.buildSpan("test") - def start = System.nanoTime() - def span = builder.start() - def between = System.nanoTime() - def betweenDur = System.nanoTime() - between - - when: "calling publish before phasedFinish" - span.publish() - - then: "has no effect" - span.durationNano == 0 - span.context().traceCollector.pendingReferenceCount == 1 - writer.size() == 0 - - when: - def finish = span.phasedFinish() - def total = System.nanoTime() - start - - then: - finish - span.context().traceCollector.pendingReferenceCount == 1 - span.context().traceCollector.spans.isEmpty() - writer.isEmpty() - - and: "duration is recorded as negative to allow publishing" - span.durationNano < 0 - def actualDurationNano = span.durationNano & Long.MAX_VALUE - // Generous 5 seconds to execute this test - Math.abs(TimeUnit.NANOSECONDS.toSeconds(span.startTime) - TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis())) < 5 - actualDurationNano > betweenDur - actualDurationNano < total - actualDurationNano % mod > 0 // Very slim chance of a false negative. 
- - when: "extra finishes" - finish = span.phasedFinish() - span.finish() // verify conflicting finishes are ignored - - then: "have no effect" - !finish - span.context().traceCollector.pendingReferenceCount == 1 - span.context().traceCollector.spans.isEmpty() - writer.isEmpty() - - when: - span.publish() - - then: "duration is flipped to positive" - span.durationNano > 0 - span.durationNano == actualDurationNano - span.context().traceCollector.pendingReferenceCount == 0 - writer.size() == 1 - - when: "duplicate call to publish" - span.publish() - - then: "has no effect" - span.context().traceCollector.pendingReferenceCount == 0 - writer.size() == 1 - } - - def "starting with a timestamp disables nanotime"() { - setup: - def mod = TimeUnit.MILLISECONDS.toNanos(1) - def start = System.currentTimeMillis() - def builder = tracer.buildSpan("test") - .withStartTimestamp(TimeUnit.MILLISECONDS.toMicros(System.currentTimeMillis())) - def span = builder.start() - def between = System.currentTimeMillis() - def betweenDur = System.currentTimeMillis() - between - span.finish() - def total = Math.max(1, System.currentTimeMillis() - start) - - expect: - // Generous 5 seconds to execute this test - Math.abs(TimeUnit.NANOSECONDS.toSeconds(span.startTime) - TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis())) < 5 - span.durationNano >= TimeUnit.MILLISECONDS.toNanos(betweenDur) - span.durationNano <= TimeUnit.MILLISECONDS.toNanos(total) - span.durationNano % mod == 0 || span.durationNano == 1 - } - - def "stopping with a timestamp disables nanotime"() { - setup: - def mod = TimeUnit.MILLISECONDS.toNanos(1) - def builder = tracer.buildSpan("test") - def start = System.currentTimeMillis() - def span = builder.start() - def between = System.currentTimeMillis() - def betweenDur = System.currentTimeMillis() - between - span.finish(TimeUnit.MILLISECONDS.toMicros(System.currentTimeMillis() + 1)) - def total = System.currentTimeMillis() - start + 1 - - expect: - // Generous 5 
seconds to execute this test - Math.abs(TimeUnit.NANOSECONDS.toSeconds(span.startTime) - TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis())) < 5 - span.durationNano >= TimeUnit.MILLISECONDS.toNanos(betweenDur) - span.durationNano <= TimeUnit.MILLISECONDS.toNanos(total) - // true span duration can be <1ms if clock was about to tick over, so allow for that - (span.durationNano % mod == 0) || (span.durationNano == 1) - } - - def "stopping with a timestamp before start time yields a min duration of 1"() { - setup: - def span = tracer.buildSpan("test").start() - - // remove tick precision part of our internal time to match previous test condition - span.finish(TimeUnit.MILLISECONDS.toMicros(TimeUnit.NANOSECONDS.toMillis(span.startTimeNano)) - 10) - - expect: - span.durationNano == 1 - } - - def "priority sampling metric set only on root span"() { - setup: - def parent = tracer.buildSpan("testParent").start() - def child1 = tracer.buildSpan("testChild1").asChildOf(parent).start() - - child1.setSamplingPriority(PrioritySampling.SAMPLER_KEEP) - child1.context().lockSamplingPriority() - parent.setSamplingPriority(PrioritySampling.SAMPLER_DROP) - child1.finish() - def child2 = tracer.buildSpan("testChild2").asChildOf(parent).start() - child2.finish() - parent.finish() - - expect: - parent.context().samplingPriority == PrioritySampling.SAMPLER_KEEP - parent.getSamplingPriority() == PrioritySampling.SAMPLER_KEEP - parent.hasSamplingPriority() - child1.getSamplingPriority() == parent.getSamplingPriority() - child2.getSamplingPriority() == parent.getSamplingPriority() - !child1.hasSamplingPriority() - !child2.hasSamplingPriority() - } - - def "origin set only on root span"() { - setup: - def parent = tracer.buildSpan("testParent").asChildOf(extractedContext).start().context() - def child = tracer.buildSpan("testChild1").asChildOf(parent).start().context() - - expect: - parent.origin == "some-origin" - parent.@origin == "some-origin" // Access field directly instead of 
getter. - child.origin == "some-origin" - child.@origin == null // Access field directly instead of getter. - - where: - extractedContext | _ - new TagContext("some-origin", TagMap.fromMap([:])) | _ - new ExtractedContext(DDTraceId.ONE, 2, PrioritySampling.SAMPLER_DROP, "some-origin", propagationTagsFactory.empty(), DATADOG) | _ - } - - def "isRootSpan() in and not in the context of distributed tracing"() { - setup: - def root = tracer.buildSpan("root").asChildOf((AgentSpanContext) extractedContext).start() - def child = tracer.buildSpan("child").asChildOf(root).start() - - expect: - root.isRootSpan() == isTraceRootSpan - !child.isRootSpan() - - cleanup: - child.finish() - root.finish() - - where: - extractedContext | isTraceRootSpan - null | true - new ExtractedContext(DDTraceId.from(123), 456, PrioritySampling.SAMPLER_KEEP, "789", propagationTagsFactory.empty(), DATADOG) | false - } - - def "getApplicationRootSpan() in and not in the context of distributed tracing"() { - setup: - def root = tracer.buildSpan("root").asChildOf((AgentSpanContext) extractedContext).start() - def child = tracer.buildSpan("child").asChildOf(root).start() - - expect: - root.localRootSpan == root - child.localRootSpan == root - // Checking for backward compatibility method names - root.rootSpan == root - child.rootSpan == root - - cleanup: - child.finish() - root.finish() - - where: - extractedContext | isTraceRootSpan - null | true - new ExtractedContext(DDTraceId.from(123), 456, PrioritySampling.SAMPLER_KEEP, "789", propagationTagsFactory.empty(), DATADOG) | false - } - - def 'publishing of root span closes the request context data'() { - setup: - def reqContextData = Mock(Closeable) - def context = new TagContext().withRequestContextDataAppSec(reqContextData) - def root = tracer.buildSpan("root").asChildOf(context).start() - def child = tracer.buildSpan("child").asChildOf(root).start() - - expect: - root.requestContext.getData(RequestContextSlot.APPSEC).is(reqContextData) - 
child.requestContext.getData(RequestContextSlot.APPSEC).is(reqContextData) - - when: - child.finish() - - then: - 0 * reqContextData.close() - - when: - root.finish() - - then: - 1 * reqContextData.close() - } - - def "infer top level from parent service name"() { - setup: - def propagationTagsFactory = tracer.getPropagationTagsFactory() - when: - DDSpanContext context = - new DDSpanContext( - DDTraceId.ONE, - 1, - DDSpanId.ZERO, - parentServiceName, - "fakeService", - "fakeOperation", - "fakeResource", - PrioritySampling.UNSET, - null, - Collections. emptyMap(), - false, - "fakeType", - 0, - tracer.traceCollectorFactory.create(DDTraceId.ONE), - null, - null, - NoopPathwayContext.INSTANCE, - false, - propagationTagsFactory.empty()) - then: - context.isTopLevel() == expectTopLevel - - where: - parentServiceName | expectTopLevel - "foo" | true - UTF8BytesString.create("foo") | true - "fakeService" | false - UTF8BytesString.create("fakeService") | false - "" | true - null | true - } - - def "broken pipe exception does not create error span"() { - when: - def span = tracer.buildSpan("root").start() - span.addThrowable(new IOException("Broken pipe")) - then: - !span.isError() - span.getTag(DDTags.ERROR_STACK) == null - span.getTag(DDTags.ERROR_MSG) == "Broken pipe" - } - - def "wrapped broken pipe exception does not create error span"() { - when: - def span = tracer.buildSpan("root").start() - span.addThrowable(new RuntimeException(new IOException("Broken pipe"))) - then: - !span.isError() - span.getTag(DDTags.ERROR_STACK) == null - span.getTag(DDTags.ERROR_MSG) == "java.io.IOException: Broken pipe" - } - - def "null exception safe to add"() { - when: - def span = tracer.buildSpan("root").start() - span.addThrowable(null) - then: - !span.isError() - span.getTag(DDTags.ERROR_STACK) == null - } - - def "set single span sampling tags"() { - setup: - def span = tracer.buildSpan("testSpan").start() as DDSpan - - expect: - span.samplingPriority() == UNSET - - when: - 
span.setSpanSamplingPriority(rate, limit) - - then: - span.getTag(SPAN_SAMPLING_MECHANISM_TAG) == SPAN_SAMPLING_RATE - span.getTag(SPAN_SAMPLING_RULE_RATE_TAG) == rate - span.getTag(SPAN_SAMPLING_MAX_PER_SECOND_TAG) == (limit == Integer.MAX_VALUE ? null : limit) - // single span sampling should not change the trace sampling priority - span.samplingPriority() == UNSET - - where: - rate | limit - 1.0 | 10 - 0.5 | 100 - 0.25 | Integer.MAX_VALUE - } - - def "error priorities should be respected"() { - setup: - def span = tracer.buildSpan("testSpan").start() as DDSpan - - expect: - !span.isError() - - when: - span.setError(true) - then: - span.isError() - - when: - span.setError(false) - then: - !span.isError() - - when: - span.setError(true, ErrorPriorities.HTTP_SERVER_DECORATOR) - then: - !span.isError() - - when: - span.setError(true, ErrorPriorities.MANUAL_INSTRUMENTATION) - then: - span.isError() - - when: - span.setError(true, Byte.MAX_VALUE) - then: - span.isError() - } -} diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/KnuthSamplingRateTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/KnuthSamplingRateTest.groovy deleted file mode 100644 index 74cad1573f7..00000000000 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/KnuthSamplingRateTest.groovy +++ /dev/null @@ -1,234 +0,0 @@ -package datadog.trace.core - -import datadog.trace.common.sampling.PrioritySampler -import datadog.trace.common.sampling.RateByServiceTraceSampler -import datadog.trace.common.sampling.Sampler -import datadog.trace.common.writer.ListWriter -import datadog.trace.common.writer.ddagent.DDAgentApi -import datadog.trace.core.propagation.PropagationTags -import datadog.trace.core.test.DDCoreSpecification - -import static datadog.trace.api.config.TracerConfig.TRACE_RATE_LIMIT -import static datadog.trace.api.config.TracerConfig.TRACE_SAMPLE_RATE -import static datadog.trace.api.config.TracerConfig.TRACE_SAMPLING_RULES -import static 
datadog.trace.api.config.TracerConfig.TRACE_SAMPLING_SERVICE_RULES -import static datadog.trace.api.sampling.PrioritySampling.SAMPLER_KEEP - -class KnuthSamplingRateTest extends DDCoreSpecification { - static serializer = DDAgentApi.RESPONSE_ADAPTER - - def "updateKnuthSamplingRate formats rate correctly"() { - setup: - def pTags = PropagationTags.factory().empty() - - when: - pTags.updateKnuthSamplingRate(rate) - def tagMap = pTags.createTagMap() - - then: - tagMap.get('_dd.p.ksr') == expected - - where: - rate | expected - 1.0d | "1" - 0.5d | "0.5" - 0.1d | "0.1" - 0.0d | "0" - 0.765432d | "0.765432" - 0.7654321d | "0.765432" - 0.123456d | "0.123456" - 0.100000d | "0.1" - 0.250d | "0.25" - 0.05d | "0.05" - // 6 decimal places: round(0.0123456789 * 1e6) = round(12345.6789) = 12346 - 0.0123456789d | "0.012346" - 0.001d | "0.001" - 0.00500d | "0.005" - // 6 decimal places: round(0.00123456789 * 1e6) = round(1234.56789) = 1235 - 0.00123456789d | "0.001235" - 0.0001d | "0.0001" - 0.000500d | "0.0005" - // 6 decimal places: round(0.000123456789 * 1e6) = round(123.456789) = 123 - 0.000123456789d | "0.000123" - // rounding boundary: round(0.9999995 * 1e6) = round(999999.5) = 1000000 >= 1e6 -> "1" - 0.9999995d | "1" - // values in (0, 1e-4): fixed 6 decimal places, no scientific notation - 0.00001d | "0.00001" - 0.000050d | "0.00005" - // round(1.23456789e-5 * 1e6) = round(12.3456789) = 12 - 1.23456789e-5d | "0.000012" - // below 6-decimal-place precision: round to 0 - 1e-7d | "0" - 5.5e-10d | "0" - // system-tests Test_Knuth_Sample_Rate boundary cases - 0.000001d | "0.000001" // six_decimal_precision_boundary - 0.00000051d | "0.000001" // rounds_up_to_one_millionth - } - - def "agent rate sampler sets ksr propagated tag"() { - setup: - def serviceSampler = new RateByServiceTraceSampler() - def tracer = tracerBuilder().writer(new ListWriter()).build() - String response = '{"rate_by_service": {"service:,env:":' + rate + '}}' - serviceSampler.onResponse("traces", 
serializer.fromJson(response)) - - when: - DDSpan span = tracer.buildSpan("fakeOperation") - .withServiceName("spock") - .withTag("env", "test") - .ignoreActiveSpan().start() - serviceSampler.setSamplingPriority(span) - - def propagationMap = span.context.propagationTags.createTagMap() - def ksr = propagationMap.get('_dd.p.ksr') - - then: - ksr == expectedKsr - - cleanup: - tracer.close() - - where: - rate | expectedKsr - 1.0 | "1" - 0.5 | "0.5" - 0.0 | "0" - } - - def "rule-based sampler sets ksr propagated tag when rule matches"() { - setup: - Properties properties = new Properties() - properties.setProperty(TRACE_SAMPLING_RULES, jsonRules) - properties.setProperty(TRACE_RATE_LIMIT, "50") - def tracer = tracerBuilder().writer(new ListWriter()).build() - - when: - Sampler sampler = Sampler.Builder.forConfig(properties) - DDSpan span = tracer.buildSpan("operation") - .withServiceName("service") - .withTag("env", "bar") - .ignoreActiveSpan().start() - ((PrioritySampler) sampler).setSamplingPriority(span) - - def propagationMap = span.context.propagationTags.createTagMap() - def ksr = propagationMap.get('_dd.p.ksr') - - then: - ksr == expectedKsr - - cleanup: - tracer.close() - - where: - jsonRules | expectedKsr - // Matching rule with rate 1 -> ksr is "1" - '[{"service": "service", "sample_rate": 1}]' | "1" - // Matching rule with rate 0.5 -> ksr is "0.5" - '[{"service": "service", "sample_rate": 0.5}]' | "0.5" - // Matching rule with rate 0 -> ksr is "0" (drop, but ksr still set) - '[{"service": "service", "sample_rate": 0}]' | "0" - } - - def "rule-based sampler fallback to agent sampler sets ksr"() { - setup: - Properties properties = new Properties() - // Rule that does NOT match "service" - properties.setProperty(TRACE_SAMPLING_RULES, '[{"service": "nomatch", "sample_rate": 0.5}]') - properties.setProperty(TRACE_RATE_LIMIT, "50") - def tracer = tracerBuilder().writer(new ListWriter()).build() - - when: - Sampler sampler = Sampler.Builder.forConfig(properties) - 
DDSpan span = tracer.buildSpan("operation") - .withServiceName("service") - .withTag("env", "bar") - .ignoreActiveSpan().start() - ((PrioritySampler) sampler).setSamplingPriority(span) - - def propagationMap = span.context.propagationTags.createTagMap() - def ksr = propagationMap.get('_dd.p.ksr') - - then: - // When falling back to agent sampler, ksr should still be set (agent rate = 1.0 by default) - ksr == "1" - span.getSamplingPriority() == SAMPLER_KEEP - - cleanup: - tracer.close() - } - - def "service rule sampler sets ksr propagated tag"() { - setup: - Properties properties = new Properties() - properties.setProperty(TRACE_SAMPLING_SERVICE_RULES, "service:0.75") - properties.setProperty(TRACE_RATE_LIMIT, "50") - def tracer = tracerBuilder().writer(new ListWriter()).build() - - when: - Sampler sampler = Sampler.Builder.forConfig(properties) - DDSpan span = tracer.buildSpan("operation") - .withServiceName("service") - .withTag("env", "bar") - .ignoreActiveSpan().start() - ((PrioritySampler) sampler).setSamplingPriority(span) - - def propagationMap = span.context.propagationTags.createTagMap() - def ksr = propagationMap.get('_dd.p.ksr') - - then: - ksr == "0.75" - - cleanup: - tracer.close() - } - - def "default rate sampler sets ksr propagated tag"() { - setup: - Properties properties = new Properties() - properties.setProperty(TRACE_SAMPLE_RATE, "0.25") - properties.setProperty(TRACE_RATE_LIMIT, "50") - def tracer = tracerBuilder().writer(new ListWriter()).build() - - when: - Sampler sampler = Sampler.Builder.forConfig(properties) - DDSpan span = tracer.buildSpan("operation") - .withServiceName("service") - .withTag("env", "bar") - .ignoreActiveSpan().start() - ((PrioritySampler) sampler).setSamplingPriority(span) - - def propagationMap = span.context.propagationTags.createTagMap() - def ksr = propagationMap.get('_dd.p.ksr') - - then: - ksr == "0.25" - - cleanup: - tracer.close() - } - - def "ksr is propagated via x-datadog-tags header"() { - setup: - def 
serviceSampler = new RateByServiceTraceSampler() - def tracer = tracerBuilder().writer(new ListWriter()).build() - String response = '{"rate_by_service": {"service:,env:":0.5}}' - serviceSampler.onResponse("traces", serializer.fromJson(response)) - - when: - DDSpan span = tracer.buildSpan("fakeOperation") - .withServiceName("spock") - .withTag("env", "test") - .ignoreActiveSpan().start() - serviceSampler.setSamplingPriority(span) - - def headerValue = span.context.propagationTags.headerValue( - datadog.trace.core.propagation.PropagationTags.HeaderType.DATADOG) - - then: - headerValue != null - headerValue.contains("_dd.p.ksr=0.5") - - cleanup: - tracer.close() - } -} diff --git a/dd-trace-core/src/test/java/datadog/trace/common/writer/ddagent/TraceMapperTestBridge.java b/dd-trace-core/src/test/java/datadog/trace/common/writer/ddagent/TraceMapperTestBridge.java new file mode 100644 index 00000000000..5632ee2e047 --- /dev/null +++ b/dd-trace-core/src/test/java/datadog/trace/common/writer/ddagent/TraceMapperTestBridge.java @@ -0,0 +1,17 @@ +package datadog.trace.common.writer.ddagent; + +import datadog.communication.serialization.GrowableBuffer; +import java.util.Map; + +/** + * Bridge class to allow tests to access package-private method exposed by the {@code TraceMapper} + */ +public class TraceMapperTestBridge { + public static GrowableBuffer getDictionary(TraceMapperV0_5 traceMapperV05) { + return traceMapperV05.getDictionary(); + } + + public static Map getEncoding(TraceMapperV0_5 traceMapperV05) { + return traceMapperV05.getEncoding(); + } +} diff --git a/dd-trace-core/src/test/java/datadog/trace/core/DDSpanContextTest.java b/dd-trace-core/src/test/java/datadog/trace/core/DDSpanContextTest.java new file mode 100644 index 00000000000..c23ba292fb2 --- /dev/null +++ b/dd-trace-core/src/test/java/datadog/trace/core/DDSpanContextTest.java @@ -0,0 +1,567 @@ +package datadog.trace.core; + +import static datadog.trace.api.TracePropagationStyle.DATADOG; +import static 
datadog.trace.api.sampling.PrioritySampling.SAMPLER_DROP; +import static datadog.trace.api.sampling.PrioritySampling.SAMPLER_KEEP; +import static datadog.trace.api.sampling.PrioritySampling.UNSET; +import static datadog.trace.api.sampling.PrioritySampling.USER_DROP; +import static datadog.trace.api.sampling.PrioritySampling.USER_KEEP; +import static datadog.trace.api.sampling.SamplingMechanism.DEFAULT; +import static datadog.trace.api.sampling.SamplingMechanism.MANUAL; +import static datadog.trace.api.sampling.SamplingMechanism.SPAN_SAMPLING_RATE; +import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_MAX_PER_SECOND_TAG; +import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_MECHANISM_TAG; +import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_RULE_RATE_TAG; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.params.provider.Arguments.arguments; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import datadog.trace.api.DDTags; +import datadog.trace.api.DDTraceId; +import datadog.trace.api.internal.TraceSegment; +import datadog.trace.bootstrap.instrumentation.api.AgentSpan; +import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; +import datadog.trace.bootstrap.instrumentation.api.ErrorPriorities; +import datadog.trace.bootstrap.instrumentation.api.ProfilingContextIntegration; +import datadog.trace.bootstrap.instrumentation.api.ServiceNameSources; +import datadog.trace.bootstrap.instrumentation.api.Tags; +import datadog.trace.common.writer.ListWriter; +import datadog.trace.core.propagation.ExtractedContext; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Stream; 
+import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.tabletest.junit.TableTest; +import org.tabletest.junit.TypeConverter; + +public class DDSpanContextTest extends DDCoreJavaSpecification { + + private ListWriter writer; + private CoreTracer tracer; + private ProfilingContextIntegration profilingContextIntegration; + + @BeforeEach + void setup() { + writer = new ListWriter(); + profilingContextIntegration = mock(ProfilingContextIntegration.class); + tracer = + tracerBuilder() + .writer(writer) + .profilingContextIntegration(profilingContextIntegration) + .build(); + } + + // spotless:off + @TableTest({ + "scenario | name", + "SERVICE_NAME | " + DDTags.SERVICE_NAME, + "RESOURCE_NAME | " + DDTags.RESOURCE_NAME, + "SPAN_TYPE | " + DDTags.SPAN_TYPE, + "spme.tag | some.tag" + }) + //spotless:on + void nullValuesForTagsDeleteExistingTags(String scenario, String name) throws Exception { + AgentSpan span = + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .withSpanType("fakeType") + .start(); + DDSpanContext context = (DDSpanContext) span.context(); + + context.setTag("some.tag", "asdf"); + context.setTag(name, (String) null); + context.setErrorFlag(true, ErrorPriorities.DEFAULT); + span.finish(); + writer.waitForTraces(1); + + Thread thread = Thread.currentThread(); + Map expectedTags = new HashMap<>(); + if (!name.equals("some.tag")) { + expectedTags.put("some.tag", "asdf"); + } + expectedTags.put(DDTags.THREAD_NAME, thread.getName()); + expectedTags.put(DDTags.THREAD_ID, thread.getId()); + expectedTags.put(DDTags.DD_SVC_SRC, ServiceNameSources.MANUAL); + + assertTagmap(context.getTags(), expectedTags); + assertEquals("fakeService", context.getServiceName()); + assertEquals("fakeResource", 
context.getResourceName().toString()); + assertEquals("fakeType", context.getSpanType().toString()); + } + + // spotless:off + @TableTest({ + "scenario | name | value | method ", + "SERVICE_NAME tag | " + DDTags.SERVICE_NAME + " | different service | serviceName ", + "RESOURCE_NAME tag | " + DDTags.RESOURCE_NAME + " | different resource | resourceName ", + "SPAN_TYPE tag | " + DDTags.SPAN_TYPE + " | different type | spanType " + }) + //spotless:on + void specialTagsSetCertainValues(String scenario, String name, String value, String method) + throws Exception { + AgentSpan span = + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .withSpanType("fakeType") + .start(); + DDSpanContext context = (DDSpanContext) span.context(); + + context.setTag(name, value); + span.finish(); + writer.waitForTraces(1); + + Thread thread = Thread.currentThread(); + Map expectedTags = new HashMap<>(); + expectedTags.put(DDTags.THREAD_NAME, thread.getName()); + expectedTags.put(DDTags.THREAD_ID, thread.getId()); + expectedTags.put(DDTags.DD_SVC_SRC, ServiceNameSources.MANUAL); + assertTagmap(context.getTags(), expectedTags); + + Object actualValue; + switch (method) { + case "serviceName": + actualValue = context.getServiceName(); + break; + case "resourceName": + actualValue = context.getResourceName().toString(); + break; + case "spanType": + actualValue = context.getSpanType().toString(); + break; + default: + throw new IllegalArgumentException("Unknown method: " + method); + } + assertEquals(value, actualValue); + } + + static Stream tagsCanBeAddedToContextArguments() { + return Stream.of( + arguments("tag.name", "some value"), + arguments("tag with int", 1234), + arguments("tag-with-bool", false), + arguments("tag_with_float", 0.321)); + } + + @ParameterizedTest + @MethodSource("tagsCanBeAddedToContextArguments") + void tagsCanBeAddedToContext(String name, Object value) throws Exception { + AgentSpan span = + tracer + 
.buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .withSpanType("fakeType") + .start(); + DDSpanContext context = (DDSpanContext) span.context(); + + context.setTag(name, value); + span.finish(); + writer.waitForTraces(1); + + Thread thread = Thread.currentThread(); + Map expectedTags = new HashMap<>(); + expectedTags.put(name, value); + expectedTags.put(DDTags.THREAD_NAME, thread.getName()); + expectedTags.put(DDTags.THREAD_ID, thread.getId()); + expectedTags.put(DDTags.DD_SVC_SRC, ServiceNameSources.MANUAL); + + assertTagmap(context.getTags(), expectedTags); + } + + @TableTest({ + "expectedType | value ", + "java.lang.Integer | 0 ", + "java.lang.Integer | Integer.MAX_VALUE", + "java.lang.Integer | Integer.MIN_VALUE", + "java.lang.Short | Short.MAX_VALUE ", + "java.lang.Short | Short.MIN_VALUE ", + "java.lang.Float | Float.MAX_VALUE ", + "java.lang.Float | Float.MIN_VALUE ", + "java.lang.Double | Double.MAX_VALUE ", + "java.lang.Double | Double.MIN_VALUE ", + "java.lang.Float | 1f ", + "java.lang.Double | 1d ", + "java.lang.Float | 0.5f ", + "java.lang.Double | 0.5d ", + "java.lang.Integer | 0x55 " + }) + void metricsUseExpectedTypes(Class expectedType, Number value) { + AgentSpan span = + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .start(); + DDSpanContext context = (DDSpanContext) span.context(); + + context.setMetric("test", value); + + assertTrue(expectedType.isInstance(context.getTag("test"))); + + span.finish(); + } + + @TypeConverter + public static Number toNumber(String value) { + if (value == null) { + throw new IllegalArgumentException("Value cannot be null"); + } + switch (value) { + case "Integer.MAX_VALUE": + return Integer.MAX_VALUE; + case "Integer.MIN_VALUE": + return Integer.MIN_VALUE; + case "Short.MAX_VALUE": + return Short.MAX_VALUE; + case "Short.MIN_VALUE": + return Short.MIN_VALUE; + case "Float.MAX_VALUE": + return 
Float.MAX_VALUE; + case "Float.MIN_VALUE": + return Float.MIN_VALUE; + case "Double.MAX_VALUE": + return Double.MAX_VALUE; + case "Double.MIN_VALUE": + return Double.MIN_VALUE; + default: + if (value.endsWith("f")) { + return Float.parseFloat(value); + } + if (value.endsWith("d")) { + return Double.parseDouble(value); + } + return Integer.decode(value); + } + } + + @Test + void forceKeepReallyKeepsTrace() { + AgentSpan span = + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .start(); + DDSpanContext context = (DDSpanContext) span.context(); + + context.setSamplingPriority(SAMPLER_DROP, DEFAULT); + assertEquals(SAMPLER_DROP, context.getSamplingPriority()); + + context.lockSamplingPriority(); + assertFalse(context.setSamplingPriority(USER_DROP, MANUAL)); + assertEquals(SAMPLER_DROP, context.getSamplingPriority()); + + context.forceKeep(); + assertEquals(USER_KEEP, context.getSamplingPriority()); + + span.finish(); + } + + @Test + void setTraceSegmentTagsAndDataOnCorrectSpan() { + ExtractedContext extracted = + (ExtractedContext) + new ExtractedContext( + DDTraceId.from(123), + 456, + SAMPLER_KEEP, + "789", + tracer.getPropagationTagsFactory().empty(), + DATADOG) + .withRequestContextDataAppSec("dummy"); + + AgentSpan top = tracer.buildSpan("top").asChildOf((AgentSpanContext) extracted).start(); + DDSpanContext topC = (DDSpanContext) top.context(); + TraceSegment topTS = top.getRequestContext().getTraceSegment(); + + AgentSpan current = tracer.buildSpan("current").asChildOf(top.context()).start(); + TraceSegment currentTS = current.getRequestContext().getTraceSegment(); + DDSpanContext currentC = (DDSpanContext) current.context(); + + currentTS.setDataTop("ctd", "[1]"); + currentTS.setTagTop("ctt", "t1"); + currentTS.setDataCurrent("ccd", "[2]"); + currentTS.setTagCurrent("cct", "t2"); + topTS.setDataTop("ttd", "[3]"); + topTS.setTagTop("ttt", "t3"); + topTS.setDataCurrent("tcd", "[4]"); + 
topTS.setTagCurrent("tct", "t4"); + + Map expectedTopTags = new HashMap<>(); + expectedTopTags.put(dataTag("ctd"), "[1]"); + expectedTopTags.put("ctt", "t1"); + expectedTopTags.put(dataTag("ttd"), "[3]"); + expectedTopTags.put("ttt", "t3"); + expectedTopTags.put(dataTag("tcd"), "[4]"); + expectedTopTags.put("tct", "t4"); + assertTagmap(topC.getTags(), expectedTopTags, true); + + Map expectedCurrentTags = new HashMap<>(); + expectedCurrentTags.put(dataTag("ccd"), "[2]"); + expectedCurrentTags.put("cct", "t2"); + assertTagmap(currentC.getTags(), expectedCurrentTags, true); + + current.finish(); + top.finish(); + } + + @TableTest({ + "scenario | rate | limit ", + "rate=1.0 limit=10 | 1.0 | 10 ", + "rate=0.5 limit=100 | 0.5 | 100 ", + "rate=0.25 no limit | 0.25 | 2147483647" + }) + void setSingleSpanSamplingTags(String scenario, double rate, int limit) { + AgentSpan span = + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .start(); + DDSpanContext context = (DDSpanContext) span.context(); + + assertEquals(UNSET, context.getSamplingPriority()); + + context.setSpanSamplingPriority(rate, limit); + + assertEquals((int) SPAN_SAMPLING_RATE, context.getTag(SPAN_SAMPLING_MECHANISM_TAG)); + assertEquals(rate, context.getTag(SPAN_SAMPLING_RULE_RATE_TAG)); + assertEquals( + limit == Integer.MAX_VALUE ? 
null : Double.valueOf(limit), + context.getTag(SPAN_SAMPLING_MAX_PER_SECOND_TAG)); + assertEquals(UNSET, context.getSamplingPriority()); + assertFalse(context.getPropagationTags().createTagMap().containsKey("_dd.p.dm")); + + span.finish(); + } + + @Test + void settingResourceNameToNullIsIgnored() { + AgentSpan span = + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .start(); + + span.setResourceName(null); + + assertEquals("fakeResource", ((DDSpan) span).getResourceName().toString()); + + span.finish(); + } + + @Test + void settingOperationNameTriggersConstantEncoding() { + when(profilingContextIntegration.encodeOperationName("fakeOperation")).thenReturn(1); + when(profilingContextIntegration.encodeResourceName("fakeResource")).thenReturn(-1); + + AgentSpan span = + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .start(); + + verify(profilingContextIntegration, times(1)).encodeOperationName("fakeOperation"); + verify(profilingContextIntegration, times(1)).encodeResourceName("fakeResource"); + assertEquals(1, ((DDSpanContext) span.context()).getEncodedOperationName()); + assertEquals(-1, ((DDSpanContext) span.context()).getEncodedResourceName()); + + when(profilingContextIntegration.encodeOperationName("newOperationName")).thenReturn(2); + span.setOperationName("newOperationName"); + + verify(profilingContextIntegration, times(1)).encodeOperationName("newOperationName"); + assertEquals(2, ((DDSpanContext) span.context()).getEncodedOperationName()); + + when(profilingContextIntegration.encodeResourceName("newResourceName")).thenReturn(-2); + span.setResourceName("newResourceName"); + + verify(profilingContextIntegration, times(1)).encodeResourceName("newResourceName"); + assertEquals(-2, ((DDSpanContext) span.context()).getEncodedResourceName()); + + span.finish(); + } + + @Test + void spanIdsPrintedAsUnsignedLong() { + AgentSpan parent = + 
tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .withSpanId(-987654321) + .start(); + + AgentSpan span = + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .withSpanId(-123456789) + .asChildOf(parent.context()) + .start(); + + DDSpanContext context = (DDSpanContext) span.context(); + + assertFalse(context.toString().contains("id=-")); + + span.finish(); + parent.finish(); + } + + @Test + void serviceNameSourceIsPropagatedFromParentToChildSpan() { + AgentSpan parent = tracer.buildSpan("parentOperation").withServiceName("fakeService").start(); + + AgentSpan child = tracer.buildSpan("childOperation").asChildOf(parent.context()).start(); + DDSpanContext childContext = (DDSpanContext) child.context(); + + assertEquals(ServiceNameSources.MANUAL, childContext.getServiceNameSource()); + + child.finish(); + parent.finish(); + } + + @Test + void spanKindOrdinalConstantsAndSpanKindValuesArrayStayInSync() { + assertEquals(DDSpanContext.SPAN_KIND_CUSTOM + 1, DDSpanContext.SPAN_KIND_VALUES.length); + + assertEquals( + Tags.SPAN_KIND_SERVER, DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_SERVER]); + assertEquals( + Tags.SPAN_KIND_CLIENT, DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_CLIENT]); + assertEquals( + Tags.SPAN_KIND_PRODUCER, DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_PRODUCER]); + assertEquals( + Tags.SPAN_KIND_CONSUMER, DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_CONSUMER]); + assertEquals( + Tags.SPAN_KIND_INTERNAL, DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_INTERNAL]); + assertEquals( + Tags.SPAN_KIND_BROKER, DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_BROKER]); + + assertNull(DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_UNSET]); + assertNull(DDSpanContext.SPAN_KIND_VALUES[DDSpanContext.SPAN_KIND_CUSTOM]); + } + + @TableTest({ + "scenario | kindString | expectedOrdinal", + 
"server | server | 1 ", + "client | client | 2 ", + "producer | producer | 3 ", + "consumer | consumer | 4 ", + "internal | internal | 5 ", + "broker | broker | 6 " + }) + void setSpanKindOrdinalRoundTripsWithSpanKindValues( + String scenario, String kindString, int expectedOrdinal) { + AgentSpan span = tracer.buildSpan("test", "test").start(); + DDSpanContext context = (DDSpanContext) span.context(); + context.setSpanKindOrdinal(kindString); + + assertEquals(expectedOrdinal, context.getSpanKindOrdinal()); + assertEquals(kindString, DDSpanContext.SPAN_KIND_VALUES[expectedOrdinal]); + + span.finish(); + } + + @TableTest({ + "scenario | kindString", + "server | server ", + "client | client ", + "producer | producer ", + "consumer | consumer ", + "internal | internal ", + "broker | broker " + }) + void setTagAndGetTagRoundTripForSpanKind(String scenario, String kindString) { + AgentSpan span = tracer.buildSpan("test", "test").start(); + span.setTag(Tags.SPAN_KIND, kindString); + + assertEquals(kindString, span.getTag(Tags.SPAN_KIND)); + + span.finish(); + } + + @Test + void getTagReturnsNullWhenSpanKindNotSet() { + AgentSpan span = tracer.buildSpan("test", "test").start(); + + assertNull(span.getTag(Tags.SPAN_KIND)); + + span.finish(); + } + + @TableTest({ + "scenario | kindString", + "server | server ", + "client | client ", + "producer | producer ", + "consumer | consumer ", + "internal | internal ", + "broker | broker " + }) + void setTagThenRemoveTagClearsSpanKind(String scenario, String kindString) { + AgentSpan span = tracer.buildSpan("test", "test").start(); + span.setTag(Tags.SPAN_KIND, kindString); + + assertEquals(kindString, span.getTag(Tags.SPAN_KIND)); + + ((DDSpan) span).context().removeTag(Tags.SPAN_KIND); + + assertNull(span.getTag(Tags.SPAN_KIND)); + + span.finish(); + } + + @Test + void setTagWithCustomSpanKindFallsBackToTagMap() { + AgentSpan span = tracer.buildSpan("test", "test").start(); + span.setTag(Tags.SPAN_KIND, "custom-kind"); + + 
assertEquals("custom-kind", span.getTag(Tags.SPAN_KIND)); + + span.finish(); + } + + static void assertTagmap(Map source, Map comparison) { + assertTagmap(source, comparison, false); + } + + static void assertTagmap(Map source, Map comparison, boolean removeThread) { + Map sourceWithoutCommonTags = new HashMap<>(source); + sourceWithoutCommonTags.remove("runtime-id"); + sourceWithoutCommonTags.remove("language"); + sourceWithoutCommonTags.remove("_dd.agent_psr"); + sourceWithoutCommonTags.remove("_sample_rate"); + sourceWithoutCommonTags.remove("process_id"); + sourceWithoutCommonTags.remove("_dd.trace_span_attribute_schema"); + sourceWithoutCommonTags.remove(DDTags.PROFILING_ENABLED); + sourceWithoutCommonTags.remove(DDTags.PROFILING_CONTEXT_ENGINE); + sourceWithoutCommonTags.remove(DDTags.DSM_ENABLED); + sourceWithoutCommonTags.remove(DDTags.DJM_ENABLED); + if (removeThread) { + sourceWithoutCommonTags.remove(DDTags.THREAD_ID); + sourceWithoutCommonTags.remove(DDTags.THREAD_NAME); + } + assertEquals(comparison, sourceWithoutCommonTags); + } + + private static String dataTag(String tag) { + return "_dd." 
+ tag + ".json"; + } +} diff --git a/dd-trace-core/src/test/java/datadog/trace/core/DDSpanSerializationTest.java b/dd-trace-core/src/test/java/datadog/trace/core/DDSpanSerializationTest.java new file mode 100644 index 00000000000..78c8dda3c94 --- /dev/null +++ b/dd-trace-core/src/test/java/datadog/trace/core/DDSpanSerializationTest.java @@ -0,0 +1,500 @@ +package datadog.trace.core; + +import static datadog.trace.api.config.GeneralConfig.EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import datadog.communication.serialization.ByteBufferConsumer; +import datadog.communication.serialization.FlushingBuffer; +import datadog.communication.serialization.GrowableBuffer; +import datadog.communication.serialization.msgpack.MsgPackWriter; +import datadog.trace.api.Config; +import datadog.trace.api.DDSpanId; +import datadog.trace.api.DDTraceId; +import datadog.trace.api.ProcessTags; +import datadog.trace.api.datastreams.NoopPathwayContext; +import datadog.trace.api.sampling.PrioritySampling; +import datadog.trace.common.writer.ListWriter; +import datadog.trace.common.writer.ddagent.TraceMapperTestBridge; +import datadog.trace.common.writer.ddagent.TraceMapperV0_4; +import datadog.trace.common.writer.ddagent.TraceMapperV0_5; +import datadog.trace.junit.utils.config.WithConfig; +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.msgpack.core.MessageFormat; +import org.msgpack.core.MessagePack; +import org.msgpack.core.MessageUnpacker; +import org.msgpack.core.buffer.ArrayBufferInput; +import org.msgpack.value.ValueType; +import org.tabletest.junit.TableTest; + +@WithConfig(key = EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED, value = "false") +public class DDSpanSerializationTest extends DDCoreJavaSpecification { + + @BeforeAll + 
static void beforeAll() { + ProcessTags.reset(Config.get()); + } + + @AfterAll + static void afterAll() { + ProcessTags.reset(Config.get()); + } + + @TableTest({ + "scenario | value | spanType ", + "zero | 0 | ", + "one with type | 1 | some-type", + "max long minus one billion | 8223372036854775807 | ", + "Long.MAX_VALUE minus one | 9223372036854775806 | some-type", + "Long.MAX_VALUE plus one | 9223372036854775808 | ", + "2^64 minus one | 18446744073709551615 | some-type" + }) + void serializeTraceWithIdAsInt(String scenario, String value, String spanType) throws Exception { + ListWriter writer = new ListWriter(); + CoreTracer tracer = tracerBuilder().writer(writer).build(); + DDTraceId traceId = DDTraceId.from(value); + long spanId = DDSpanId.from(value); + DDSpanContext context = createContext(spanType, tracer, traceId, spanId); + DDSpan span = DDSpan.create("test", 0, context, null); + CaptureBuffer capture = new CaptureBuffer(); + MsgPackWriter packer = new MsgPackWriter(new FlushingBuffer(1024, capture)); + packer.format(Collections.singletonList(span), new TraceMapperV0_4()); + packer.flush(); + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)); + int traceCount = capture.messageCount; + int spanCount = unpacker.unpackArrayHeader(); + int size = unpacker.unpackMapHeader(); + + assertEquals(1, traceCount); + assertEquals(1, spanCount); + assertEquals(12, size); + for (int i = 0; i < size; i++) { + String key = unpacker.unpackString(); + switch (key) { + case "trace_id": + MessageFormat traceIdFormat = unpacker.getNextFormat(); + assertEquals(ValueType.INTEGER, traceIdFormat.getValueType()); + if (traceIdFormat == MessageFormat.UINT64) { + assertEquals(traceId, DDTraceId.from(unpacker.unpackBigInteger().toString())); + } else { + assertEquals(traceId, DDTraceId.from(unpacker.unpackLong())); + } + break; + case "span_id": + MessageFormat spanIdFormat = unpacker.getNextFormat(); + assertEquals(ValueType.INTEGER, 
spanIdFormat.getValueType()); + if (spanIdFormat == MessageFormat.UINT64) { + assertEquals(spanId, DDSpanId.from(unpacker.unpackBigInteger().toString())); + } else { + assertEquals(spanId, unpacker.unpackLong()); + } + break; + default: + unpacker.unpackValue(); + } + } + tracer.close(); + } + + @TableTest({ + "scenario | value | spanType ", + "zero | 0 | ", + "one with type | 1 | some-type", + "max long minus one billion | 8223372036854775807 | ", + "Long.MAX_VALUE minus one | 9223372036854775806 | some-type", + "Long.MAX_VALUE plus one | 9223372036854775808 | ", + "2^64 minus one | 18446744073709551615 | some-type" + }) + void serializeTraceWithIdAsIntV05(String scenario, String value, String spanType) + throws Exception { + ListWriter writer = new ListWriter(); + CoreTracer tracer = tracerBuilder().writer(writer).build(); + DDTraceId traceId = DDTraceId.from(value); + long spanId = DDSpanId.from(value); + DDSpanContext context = createContext(spanType, tracer, traceId, spanId); + DDSpan span = DDSpan.create("test", 0, context, null); + CaptureBuffer capture = new CaptureBuffer(); + MsgPackWriter packer = new MsgPackWriter(new FlushingBuffer(1024, capture)); + TraceMapperV0_5 traceMapper = new TraceMapperV0_5(); + packer.format(Collections.singletonList(span), traceMapper); + packer.flush(); + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)); + int traceCount = capture.messageCount; + int spanCount = unpacker.unpackArrayHeader(); + int size = unpacker.unpackArrayHeader(); + + assertEquals(1, traceCount); + assertEquals(1, spanCount); + assertEquals(12, size); + for (int i = 0; i < size; i++) { + switch (i) { + case 3: + MessageFormat traceIdFormat = unpacker.getNextFormat(); + assertEquals(ValueType.INTEGER, traceIdFormat.getValueType()); + if (traceIdFormat == MessageFormat.UINT64) { + assertEquals(traceId, DDTraceId.from(unpacker.unpackBigInteger().toString())); + } else { + assertEquals(traceId, 
DDTraceId.from(unpacker.unpackLong())); + } + break; + case 4: + MessageFormat spanIdFormat = unpacker.getNextFormat(); + assertEquals(ValueType.INTEGER, spanIdFormat.getValueType()); + if (spanIdFormat == MessageFormat.UINT64) { + assertEquals(spanId, DDSpanId.from(unpacker.unpackBigInteger().toString())); + } else { + assertEquals(spanId, unpacker.unpackLong()); + } + break; + default: + unpacker.unpackValue(); + } + } + tracer.close(); + } + + @TableTest({ + "scenario | baggage | tags | expected | injectBaggage", + "empty baggage and tags inject | [:] | [:] | [:] | true ", + "baggage only inject | [foo: bbar] | [:] | [foo: bbar] | true ", + "baggage and tags inject no overlap | [foo: bbar] | [bar: tfoo] | [foo: bbar, bar: tfoo] | true ", + "baggage and tags inject tag wins | [foo: bbar] | [foo: tbar] | [foo: tbar] | true ", + "empty baggage and tags no inject | [:] | [:] | [:] | false ", + "baggage only no inject | [foo: bbar] | [:] | [:] | false ", + "baggage and tags no inject no overlap | [foo: bbar] | [bar: tfoo] | [bar: tfoo] | false ", + "baggage and tags no inject tag wins | [foo: bbar] | [foo: tbar] | [foo: tbar] | false " + }) + void serializeTraceWithBaggageAndTagsCorrectlyV04( + String scenario, + Map baggage, + Map tags, + Map expected, + boolean injectBaggage) + throws Exception { + ListWriter writer = new ListWriter(); + CoreTracer tracer = tracerBuilder().writer(writer).build(); + DDSpanContext context = + new DDSpanContext( + DDTraceId.ONE, + 1, + DDSpanId.ZERO, + null, + "fakeService", + "fakeOperation", + "fakeResource", + PrioritySampling.UNSET, + null, + baggage, + false, + null, + tags.size(), + tracer.createTraceCollector(DDTraceId.ONE), + null, + null, + NoopPathwayContext.INSTANCE, + false, + null, + injectBaggage, + true); + context.setAllTags(tags); + DDSpan span = DDSpan.create("test", 0, context, null); + CaptureBuffer capture = new CaptureBuffer(); + MsgPackWriter packer = new MsgPackWriter(new FlushingBuffer(1024, capture)); + 
packer.format(Collections.singletonList(span), new TraceMapperV0_4()); + packer.flush(); + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)); + int traceCount = capture.messageCount; + int spanCount = unpacker.unpackArrayHeader(); + int size = unpacker.unpackMapHeader(); + + assertEquals(1, traceCount); + assertEquals(1, spanCount); + assertEquals(12, size); + for (int i = 0; i < size; i++) { + String key = unpacker.unpackString(); + if ("meta".equals(key)) { + int packedSize = unpacker.unpackMapHeader(); + Map unpackedMeta = new HashMap<>(); + for (int j = 0; j < packedSize; j++) { + String k = unpacker.unpackString(); + String v = unpacker.unpackString(); + if (!"thread.name".equals(k) && !"thread.id".equals(k)) { + unpackedMeta.put(k, v); + } + } + assertEquals(expected, unpackedMeta); + } else { + unpacker.unpackValue(); + } + } + tracer.close(); + } + + @TableTest({ + "scenario | baggage | tags | expected | injectBaggage", + "empty baggage and tags inject | [:] | [:] | [:] | true ", + "baggage only inject | [foo: bbar] | [:] | [foo: bbar] | true ", + "baggage and tags inject no overlap | [foo: bbar] | [bar: tfoo] | [foo: bbar, bar: tfoo] | true ", + "baggage and tags inject tag wins | [foo: bbar] | [foo: tbar] | [foo: tbar] | true ", + "empty baggage and tags no inject | [:] | [:] | [:] | false ", + "baggage only no inject | [foo: bbar] | [:] | [:] | false ", + "baggage and tags no inject no overlap | [foo: bbar] | [bar: tfoo] | [bar: tfoo] | false ", + "baggage and tags no inject tag wins | [foo: bbar] | [foo: tbar] | [foo: tbar] | false " + }) + void serializeTraceWithBaggageAndTagsCorrectlyV05( + String scenario, + Map baggage, + Map tags, + Map expected, + boolean injectBaggage) + throws Exception { + ListWriter writer = new ListWriter(); + CoreTracer tracer = tracerBuilder().writer(writer).build(); + DDSpanContext context = + new DDSpanContext( + DDTraceId.ONE, + 1, + DDSpanId.ZERO, + null, + "fakeService", + 
"fakeOperation", + "fakeResource", + PrioritySampling.UNSET, + null, + baggage, + false, + null, + tags.size(), + tracer.createTraceCollector(DDTraceId.ONE), + null, + null, + NoopPathwayContext.INSTANCE, + false, + null, + injectBaggage, + true); + context.setAllTags(tags); + DDSpan span = DDSpan.create("test", 0, context, null); + CaptureBuffer capture = new CaptureBuffer(); + MsgPackWriter packer = new MsgPackWriter(new FlushingBuffer(1024, capture)); + TraceMapperV0_5 mapper = new TraceMapperV0_5(); + packer.format(Collections.singletonList(span), mapper); + packer.flush(); + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)); + int traceCount = capture.messageCount; + int spanCount = unpacker.unpackArrayHeader(); + int size = unpacker.unpackArrayHeader(); + String[] dictionary = buildDictionary(mapper); + + assertEquals(1, traceCount); + assertEquals(1, spanCount); + assertEquals(12, size); + for (int i = 0; i < 9; ++i) { + unpacker.skipValue(); + } + int packedSize = unpacker.unpackMapHeader(); + Map unpackedMeta = new HashMap<>(); + for (int j = 0; j < packedSize; j++) { + String k = dictionary[unpacker.unpackInt()]; + String v = dictionary[unpacker.unpackInt()]; + if (!"thread.name".equals(k) && !"thread.id".equals(k)) { + unpackedMeta.put(k, v); + } + } + assertEquals(expected, unpackedMeta); + tracer.close(); + } + + @Test + void serializeTraceWithFlatMapTagV04() throws Exception { + CoreTracer tracer = tracerBuilder().writer(new ListWriter()).build(); + DDSpanContext context = + new DDSpanContext( + DDTraceId.ONE, + 1, + DDSpanId.ZERO, + null, + "fakeService", + "fakeOperation", + "fakeResource", + PrioritySampling.UNSET, + null, + null, + false, + null, + 0, + tracer.createTraceCollector(DDTraceId.ONE), + null, + null, + NoopPathwayContext.INSTANCE, + false, + null); + context.setTag("key1", "value1"); + Map nested = new HashMap<>(); + nested.put("sub1", "v1"); + nested.put("sub2", "v2"); + 
context.setTag("key2", nested); + DDSpan span = DDSpan.create("test", 0, context, null); + + CaptureBuffer capture = new CaptureBuffer(); + MsgPackWriter packer = new MsgPackWriter(new FlushingBuffer(1024, capture)); + packer.format(Collections.singletonList(span), new TraceMapperV0_4()); + packer.flush(); + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)); + int traceCount = capture.messageCount; + int spanCount = unpacker.unpackArrayHeader(); + int size = unpacker.unpackMapHeader(); + + Map expectedMeta = new HashMap<>(); + expectedMeta.put("key1", "value1"); + expectedMeta.put("key2.sub1", "v1"); + expectedMeta.put("key2.sub2", "v2"); + + assertEquals(1, traceCount); + assertEquals(1, spanCount); + for (int i = 0; i < size; i++) { + String key = unpacker.unpackString(); + if ("meta".equals(key)) { + int packedSize = unpacker.unpackMapHeader(); + Map unpackedMeta = new HashMap<>(); + for (int j = 0; j < packedSize; j++) { + String k = unpacker.unpackString(); + String v = unpacker.unpackString(); + if (!"thread.name".equals(k) && !"thread.id".equals(k)) { + unpackedMeta.put(k, v); + } + } + assertEquals(expectedMeta, unpackedMeta); + } else { + unpacker.unpackValue(); + } + } + tracer.close(); + } + + @Test + void serializeTraceWithFlatMapTagV05() throws Exception { + CoreTracer tracer = tracerBuilder().writer(new ListWriter()).build(); + DDSpanContext context = + new DDSpanContext( + DDTraceId.ONE, + 1, + DDSpanId.ZERO, + null, + "fakeService", + "fakeOperation", + "fakeResource", + PrioritySampling.UNSET, + null, + null, + false, + null, + 0, + tracer.createTraceCollector(DDTraceId.ONE), + null, + null, + NoopPathwayContext.INSTANCE, + false, + null); + context.setTag("key1", "value1"); + Map nested = new HashMap<>(); + nested.put("sub1", "v1"); + nested.put("sub2", "v2"); + context.setTag("key2", nested); + DDSpan span = DDSpan.create("test", 0, context, null); + + CaptureBuffer capture = new CaptureBuffer(); + 
MsgPackWriter packer = new MsgPackWriter(new FlushingBuffer(1024, capture)); + TraceMapperV0_5 mapper = new TraceMapperV0_5(); + packer.format(Collections.singletonList(span), mapper); + packer.flush(); + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(new ArrayBufferInput(capture.bytes)); + int traceCount = capture.messageCount; + int spanCount = unpacker.unpackArrayHeader(); + int size = unpacker.unpackArrayHeader(); + String[] dictionary = buildDictionary(mapper); + + Map expectedMeta = new HashMap<>(); + expectedMeta.put("key1", "value1"); + expectedMeta.put("key2.sub1", "v1"); + expectedMeta.put("key2.sub2", "v2"); + + assertEquals(1, traceCount); + assertEquals(1, spanCount); + assertEquals(12, size); + for (int i = 0; i < 9; ++i) { + unpacker.skipValue(); + } + int packedSize = unpacker.unpackMapHeader(); + Map unpackedMeta = new HashMap<>(); + for (int j = 0; j < packedSize; j++) { + String k = dictionary[unpacker.unpackInt()]; + String v = dictionary[unpacker.unpackInt()]; + if (!"thread.name".equals(k) && !"thread.id".equals(k)) { + unpackedMeta.put(k, v); + } + } + assertEquals(expectedMeta, unpackedMeta); + tracer.close(); + } + + private static class CaptureBuffer implements ByteBufferConsumer { + private byte[] bytes; + int messageCount; + + @Override + public void accept(int messageCount, ByteBuffer buffer) { + this.messageCount = messageCount; + this.bytes = new byte[buffer.limit() - buffer.position()]; + buffer.get(bytes); + } + } + + private DDSpanContext createContext( + String spanType, CoreTracer tracer, DDTraceId traceId, long spanId) { + Map baggage = new HashMap<>(); + baggage.put("a-baggage", "value"); + DDSpanContext ctx = + new DDSpanContext( + traceId, + spanId, + DDSpanId.ZERO, + null, + "fakeService", + "fakeOperation", + "fakeResource", + PrioritySampling.UNSET, + null, + baggage, + false, + spanType, + 1, + tracer.createTraceCollector(DDTraceId.ONE), + null, + null, + NoopPathwayContext.INSTANCE, + false, + null); + Map 
tags = new HashMap<>(); + tags.put("k1", "v1"); + ctx.setAllTags(tags); + return ctx; + } + + private static String[] buildDictionary(TraceMapperV0_5 mapper) throws Exception { + GrowableBuffer dictionaryBuffer = TraceMapperTestBridge.getDictionary(mapper); + Map encoding = TraceMapperTestBridge.getEncoding(mapper); + + String[] dictionary = new String[encoding.size()]; + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(dictionaryBuffer.slice()); + for (int i = 0; i < dictionary.length; ++i) { + dictionary[i] = unpacker.unpackString(); + } + return dictionary; + } +} diff --git a/dd-trace-core/src/test/java/datadog/trace/core/DDSpanTest.java b/dd-trace-core/src/test/java/datadog/trace/core/DDSpanTest.java new file mode 100644 index 00000000000..57a14cabf02 --- /dev/null +++ b/dd-trace-core/src/test/java/datadog/trace/core/DDSpanTest.java @@ -0,0 +1,484 @@ +package datadog.trace.core; + +import static datadog.trace.api.TracePropagationStyle.DATADOG; +import static datadog.trace.api.sampling.PrioritySampling.SAMPLER_DROP; +import static datadog.trace.api.sampling.PrioritySampling.SAMPLER_KEEP; +import static datadog.trace.api.sampling.PrioritySampling.UNSET; +import static datadog.trace.api.sampling.SamplingMechanism.SPAN_SAMPLING_RATE; +import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_MAX_PER_SECOND_TAG; +import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_MECHANISM_TAG; +import static datadog.trace.core.DDSpanContext.SPAN_SAMPLING_RULE_RATE_TAG; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.params.provider.Arguments.arguments; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import 
datadog.trace.api.DDSpanId; +import datadog.trace.api.DDTags; +import datadog.trace.api.DDTraceId; +import datadog.trace.api.TagMap; +import datadog.trace.api.datastreams.NoopPathwayContext; +import datadog.trace.api.gateway.RequestContextSlot; +import datadog.trace.api.sampling.PrioritySampling; +import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; +import datadog.trace.bootstrap.instrumentation.api.ErrorPriorities; +import datadog.trace.bootstrap.instrumentation.api.TagContext; +import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; +import datadog.trace.common.sampling.RateByServiceTraceSampler; +import datadog.trace.common.writer.ListWriter; +import datadog.trace.core.propagation.ExtractedContext; +import datadog.trace.core.propagation.PropagationTags; +import java.io.Closeable; +import java.io.IOException; +import java.lang.reflect.Field; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.tabletest.junit.TableTest; + +public class DDSpanTest extends DDCoreJavaSpecification { + + private ListWriter writer; + private CoreTracer tracer; + + @BeforeEach + void setup() { + writer = new ListWriter(); + tracer = tracerBuilder().writer(writer).sampler(new RateByServiceTraceSampler()).build(); + } + + @Test + void gettersAndSetters() { + DDSpan span = + (DDSpan) + tracer + .buildSpan("fakeOperation") + .withServiceName("fakeService") + .withResourceName("fakeResource") + .withSpanType("fakeType") + .start(); + + span.setServiceName("service"); + assertEquals("service", span.getServiceName()); + + span.setOperationName("operation"); + assertEquals("operation", 
span.getOperationName().toString()); + + span.setResourceName("resource"); + assertEquals("resource", span.getResourceName().toString()); + + span.setSpanType("type"); + assertEquals("type", span.getType()); + + span.setSamplingPriority(PrioritySampling.UNSET); + assertNull(span.getSamplingPriority()); + + span.setSamplingPriority(PrioritySampling.SAMPLER_KEEP); + assertEquals(PrioritySampling.SAMPLER_KEEP, (int) span.getSamplingPriority()); + + span.context().lockSamplingPriority(); + span.setSamplingPriority(PrioritySampling.USER_KEEP); + assertEquals(PrioritySampling.SAMPLER_KEEP, (int) span.getSamplingPriority()); + } + + @Test + void resourceNameEqualsOperationNameIfNull() { + String opName = "operationName"; + DDSpan span = (DDSpan) tracer.buildSpan(opName).start(); + assertEquals(opName, span.getResourceName().toString()); + assertFalse(span.getServiceName().isEmpty()); + + String resourceName = "fake"; + String serviceName = "myService"; + span = + (DDSpan) + tracer + .buildSpan(opName) + .withResourceName(resourceName) + .withServiceName(serviceName) + .start(); + assertEquals(resourceName, span.getResourceName().toString()); + assertEquals(serviceName, span.getServiceName()); + } + + @Test + void durationMeasuredInNanoseconds() { + long mod = TimeUnit.MILLISECONDS.toNanos(1); + long start = System.nanoTime(); + DDSpan span = (DDSpan) tracer.buildSpan("test").start(); + long between = System.nanoTime(); + long betweenDur = System.nanoTime() - between; + span.finish(); + long total = System.nanoTime() - start; + + assertTrue( + Math.abs( + TimeUnit.NANOSECONDS.toSeconds(span.getStartTime()) + - TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis())) + < 5); + assertTrue(span.getDurationNano() > betweenDur); + assertTrue(span.getDurationNano() < total); + assertTrue(span.getDurationNano() % mod > 0); + } + + @Test + void phasedFinishCapturesDurationButDoesNotPublishImmediately() throws Exception { + long mod = TimeUnit.MILLISECONDS.toNanos(1); + long 
start = System.nanoTime(); + DDSpan span = (DDSpan) tracer.buildSpan("test").start(); + long between = System.nanoTime(); + long betweenDur = System.nanoTime() - between; + + span.publish(); + assertEquals(0, span.getDurationNano()); + assertEquals(1, pendingReferenceCount(span)); + assertEquals(0, writer.size()); + + boolean finish = span.phasedFinish(); + long total = System.nanoTime() - start; + + assertTrue(finish); + assertEquals(1, pendingReferenceCount(span)); + assertTrue(spans(span).isEmpty()); + assertTrue(writer.isEmpty()); + + assertTrue(span.getDurationNano() < 0); + long actualDurationNano = span.getDurationNano() & Long.MAX_VALUE; + assertTrue( + Math.abs( + TimeUnit.NANOSECONDS.toSeconds(span.getStartTime()) + - TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis())) + < 5); + assertTrue(actualDurationNano > betweenDur); + assertTrue(actualDurationNano < total); + assertTrue(actualDurationNano % mod > 0); + + finish = span.phasedFinish(); + span.finish(); + assertFalse(finish); + assertEquals(1, pendingReferenceCount(span)); + assertTrue(spans(span).isEmpty()); + assertTrue(writer.isEmpty()); + + span.publish(); + assertTrue(span.getDurationNano() > 0); + assertEquals(actualDurationNano, span.getDurationNano()); + assertEquals(0, pendingReferenceCount(span)); + assertEquals(1, writer.size()); + + span.publish(); + assertEquals(0, pendingReferenceCount(span)); + assertEquals(1, writer.size()); + } + + @Test + void startingWithTimestampDisablesNanotime() { + long mod = TimeUnit.MILLISECONDS.toNanos(1); + long start = System.currentTimeMillis(); + DDSpan span = + (DDSpan) + tracer + .buildSpan("test") + .withStartTimestamp(TimeUnit.MILLISECONDS.toMicros(System.currentTimeMillis())) + .start(); + long between = System.currentTimeMillis(); + long betweenDur = System.currentTimeMillis() - between; + span.finish(); + long total = Math.max(1, System.currentTimeMillis() - start); + + assertTrue( + Math.abs( + 
TimeUnit.NANOSECONDS.toSeconds(span.getStartTime()) + - TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis())) + < 5); + assertTrue(span.getDurationNano() >= TimeUnit.MILLISECONDS.toNanos(betweenDur)); + assertTrue(span.getDurationNano() <= TimeUnit.MILLISECONDS.toNanos(total)); + assertTrue(span.getDurationNano() % mod == 0 || span.getDurationNano() == 1); + } + + @Test + void stoppingWithTimestampDisablesNanotime() { + long mod = TimeUnit.MILLISECONDS.toNanos(1); + long start = System.currentTimeMillis(); + DDSpan span = (DDSpan) tracer.buildSpan("test").start(); + long between = System.currentTimeMillis(); + long betweenDur = System.currentTimeMillis() - between; + span.finish(TimeUnit.MILLISECONDS.toMicros(System.currentTimeMillis() + 1)); + long total = System.currentTimeMillis() - start + 1; + + assertTrue( + Math.abs( + TimeUnit.NANOSECONDS.toSeconds(span.getStartTime()) + - TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis())) + < 5); + assertTrue(span.getDurationNano() >= TimeUnit.MILLISECONDS.toNanos(betweenDur)); + assertTrue(span.getDurationNano() <= TimeUnit.MILLISECONDS.toNanos(total)); + assertTrue(span.getDurationNano() % mod == 0 || span.getDurationNano() == 1); + } + + @Test + void stoppingWithTimestampBeforeStartTimeYieldsMinDurationOfOne() { + DDSpan span = (DDSpan) tracer.buildSpan("test").start(); + span.finish( + TimeUnit.MILLISECONDS.toMicros(TimeUnit.NANOSECONDS.toMillis(span.getStartTimeNano())) + - 10); + assertEquals(1, span.getDurationNano()); + } + + @Test + void prioritySamplingMetricSetOnlyOnRootSpan() { + DDSpan parent = (DDSpan) tracer.buildSpan("testParent").start(); + DDSpan child1 = (DDSpan) tracer.buildSpan("testChild1").asChildOf(parent.context()).start(); + + child1.setSamplingPriority(PrioritySampling.SAMPLER_KEEP); + child1.context().lockSamplingPriority(); + parent.setSamplingPriority(PrioritySampling.SAMPLER_DROP); + child1.finish(); + DDSpan child2 = (DDSpan) 
tracer.buildSpan("testChild2").asChildOf(parent.context()).start(); + child2.finish(); + parent.finish(); + + assertEquals(PrioritySampling.SAMPLER_KEEP, parent.context().getSamplingPriority()); + assertEquals(PrioritySampling.SAMPLER_KEEP, (int) parent.getSamplingPriority()); + assertTrue(parent.hasSamplingPriority()); + assertEquals(parent.getSamplingPriority(), child1.getSamplingPriority()); + assertEquals(parent.getSamplingPriority(), child2.getSamplingPriority()); + assertFalse(child1.hasSamplingPriority()); + assertFalse(child2.hasSamplingPriority()); + } + + static Stream originSetOnlyOnRootSpanArguments() { + return Stream.of( + arguments("TagContext", new TagContext("some-origin", TagMap.fromMap(new HashMap<>()))), + arguments( + "ExtractedContext", + new ExtractedContext( + DDTraceId.ONE, + 2, + SAMPLER_DROP, + "some-origin", + PropagationTags.factory().empty(), + DATADOG))); + } + + @ParameterizedTest + @MethodSource("originSetOnlyOnRootSpanArguments") + void originSetOnlyOnRootSpan(String scenario, AgentSpanContext extractedContext) + throws Exception { + DDSpanContext parent = + (DDSpanContext) + tracer.buildSpan("testParent").asChildOf(extractedContext).start().context(); + DDSpanContext child = + (DDSpanContext) tracer.buildSpan("testChild1").asChildOf(parent).start().context(); + + assertEquals("some-origin", parent.getOrigin().toString()); + Field originField = DDSpanContext.class.getDeclaredField("origin"); + originField.setAccessible(true); + assertEquals("some-origin", originField.get(parent).toString()); + assertEquals("some-origin", child.getOrigin().toString()); + assertNull(originField.get(child)); + } + + static Stream isRootSpanArguments() { + return Stream.of( + arguments("no parent", null, true), + arguments( + "distributed parent", + new ExtractedContext( + DDTraceId.from(123), + 456, + SAMPLER_KEEP, + "789", + PropagationTags.factory().empty(), + DATADOG), + false)); + } + + @ParameterizedTest + @MethodSource("isRootSpanArguments") + 
void isRootSpanInAndNotInContextOfDistributedTracing( + String scenario, AgentSpanContext extractedContext, boolean isTraceRootSpan) { + DDSpan root = (DDSpan) tracer.buildSpan("root").asChildOf(extractedContext).start(); + DDSpan child = (DDSpan) tracer.buildSpan("child").asChildOf(root.context()).start(); + + assertEquals(isTraceRootSpan, root.isRootSpan()); + assertFalse(child.isRootSpan()); + + child.finish(); + root.finish(); + } + + static Stream getApplicationRootSpanArguments() { + return Stream.of( + arguments("no parent", null), + arguments( + "distributed parent", + new ExtractedContext( + DDTraceId.from(123), + 456, + SAMPLER_KEEP, + "789", + PropagationTags.factory().empty(), + DATADOG))); + } + + @ParameterizedTest + @MethodSource("getApplicationRootSpanArguments") + void getApplicationRootSpanInAndNotInContextOfDistributedTracing( + String scenario, AgentSpanContext extractedContext) { + DDSpan root = (DDSpan) tracer.buildSpan("root").asChildOf(extractedContext).start(); + DDSpan child = (DDSpan) tracer.buildSpan("child").asChildOf(root.context()).start(); + + assertEquals(root, root.getLocalRootSpan()); + assertEquals(root, child.getLocalRootSpan()); + assertEquals(root, root.getRootSpan()); + assertEquals(root, child.getRootSpan()); + + child.finish(); + root.finish(); + } + + @Test + void publishingOfRootSpanClosesRequestContextData() throws Exception { + Closeable reqContextData = mock(Closeable.class); + TagContext context = new TagContext().withRequestContextDataAppSec(reqContextData); + DDSpan root = (DDSpan) tracer.buildSpan("root").asChildOf(context).start(); + DDSpan child = (DDSpan) tracer.buildSpan("child").asChildOf(root.context()).start(); + + assertEquals(reqContextData, root.getRequestContext().getData(RequestContextSlot.APPSEC)); + assertEquals(reqContextData, child.getRequestContext().getData(RequestContextSlot.APPSEC)); + + child.finish(); + verify(reqContextData, never()).close(); + + root.finish(); + verify(reqContextData, 
times(1)).close(); + } + + static Stream inferTopLevelFromParentServiceNameArguments() { + return Stream.of( + arguments("String foo", "foo", true), + arguments("UTF8BytesString foo", UTF8BytesString.create("foo"), true), + arguments("String fakeService", "fakeService", false), + arguments("UTF8BytesString fakeService", UTF8BytesString.create("fakeService"), false), + arguments("empty string", "", true), + arguments("null", null, true)); + } + + @ParameterizedTest + @MethodSource("inferTopLevelFromParentServiceNameArguments") + void inferTopLevelFromParentServiceName( + String scenario, CharSequence parentServiceName, boolean expectTopLevel) { + DDSpanContext context = + new DDSpanContext( + DDTraceId.ONE, + 1, + DDSpanId.ZERO, + parentServiceName, + "fakeService", + "fakeOperation", + "fakeResource", + PrioritySampling.UNSET, + null, + Collections.emptyMap(), + false, + "fakeType", + 0, + tracer.createTraceCollector(DDTraceId.ONE), + null, + null, + NoopPathwayContext.INSTANCE, + false, + PropagationTags.factory().empty()); + assertEquals(expectTopLevel, context.isTopLevel()); + } + + @Test + void brokenPipeExceptionDoesNotCreateErrorSpan() { + DDSpan span = (DDSpan) tracer.buildSpan("root").start(); + span.addThrowable(new IOException("Broken pipe")); + assertFalse(span.isError()); + assertNull(span.getTag(DDTags.ERROR_STACK)); + assertEquals("Broken pipe", span.getTag(DDTags.ERROR_MSG)); + } + + @Test + void wrappedBrokenPipeExceptionDoesNotCreateErrorSpan() { + DDSpan span = (DDSpan) tracer.buildSpan("root").start(); + span.addThrowable(new RuntimeException(new IOException("Broken pipe"))); + assertFalse(span.isError()); + assertNull(span.getTag(DDTags.ERROR_STACK)); + assertEquals("java.io.IOException: Broken pipe", span.getTag(DDTags.ERROR_MSG)); + } + + @Test + void nullExceptionSafeToAdd() { + DDSpan span = (DDSpan) tracer.buildSpan("root").start(); + span.addThrowable(null); + assertFalse(span.isError()); + assertNull(span.getTag(DDTags.ERROR_STACK)); + } 
+ + @TableTest({ + "scenario | rate | limit ", + "rate=1.0 lim=10 | 1.0 | 10 ", + "rate=0.5 lim=100 | 0.5 | 100 ", + "rate=0.25 no lim | 0.25 | 2147483647" + }) + void setSingleSpanSamplingTags(String scenario, double rate, int limit) { + DDSpan span = (DDSpan) tracer.buildSpan("testSpan").start(); + assertEquals(UNSET, span.samplingPriority()); + + span.setSpanSamplingPriority(rate, limit); + + assertEquals((int) SPAN_SAMPLING_RATE, span.getTag(SPAN_SAMPLING_MECHANISM_TAG)); + assertEquals(rate, span.getTag(SPAN_SAMPLING_RULE_RATE_TAG)); + assertEquals( + limit == Integer.MAX_VALUE ? null : (double) limit, + span.getTag(SPAN_SAMPLING_MAX_PER_SECOND_TAG)); + assertEquals(UNSET, span.samplingPriority()); + } + + @Test + void errorPrioritiesShouldBeRespected() { + DDSpan span = (DDSpan) tracer.buildSpan("testSpan").start(); + assertFalse(span.isError()); + + span.setError(true); + assertTrue(span.isError()); + + span.setError(false); + assertFalse(span.isError()); + + span.setError(true, ErrorPriorities.HTTP_SERVER_DECORATOR); + assertFalse(span.isError()); + + span.setError(true, ErrorPriorities.MANUAL_INSTRUMENTATION); + assertTrue(span.isError()); + + span.setError(true, Byte.MAX_VALUE); + assertTrue(span.isError()); + } + + private static int pendingReferenceCount(DDSpan span) { + PendingTrace trace = (PendingTrace) span.context().getTraceCollector(); + return PendingTraceTestBridge.getPendingReferenceCount(trace); + } + + private static Collection spans(DDSpan span) { + PendingTrace trace = (PendingTrace) span.context().getTraceCollector(); + return trace.getSpans(); + } +} diff --git a/dd-trace-core/src/test/java/datadog/trace/core/KnuthSamplingRateTest.java b/dd-trace-core/src/test/java/datadog/trace/core/KnuthSamplingRateTest.java new file mode 100644 index 00000000000..3b82beb2f6f --- /dev/null +++ b/dd-trace-core/src/test/java/datadog/trace/core/KnuthSamplingRateTest.java @@ -0,0 +1,229 @@ +package datadog.trace.core; + +import static 
datadog.trace.api.config.TracerConfig.TRACE_RATE_LIMIT; +import static datadog.trace.api.config.TracerConfig.TRACE_SAMPLE_RATE; +import static datadog.trace.api.config.TracerConfig.TRACE_SAMPLING_RULES; +import static datadog.trace.api.config.TracerConfig.TRACE_SAMPLING_SERVICE_RULES; +import static datadog.trace.api.sampling.PrioritySampling.SAMPLER_KEEP; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import datadog.trace.common.sampling.PrioritySampler; +import datadog.trace.common.sampling.RateByServiceTraceSampler; +import datadog.trace.common.sampling.Sampler; +import datadog.trace.common.writer.ListWriter; +import datadog.trace.core.propagation.PropagationTags; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import org.junit.jupiter.api.Test; +import org.tabletest.junit.TableTest; + +public class KnuthSamplingRateTest extends DDCoreJavaSpecification { + + @TableTest({ + "scenario | rate | expected", + "1.0 | 1.0 | 1 ", + "0.5 | 0.5 | 0.5 ", + "0.1 | 0.1 | 0.1 ", + "0.0 | 0.0 | 0 ", + "0.765432 | 0.765432 | 0.765432", + "0.7654321 rounds to 6 decimal | 0.7654321 | 0.765432", + "0.123456 | 0.123456 | 0.123456", + "0.100000 trailing zeros | 0.100000 | 0.1 ", + "0.250 trailing zero | 0.250 | 0.25 ", + "0.05 | 0.05 | 0.05 ", + "0.0123456789 rounds at 6dp | 0.0123456789 | 0.012346", + "0.001 | 0.001 | 0.001 ", + "0.00500 trailing zeros | 0.00500 | 0.005 ", + "0.00123456789 rounds at 6dp | 0.00123456789 | 0.001235", + "0.0001 | 0.0001 | 0.0001 ", + "0.000500 trailing zeros | 0.000500 | 0.0005 ", + "0.000123456789 rounds at 6dp | 0.000123456789 | 0.000123", + "0.9999995 rounds up to 1 | 0.9999995 | 1 ", + "0.00001 | 0.00001 | 0.00001 ", + "0.000050 trailing zeros | 0.000050 | 0.00005 ", + "1.23456789e-5 rounds at 6dp | 0.0000123456789 | 0.000012", + "1e-7 below precision rounds to 0 | 0.0000001 | 
0 ", + "5.5e-10 below precision | 0.00000000055 | 0 ", + "0.000001 six decimal boundary | 0.000001 | 0.000001", + "0.00000051 rounds up | 0.00000051 | 0.000001" + }) + void updateKnuthSamplingRateFormatsRateCorrectly(String scenario, double rate, String expected) { + PropagationTags pTags = PropagationTags.factory().empty(); + pTags.updateKnuthSamplingRate(rate); + Map tagMap = pTags.createTagMap(); + assertEquals(expected, tagMap.get("_dd.p.ksr")); + } + + @TableTest({ + "scenario | rate | expectedKsr", + "rate 1.0 | 1.0 | 1 ", + "rate 0.5 | 0.5 | 0.5 ", + "rate 0.0 | 0.0 | 0 " + }) + void agentRateSamplerSetsKsrPropagatedTag(String scenario, double rate, String expectedKsr) { + RateByServiceTraceSampler serviceSampler = new RateByServiceTraceSampler(); + CoreTracer tracer = tracerBuilder().writer(new ListWriter()).build(); + + Map rates = new HashMap<>(); + rates.put("service:,env:", rate); + Map> response = new HashMap<>(); + response.put("rate_by_service", rates); + serviceSampler.onResponse("traces", response); + + DDSpan span = + (DDSpan) + tracer + .buildSpan("fakeOperation") + .withServiceName("spock") + .withTag("env", "test") + .ignoreActiveSpan() + .start(); + serviceSampler.setSamplingPriority(span); + + Map propagationMap = span.context().getPropagationTags().createTagMap(); + String ksr = propagationMap.get("_dd.p.ksr"); + + assertEquals(expectedKsr, ksr); + tracer.close(); + } + + @TableTest({ + "scenario | jsonRules | expectedKsr", + "rate 1 matches | '[{\"service\": \"service\", \"sample_rate\": 1}]' | 1 ", + "rate 0.5 matches | '[{\"service\": \"service\", \"sample_rate\": 0.5}]' | 0.5 ", + "rate 0 matches | '[{\"service\": \"service\", \"sample_rate\": 0}]' | 0 " + }) + void ruleBasedSamplerSetsKsrPropagatedTagWhenRuleMatches( + String scenario, String jsonRules, String expectedKsr) { + Properties properties = new Properties(); + properties.setProperty(TRACE_SAMPLING_RULES, jsonRules); + properties.setProperty(TRACE_RATE_LIMIT, "50"); + 
CoreTracer tracer = tracerBuilder().writer(new ListWriter()).build(); + + Sampler sampler = Sampler.Builder.forConfig(properties); + DDSpan span = + (DDSpan) + tracer + .buildSpan("operation") + .withServiceName("service") + .withTag("env", "bar") + .ignoreActiveSpan() + .start(); + ((PrioritySampler) sampler).setSamplingPriority(span); + + Map propagationMap = span.context().getPropagationTags().createTagMap(); + String ksr = propagationMap.get("_dd.p.ksr"); + + assertEquals(expectedKsr, ksr); + tracer.close(); + } + + @Test + void ruleBasedSamplerFallbackToAgentSamplerSetsKsr() { + Properties properties = new Properties(); + properties.setProperty( + TRACE_SAMPLING_RULES, "[{\"service\": \"nomatch\", \"sample_rate\": 0.5}]"); + properties.setProperty(TRACE_RATE_LIMIT, "50"); + CoreTracer tracer = tracerBuilder().writer(new ListWriter()).build(); + + Sampler sampler = Sampler.Builder.forConfig(properties); + DDSpan span = + (DDSpan) + tracer + .buildSpan("operation") + .withServiceName("service") + .withTag("env", "bar") + .ignoreActiveSpan() + .start(); + ((PrioritySampler) sampler).setSamplingPriority(span); + + Map propagationMap = span.context().getPropagationTags().createTagMap(); + String ksr = propagationMap.get("_dd.p.ksr"); + + assertEquals("1", ksr); + assertEquals(SAMPLER_KEEP, (int) span.getSamplingPriority()); + tracer.close(); + } + + @Test + void serviceRuleSamplerSetsKsrPropagatedTag() { + Properties properties = new Properties(); + properties.setProperty(TRACE_SAMPLING_SERVICE_RULES, "service:0.75"); + properties.setProperty(TRACE_RATE_LIMIT, "50"); + CoreTracer tracer = tracerBuilder().writer(new ListWriter()).build(); + + Sampler sampler = Sampler.Builder.forConfig(properties); + DDSpan span = + (DDSpan) + tracer + .buildSpan("operation") + .withServiceName("service") + .withTag("env", "bar") + .ignoreActiveSpan() + .start(); + ((PrioritySampler) sampler).setSamplingPriority(span); + + Map propagationMap = 
span.context().getPropagationTags().createTagMap(); + String ksr = propagationMap.get("_dd.p.ksr"); + + assertEquals("0.75", ksr); + tracer.close(); + } + + @Test + void defaultRateSamplerSetsKsrPropagatedTag() { + Properties properties = new Properties(); + properties.setProperty(TRACE_SAMPLE_RATE, "0.25"); + properties.setProperty(TRACE_RATE_LIMIT, "50"); + CoreTracer tracer = tracerBuilder().writer(new ListWriter()).build(); + + Sampler sampler = Sampler.Builder.forConfig(properties); + DDSpan span = + (DDSpan) + tracer + .buildSpan("operation") + .withServiceName("service") + .withTag("env", "bar") + .ignoreActiveSpan() + .start(); + ((PrioritySampler) sampler).setSamplingPriority(span); + + Map propagationMap = span.context().getPropagationTags().createTagMap(); + String ksr = propagationMap.get("_dd.p.ksr"); + + assertEquals("0.25", ksr); + tracer.close(); + } + + @Test + void ksrIsPropagatedViaXDatadogTagsHeader() { + RateByServiceTraceSampler serviceSampler = new RateByServiceTraceSampler(); + CoreTracer tracer = tracerBuilder().writer(new ListWriter()).build(); + + Map rates = new HashMap<>(); + rates.put("service:,env:", 0.5); + Map> response = new HashMap<>(); + response.put("rate_by_service", rates); + serviceSampler.onResponse("traces", response); + + DDSpan span = + (DDSpan) + tracer + .buildSpan("fakeOperation") + .withServiceName("spock") + .withTag("env", "test") + .ignoreActiveSpan() + .start(); + serviceSampler.setSamplingPriority(span); + + String headerValue = + span.context().getPropagationTags().headerValue(PropagationTags.HeaderType.DATADOG); + + assertNotNull(headerValue); + assertTrue(headerValue.contains("_dd.p.ksr=0.5")); + tracer.close(); + } +} diff --git a/dd-trace-core/src/test/java/datadog/trace/core/PendingTraceTestBridge.java b/dd-trace-core/src/test/java/datadog/trace/core/PendingTraceTestBridge.java new file mode 100644 index 00000000000..0d4fc04a9ac --- /dev/null +++ 
b/dd-trace-core/src/test/java/datadog/trace/core/PendingTraceTestBridge.java @@ -0,0 +1,10 @@ +package datadog.trace.core; + +/** + * Bridge class that allows tests to access the package-private method exposed by {@code PendingTrace}. + */ +public class PendingTraceTestBridge { + public static int getPendingReferenceCount(PendingTrace pendingTrace) { + return pendingTrace.getPendingReferenceCount(); + } +}