Skip to content

Commit

Permalink
Fix support for OpenAI developer messages (#539)
Browse files Browse the repository at this point in the history
* log instead of throw on unhandled message
* Report developer messages as developer role
* remove unnecessary string concat
* spotless and fix 0.8 compatibility
  • Loading branch information
JonasKunz authored Feb 12, 2025
1 parent 3a9ef8e commit 5fedd11
Show file tree
Hide file tree
Showing 16 changed files with 243 additions and 19 deletions.
2 changes: 1 addition & 1 deletion custom/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ plugins {
}

val instrumentations = listOf<String>(
":instrumentation:openai-client-instrumentation:instrumentation-0.2",
":instrumentation:openai-client-instrumentation:instrumentation-0.8",
":instrumentation:openai-client-instrumentation:instrumentation-0.14"
)

Expand Down
4 changes: 3 additions & 1 deletion docs/configure.md
Original file line number Diff line number Diff line change
Expand Up @@ -122,8 +122,10 @@ It supports:
* Tracing for requests, including GenAI-specific attributes such as token usage
* Opt-In logging of OpenAI request and response content payloads

The minimum supported version is `0.8.0`.

Note that this instrumentation is currently in **tech preview**, because the OpenAI client itself is still in beta.
Once the OpenAI client is stable, this instrumentation will be GAed, but we'll likely drop support for beta versions of the client after some time.
Once the OpenAI client is stable, this instrumentation will be GAed. We'll drop support for older beta versions of the client after some time.
The instrumentation is on by default and can be disabled by setting either the `OTEL_INSTRUMENTATION_OPENAI_CLIENT_ENABLED` environment variable or the `otel.instrumentation.openai-client.enabled` JVM property to `false`.

In addition, this instrumentation provides the following configuration options:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import com.openai.models.ChatCompletionAssistantMessageParam;
import com.openai.models.ChatCompletionContentPart;
import com.openai.models.ChatCompletionCreateParams;
import com.openai.models.ChatCompletionDeveloperMessageParam;
import com.openai.models.ChatCompletionMessageParam;
import com.openai.models.ChatCompletionSystemMessageParam;
import com.openai.models.ChatCompletionToolMessageParam;
Expand Down Expand Up @@ -75,6 +76,11 @@ protected static void init(Supplier<ApiAdapter> implementation) {
*/
public abstract String asText(ChatCompletionSystemMessageParam.Content content);

/**
* @return the contained text, if the content is text. null otherwise.
*/
public abstract String asText(ChatCompletionDeveloperMessageParam.Content content);

/**
* @return the contained text, if the content is text. null otherwise.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import com.openai.models.ChatCompletionAssistantMessageParam;
import com.openai.models.ChatCompletionContentPartText;
import com.openai.models.ChatCompletionCreateParams;
import com.openai.models.ChatCompletionDeveloperMessageParam;
import com.openai.models.ChatCompletionMessage;
import com.openai.models.ChatCompletionMessageParam;
import com.openai.models.ChatCompletionMessageToolCall;
Expand All @@ -40,10 +41,14 @@
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.logging.Level;
import java.util.stream.Collectors;

public class ChatCompletionEventsHelper {

private static final java.util.logging.Logger LOG =
java.util.logging.Logger.getLogger(ChatCompletionEventsHelper.class.getName());

private static final Logger EV_LOGGER =
GlobalOpenTelemetry.get().getLogsBridge().get(Constants.INSTRUMENTATION_NAME);

Expand All @@ -66,6 +71,14 @@ public static void emitPromptLogEvents(
if (settings.captureMessageContent) {
putIfNotEmpty(bodyBuilder, "content", contentToString(sysMsg.content()));
}
} else if (concreteMessageParam instanceof ChatCompletionDeveloperMessageParam) {
ChatCompletionDeveloperMessageParam sysMsg =
(ChatCompletionDeveloperMessageParam) concreteMessageParam;
eventType = "gen_ai.system.message";
putIfNotEmpty(bodyBuilder, "role", "developer");
if (settings.captureMessageContent) {
putIfNotEmpty(bodyBuilder, "content", contentToString(sysMsg.content()));
}
} else if (concreteMessageParam instanceof ChatCompletionUserMessageParam) {
ChatCompletionUserMessageParam userMsg =
(ChatCompletionUserMessageParam) concreteMessageParam;
Expand Down Expand Up @@ -102,7 +115,8 @@ public static void emitPromptLogEvents(
bodyBuilder.put("id", toolMsg.toolCallId());
}
} else {
throw new IllegalStateException("Unhandled type : " + msg.getClass().getName());
LOG.log(Level.WARNING, "Unhandled OpenAI message type will be dropped: {0}", msg);
continue;
}
newEvent(eventType).setBody(bodyBuilder.build()).emit();
}
Expand All @@ -119,9 +133,7 @@ private static String contentToString(ChatCompletionToolMessageParam.Content con
if (text != null) {
return text;
} else if (content.isArrayOfContentParts()) {
return content.asArrayOfContentParts().stream()
.map(ChatCompletionContentPartText::text)
.collect(Collectors.joining());
return joinContentParts(content.asArrayOfContentParts());
} else {
throw new IllegalStateException("Unhandled content type for " + content);
}
Expand All @@ -146,14 +158,29 @@ private static String contentToString(ChatCompletionSystemMessageParam.Content c
if (text != null) {
return text;
} else if (content.isArrayOfContentParts()) {
return content.asArrayOfContentParts().stream()
.map(ChatCompletionContentPartText::text)
.collect(Collectors.joining());
return joinContentParts(content.asArrayOfContentParts());
} else {
throw new IllegalStateException("Unhandled content type for " + content);
}
}

private static String contentToString(ChatCompletionDeveloperMessageParam.Content content) {
  // Prefer the plain-text form; otherwise concatenate the text of all content parts.
  String plainText = ApiAdapter.get().asText(content);
  if (plainText != null) {
    return plainText;
  }
  if (!content.isArrayOfContentParts()) {
    throw new IllegalStateException("Unhandled content type for " + content);
  }
  return joinContentParts(content.asArrayOfContentParts());
}

private static String joinContentParts(List<ChatCompletionContentPartText> contentParts) {
  // Concatenate the text of every part with no separator, matching Collectors.joining().
  StringBuilder joined = new StringBuilder();
  for (ChatCompletionContentPartText part : contentParts) {
    joined.append(part.text());
  }
  return joined.toString();
}

private static String contentToString(ChatCompletionUserMessageParam.Content content) {
String text = ApiAdapter.get().asText(content);
if (text != null) {
Expand Down Expand Up @@ -228,7 +255,7 @@ private static LogRecordBuilder newEvent(String name) {
private static Value<?> buildToolCallEventObject(ChatCompletionMessageToolCall call) {
  // Represent a tool call as a {id, type, function} object for the log event body.
  Map<String, Value<?>> fields = new HashMap<>();
  fields.put("id", Value.of(call.id()));
  // "function" is the only tool-call type the API currently supports.
  fields.put("type", Value.of("function"));
  fields.put("function", buildFunctionEventObject(call.function()));
  return Value.of(fields);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import com.openai.models.ChatCompletionAssistantMessageParam;
import com.openai.models.ChatCompletionContentPart;
import com.openai.models.ChatCompletionCreateParams;
import com.openai.models.ChatCompletionDeveloperMessageParam;
import com.openai.models.ChatCompletionMessageParam;
import com.openai.models.ChatCompletionSystemMessageParam;
import com.openai.models.ChatCompletionToolMessageParam;
Expand All @@ -38,6 +39,9 @@ public Object extractConcreteCompletionMessageParam(ChatCompletionMessageParam b
if (base.isSystem()) {
return base.asSystem();
}
if (base.isDeveloper()) {
return base.asDeveloper();
}
if (base.isUser()) {
return base.asUser();
}
Expand All @@ -47,7 +51,7 @@ public Object extractConcreteCompletionMessageParam(ChatCompletionMessageParam b
if (base.isTool()) {
return base.asTool();
}
throw new IllegalStateException("Unhandled message param type: " + base);
return null;
}

@Override
Expand All @@ -65,6 +69,11 @@ public String asText(ChatCompletionSystemMessageParam.Content content) {
return content.isText() ? content.asText() : null;
}

@Override
public String asText(ChatCompletionDeveloperMessageParam.Content content) {
  // Only plain-text content has a String form; structured content yields null.
  if (content.isText()) {
    return content.asText();
  }
  return null;
}

@Override
public String asText(ChatCompletionUserMessageParam.Content content) {
return content.isText() ? content.asText() : null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import co.elastic.otel.openai.ChatTestBase;
import com.openai.models.ChatCompletionAssistantMessageParam;
import com.openai.models.ChatCompletionDeveloperMessageParam;
import com.openai.models.ChatCompletionMessageParam;
import com.openai.models.ChatCompletionMessageToolCall;
import com.openai.models.ChatCompletionSystemMessageParam;
Expand Down Expand Up @@ -63,6 +64,14 @@ protected ChatCompletionMessageParam createSystemMessage(String content) {
.build());
}

@Override
protected ChatCompletionMessageParam createDeveloperMessage(String content) {
  // Wrap the plain string as the text content of a developer message.
  ChatCompletionDeveloperMessageParam.Content textContent =
      ChatCompletionDeveloperMessageParam.Content.ofText(content);
  ChatCompletionDeveloperMessageParam developerMessage =
      ChatCompletionDeveloperMessageParam.builder().content(textContent).build();
  return ChatCompletionMessageParam.ofDeveloper(developerMessage);
}

@Override
protected ChatCompletionMessageParam createToolMessage(String response, String id) {
return ChatCompletionMessageParam.ofTool(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ plugins {
id("elastic-otel.instrumentation-conventions")
}

val openAiVersion = "0" +".13.0"; // DO NOT UPGRADE, string operations are used to prevent renovate upgrades
val openAiVersion = "0.13.0"; // DO NOT UPGRADE

dependencies {
compileOnly("com.openai:openai-java:${openAiVersion}")
Expand All @@ -19,7 +19,7 @@ muzzle {
pass {
group.set("com.openai")
module.set("openai-java")
versions.set("(,${openAiVersion}]")
versions.set("[0.8,${openAiVersion}]")
assertInverse.set(true)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import com.openai.models.ChatCompletionAssistantMessageParam;
import com.openai.models.ChatCompletionContentPart;
import com.openai.models.ChatCompletionCreateParams;
import com.openai.models.ChatCompletionDeveloperMessageParam;
import com.openai.models.ChatCompletionMessageParam;
import com.openai.models.ChatCompletionSystemMessageParam;
import com.openai.models.ChatCompletionToolMessageParam;
Expand All @@ -38,6 +39,9 @@ public Object extractConcreteCompletionMessageParam(ChatCompletionMessageParam b
if (base.isChatCompletionSystemMessageParam()) {
return base.asChatCompletionSystemMessageParam();
}
if (base.isChatCompletionDeveloperMessageParam()) {
return base.asChatCompletionDeveloperMessageParam();
}
if (base.isChatCompletionUserMessageParam()) {
return base.asChatCompletionUserMessageParam();
}
Expand All @@ -47,7 +51,7 @@ public Object extractConcreteCompletionMessageParam(ChatCompletionMessageParam b
if (base.isChatCompletionToolMessageParam()) {
return base.asChatCompletionToolMessageParam();
}
throw new IllegalStateException("Unhandled message param type: " + base);
return null;
}

@Override
Expand All @@ -61,11 +65,11 @@ public String extractText(ChatCompletionContentPart part) {
@Override
public String extractType(ChatCompletionCreateParams.ResponseFormat val) {
  // Map each response-format variant to its wire-protocol type name;
  // unknown variants yield null.
  if (val.isResponseFormatText()) {
    return "text";
  }
  if (val.isResponseFormatJsonObject()) {
    return "json_object";
  }
  if (val.isResponseFormatJsonSchema()) {
    return "json_schema";
  }
  return null;
}
Expand Down Expand Up @@ -93,6 +97,11 @@ public String asText(ChatCompletionSystemMessageParam.Content content) {
return content.isTextContent() ? content.asTextContent() : null;
}

@Override
public String asText(ChatCompletionDeveloperMessageParam.Content content) {
  // Structured (non-text) content has no single String representation.
  if (content.isTextContent()) {
    return content.asTextContent();
  }
  return null;
}

@Override
public String asText(ChatCompletionAssistantMessageParam.Content content) {
return content.isTextContent() ? content.asTextContent() : null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import co.elastic.otel.openai.ChatTestBase;
import com.openai.models.ChatCompletionAssistantMessageParam;
import com.openai.models.ChatCompletionDeveloperMessageParam;
import com.openai.models.ChatCompletionMessageParam;
import com.openai.models.ChatCompletionMessageToolCall;
import com.openai.models.ChatCompletionSystemMessageParam;
Expand Down Expand Up @@ -63,6 +64,14 @@ protected ChatCompletionMessageParam createSystemMessage(String content) {
.build());
}

@Override
protected ChatCompletionMessageParam createDeveloperMessage(String content) {
  // Build a developer message whose content is the given plain text
  // (pre-0.14 API uses the longer of*/is* naming scheme).
  ChatCompletionDeveloperMessageParam.Content textContent =
      ChatCompletionDeveloperMessageParam.Content.ofTextContent(content);
  ChatCompletionDeveloperMessageParam developerMessage =
      ChatCompletionDeveloperMessageParam.builder().content(textContent).build();
  return ChatCompletionMessageParam.ofChatCompletionDeveloperMessageParam(developerMessage);
}

@Override
protected ChatCompletionMessageParam createToolMessage(String response, String id) {
return ChatCompletionMessageParam.ofChatCompletionToolMessageParam(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -238,6 +238,75 @@ void chat() throws Exception {
equalTo(SERVER_PORT, (long) openai.getPort())))));
}

@Test
// Verifies that a developer message is instrumented: it must be emitted as a
// "gen_ai.system.message" log event carrying role "developer", alongside the
// user message and choice events, all correlated to the single request span.
void testDeveloperMessage() {
// Enable content capture so message bodies appear in the emitted log events.
InstrumentationSettingsAccessor.setCaptureMessageContent(openai.client, true);

// Request containing one developer message followed by one user message.
ChatCompletionCreateParams params =
ChatCompletionCreateParams.builder()
.messages(
Arrays.asList(
createDeveloperMessage(
"You are an assistant which just answers every query with tomato"),
createUserMessage("Say something")))
.model(TEST_CHAT_MODEL)
.build();

ChatCompletion chatCompletion = openai.client.chat().completions().create(params);
chatCompletion.validate();

// Exactly one span is expected for the chat-completion call.
List<SpanData> spans = testing.spans();
assertThat(spans.size()).isEqualTo(1);
SpanContext spanCtx = spans.get(0).getSpanContext();
assertThat(testing.logRecords())
// Developer message: reported under the gen_ai.system.message event type
// with role "developer" and the captured content.
.anySatisfy(
log -> {
assertThat(log)
.hasAttributesSatisfying(
attr ->
assertThat(attr)
.containsEntry(GEN_AI_SYSTEM, "openai")
.containsEntry("event.name", "gen_ai.system.message"))
.hasSpanContext(spanCtx);
assertThat(log.getBodyValue())
.satisfies(
ValAssert.map()
.entry(
"content",
"You are an assistant which just answers every query with tomato")
.entry("role", "developer"));
})
// User message event with its captured content.
.anySatisfy(
log -> {
assertThat(log)
.hasAttributesSatisfying(
attr ->
assertThat(attr)
.containsEntry(GEN_AI_SYSTEM, "openai")
.containsEntry("event.name", "gen_ai.user.message"))
.hasSpanContext(spanCtx);
assertThat(log.getBodyValue())
.satisfies(ValAssert.map().entry("content", "Say something"));
})
// Model response: a single choice event with finish reason and content.
.anySatisfy(
log -> {
assertThat(log)
.hasAttributesSatisfying(
attr ->
assertThat(attr)
.containsEntry(GEN_AI_SYSTEM, "openai")
.containsEntry("event.name", "gen_ai.choice"))
.hasSpanContext(spanCtx);
assertThat(log.getBodyValue())
.satisfies(
ValAssert.map()
.entry("finish_reason", "stop")
.entry("index", 0)
.entry("message", ValAssert.map().entry("content", "Tomato.")));
})
// Exactly three events: developer, user, and choice — nothing dropped or duplicated.
.hasSize(3);
}

@Test
void allTheClientOptions() {
ChatCompletionCreateParams params =
Expand Down Expand Up @@ -2343,5 +2412,7 @@ protected abstract ChatCompletionMessageParam createAssistantMessage(

protected abstract ChatCompletionMessageParam createSystemMessage(String content);

protected abstract ChatCompletionMessageParam createDeveloperMessage(String content);

protected abstract ChatCompletionMessageParam createToolMessage(String response, String id);
}
Loading

0 comments on commit 5fedd11

Please sign in to comment.