Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions .env.local.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Credentials for examples/langfuse_tracing.cpp.
# Copy to .env.local and fill in your keys, then:
# set -a; source .env.local; set +a
# ./build/examples/langfuse_tracing

# --- LLM provider (required by the example) ---
OPENAI_API_KEY=

# --- Langfuse (required) ---
# Project keys from your Langfuse instance -> Settings -> API keys
LANGFUSE_PUBLIC_KEY=
LANGFUSE_SECRET_KEY=

# --- Langfuse host (optional) ---
# EU cloud (default): https://cloud.langfuse.com
# US cloud: https://us.cloud.langfuse.com
# Self-hosted (langfuse/ docker-compose): http://localhost:3000
# The example also accepts LANGFUSE_BASE_URL as a synonym.
LANGFUSE_HOST=https://cloud.langfuse.com
37 changes: 35 additions & 2 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -45,18 +45,21 @@ set(HTTPLIB_COMPILE_DEFS
add_library(ai-sdk-cpp-core)
add_library(ai-sdk-cpp-openai)
add_library(ai-sdk-cpp-anthropic)
add_library(ai-sdk-cpp-langfuse)
add_library(ai-sdk-cpp INTERFACE)

# Set library aliases
add_library(ai::sdk ALIAS ai-sdk-cpp)
add_library(ai::core ALIAS ai-sdk-cpp-core)
add_library(ai::openai ALIAS ai-sdk-cpp-openai)
add_library(ai::anthropic ALIAS ai-sdk-cpp-anthropic)
add_library(ai::langfuse ALIAS ai-sdk-cpp-langfuse)

# Configure all component libraries
configure_ai_component(ai-sdk-cpp-core)
configure_ai_component(ai-sdk-cpp-openai)
configure_ai_component(ai-sdk-cpp-anthropic)
configure_ai_component(ai-sdk-cpp-langfuse)

# Configure main interface library
target_include_directories(ai-sdk-cpp
Expand Down Expand Up @@ -103,6 +106,12 @@ set(ANTHROPIC_SOURCES
target_sources(ai-sdk-cpp-openai PRIVATE ${OPENAI_SOURCES})
target_sources(ai-sdk-cpp-anthropic PRIVATE ${ANTHROPIC_SOURCES})

# Langfuse tracing component
target_sources(ai-sdk-cpp-langfuse
PRIVATE
src/langfuse/tracer.cpp
)

# Link dependencies
target_link_libraries(ai-sdk-cpp-core
PUBLIC
Expand All @@ -124,7 +133,7 @@ foreach(provider openai anthropic)
$<BUILD_INTERFACE:httplib::httplib>
$<BUILD_INTERFACE:concurrentqueue>
)

# Set component availability and HTTP definitions
string(TOUPPER ${provider} PROVIDER_UPPER)
target_compile_definitions(ai-sdk-cpp-${provider}
Expand All @@ -135,6 +144,23 @@ foreach(provider openai anthropic)
)
endforeach()

# Langfuse tracing component links to core and uses httplib + OpenSSL.
target_link_libraries(ai-sdk-cpp-langfuse
PUBLIC
ai::core
nlohmann_json::nlohmann_json
PRIVATE
$<BUILD_INTERFACE:httplib::httplib>
$<BUILD_INTERFACE:OpenSSL::SSL>
$<BUILD_INTERFACE:OpenSSL::Crypto>
)
target_compile_definitions(ai-sdk-cpp-langfuse
PUBLIC
AI_SDK_HAS_LANGFUSE=1
PRIVATE
${HTTPLIB_COMPILE_DEFS}
)

# Core needs HTTP definitions too
target_compile_definitions(ai-sdk-cpp-core
PRIVATE
Expand All @@ -147,17 +173,24 @@ target_link_libraries(ai-sdk-cpp
ai::core
ai::openai
ai::anthropic
ai::langfuse
)

# Define all component availability for main library
target_compile_definitions(ai-sdk-cpp
INTERFACE
AI_SDK_HAS_OPENAI=1
AI_SDK_HAS_ANTHROPIC=1
AI_SDK_HAS_LANGFUSE=1
)

# List of all concrete component targets
set(COMPONENT_TARGETS ai-sdk-cpp-core ai-sdk-cpp-openai ai-sdk-cpp-anthropic)
set(COMPONENT_TARGETS
ai-sdk-cpp-core
ai-sdk-cpp-openai
ai-sdk-cpp-anthropic
ai-sdk-cpp-langfuse
)

# Common compile options
if(MSVC)
Expand Down
3 changes: 3 additions & 0 deletions examples/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@ add_ai_example(test_tool_integration test_tool_integration.cpp)
# Embeddings example
add_ai_example(embeddings_example embeddings_example.cpp)

# Langfuse tracing example
add_ai_example(langfuse_tracing langfuse_tracing.cpp)

# Component-specific examples
add_subdirectory(components/openai)
add_subdirectory(components/anthropic)
Expand Down
119 changes: 119 additions & 0 deletions examples/langfuse_tracing.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
/**
* Langfuse Tracing Example - AI SDK C++
*
* Demonstrates how to wrap an `ai::Client::generate_text` call (with tools and
* multi-step) so the LLM call and each tool execution show up as a trace in
* Langfuse.
*
* Required env:
* OPENAI_API_KEY (or use ai::anthropic instead)
* LANGFUSE_PUBLIC_KEY
* LANGFUSE_SECRET_KEY
* Optional:
* LANGFUSE_HOST (default: https://cloud.langfuse.com)
*/

#include <cstdlib>
#include <iostream>
#include <map>
#include <string>

#include <ai/langfuse.h>
#include <ai/openai.h>
#include <ai/tools.h>

namespace {

// Read environment variable `name`; treat "unset" and "set but empty"
// the same way and hand back `fallback` in both cases.
const char* env_or(const char* name, const char* fallback) {
  if (const char* value = std::getenv(name); value && *value) {
    return value;
  }
  return fallback;
}

// Demo tool: map a user id to a small canned profile record.
// Unknown ids produce an {"error": ...} payload instead of throwing.
ai::JsonValue lookup_user(const ai::JsonValue& args,
                          const ai::ToolExecutionContext&) {
  static const std::map<std::string, ai::JsonValue> kUsers = {
      {"alice", {{"name", "Alice"}, {"city", "San Francisco"}}},
      {"bob", {{"name", "Bob"}, {"city", "New York"}}},
  };
  const auto user_id = args.value("user_id", std::string{});
  if (const auto found = kUsers.find(user_id); found != kUsers.end()) {
    return found->second;
  }
  return ai::JsonValue{{"error", "user not found"}};
}

// Demo tool: return a fixed, fake weather report for the requested location.
ai::JsonValue get_weather(const ai::JsonValue& args,
                          const ai::ToolExecutionContext&) {
  const auto location = args.value("location", std::string{"unknown"});
  return ai::JsonValue{{"location", location},
                       {"temperature_c", 21},
                       {"sky", "clear"}};
}

} // namespace

int main() {
const char* lf_pk = std::getenv("LANGFUSE_PUBLIC_KEY");
const char* lf_sk = std::getenv("LANGFUSE_SECRET_KEY");
if (!lf_pk || !lf_sk) {
std::cerr << "Set LANGFUSE_PUBLIC_KEY and LANGFUSE_SECRET_KEY first.\n";
return 1;
}

const char* host = std::getenv("LANGFUSE_HOST");
if (!host || !*host)
host = std::getenv("LANGFUSE_BASE_URL");
if (!host || !*host)
host = "https://cloud.langfuse.com";

ai::langfuse::Tracer tracer({
.host = host,
.public_key = lf_pk,
.secret_key = lf_sk,
.environment = "ai-sdk-cpp-example",
});
if (!tracer.is_valid()) {
std::cerr << "Langfuse tracer not configured.\n";
return 1;
}

auto client = ai::openai::create_client();
if (!client.is_valid()) {
std::cerr << "OpenAI client not configured (set OPENAI_API_KEY).\n";
return 1;
}

ai::ToolSet tools;
tools["lookup_user"] = ai::create_tool(
"Look up a user's profile by id",
ai::create_object_schema({{"user_id", "string"}}), lookup_user);
tools["get_weather"] = ai::create_tool(
"Get the current weather for a location",
ai::create_object_schema({{"location", "string"}}), get_weather);

ai::GenerateOptions options;
options.model = ai::openai::models::kGpt4oMini;
options.system =
"You are a concise assistant. Use the available tools when helpful.";
options.prompt = "Look up alice and tell me the weather where she lives.";
options.tools = std::move(tools);
options.max_steps = 4;
options.temperature = 0.0;

auto trace = tracer.start_trace("langfuse_tracing_example");
trace->set_input(options.prompt);
trace->set_metadata({{"example", "langfuse_tracing"}, {"sdk", "ai-sdk-cpp"}});

auto result = ai::langfuse::generate_text(client, std::move(options), *trace);

if (result) {
std::cout << "Output: " << result.text << "\n";
trace->set_output(result.text);
} else {
std::cerr << "Generation failed: " << result.error_message() << "\n";
trace->set_output(ai::JsonValue{{"error", result.error_message()}});
}

trace->end();
std::cout << "Trace flushed: id=" << trace->id() << "\n";
return result ? 0 : 2;
}
Loading
Loading