Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 8 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ int main() {
auto client = ai::openai::create_client();

auto result = client.generate_text({
.model = ai::openai::models::kGpt4o, // this can also be a string like "gpt-4o"
.model = ai::openai::models::kGpt54, // this can also be a string like "gpt-5.4"
.system = "You are a friendly assistant!",
.prompt = "Why is the sky blue?"
});
Expand All @@ -56,7 +56,7 @@ int main() {
// Ensure ANTHROPIC_API_KEY environment variable is set
auto client = ai::anthropic::create_client();
auto result = client.generate_text({
.model = ai::anthropic::models::kClaudeSonnet45,
.model = ai::anthropic::models::kClaudeSonnet46,
.system = "You are a helpful assistant.",
.prompt = "Explain quantum computing in simple terms."
});
Expand All @@ -80,7 +80,7 @@ int main() {
auto client = ai::openai::create_client();

auto stream = client.stream_text({
.model = ai::openai::models::kGpt4o, // this can also be a string like "gpt-4o"
.model = ai::openai::models::kGpt54, // this can also be a string like "gpt-5.4"
.system = "You are a helpful assistant.",
.prompt = "Write a short story about a robot."
});
Expand Down Expand Up @@ -113,7 +113,7 @@ int main() {
};

auto result = client.generate_text({
.model = ai::openai::models::kGpt4o, // this can also be a string like "gpt-4o"
.model = ai::openai::models::kGpt54, // this can also be a string like "gpt-5.4"
.messages = messages
});

Expand Down Expand Up @@ -161,7 +161,7 @@ int main() {
};

auto result = client.generate_text({
.model = ai::openai::models::kGpt4o,
.model = ai::openai::models::kGpt54,
.prompt = "What's the weather like in San Francisco?",
.tools = tools,
.max_steps = 3 // Enable multi-step tool calling
Expand Down Expand Up @@ -217,7 +217,7 @@ int main() {

// Multiple async tools will execute in parallel
auto result = client.generate_text({
.model = ai::openai::models::kGpt4o,
.model = ai::openai::models::kGpt54,
.prompt = "Fetch data from the user and product APIs",
.tools = tools
});
Expand Down Expand Up @@ -252,7 +252,7 @@ int main() {
// - Network errors
// - HTTP 408, 409, 429 (rate limits), and 5xx errors
auto result = client.generate_text({
.model = ai::openai::models::kGpt4o,
.model = ai::openai::models::kGpt54,
.prompt = "Hello, world!"
});

Expand Down Expand Up @@ -286,7 +286,7 @@ int main() {

// Use any model available on OpenRouter
auto result = client.generate_text({
.model = "anthropic/claude-3.5-sonnet", // or "meta-llama/llama-3.1-8b-instruct", etc.
.model = "anthropic/claude-sonnet-4-6", // or "meta-llama/llama-3.1-8b-instruct", etc.
.system = "You are a helpful assistant.",
.prompt = "What are the benefits of using OpenRouter?"
});
Expand Down
12 changes: 6 additions & 6 deletions examples/basic_chat.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,12 @@ int main() {
std::cout << "================================\n\n";

// Example 1: Simple text generation with OpenAI
std::cout << "1. Generating text with OpenAI GPT-4o:\n";
std::cout << "1. Generating text with OpenAI GPT-5.4:\n";
std::cout << "Question: What is the capital of France?\n\n";

auto client1 = ai::openai::create_client();
ai::GenerateOptions options1;
options1.model = ai::openai::models::kGpt4o;
options1.model = ai::openai::models::kGpt54;
options1.prompt =
"What is the capital of France? Please provide a brief answer.";

Expand All @@ -50,7 +50,7 @@ int main() {
std::cout << "Question: Explain what a prime number is.\n\n";

ai::GenerateOptions options2;
options2.model = ai::openai::models::kGpt4o;
options2.model = ai::openai::models::kGpt54;
options2.system =
"You are a helpful math tutor who explains concepts clearly.";
options2.prompt =
Expand Down Expand Up @@ -80,7 +80,7 @@ int main() {
"Which one should I use for frequent insertions in the middle?")};

ai::GenerateOptions options3;
options3.model = ai::openai::models::kGpt4o;
options3.model = ai::openai::models::kGpt54;
options3.messages = conversation;

auto result3 = client1.generate_text(options3);
Expand All @@ -98,7 +98,7 @@ int main() {

auto client4 = ai::anthropic::create_client();
ai::GenerateOptions options4;
options4.model = ai::anthropic::models::kClaudeSonnet45;
options4.model = ai::anthropic::models::kClaudeSonnet46;
options4.prompt =
"Write a haiku about programming. Just the haiku, nothing else.";

Expand All @@ -115,7 +115,7 @@ int main() {
std::cout << "5. Using GenerateOptions for fine control:\n";

ai::GenerateOptions options;
options.model = ai::openai::models::kGpt4o;
options.model = ai::openai::models::kGpt54;
options.prompt = "List 3 benefits of using C++ for systems programming.";
options.max_tokens = 150;
options.temperature = 0.7;
Expand Down
8 changes: 4 additions & 4 deletions examples/components/all/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ int main() {
// Test core functionality
std::cout << "Testing core functionality...\n";
ai::GenerateOptions options;
options.model = "gpt-4o";
options.model = "gpt-5.4";
options.prompt = "Hello world";
std::cout << "✓ Core types work fine\n\n";

Expand All @@ -36,8 +36,8 @@ int main() {
try {
auto openai_client = ai::openai::create_client();
std::cout << "✓ OpenAI client created successfully\n";
std::cout << "✓ Available models: " << ai::openai::models::kGpt4o << ", "
<< ai::openai::models::kGpt4oMini << "\n";
std::cout << "✓ Available models: " << ai::openai::models::kGpt54 << ", "
<< ai::openai::models::kGpt54Mini << "\n";
} catch (const std::exception& e) {
std::cout << "✗ OpenAI client failed: " << e.what() << "\n";
}
Expand All @@ -52,7 +52,7 @@ int main() {
auto anthropic_client = ai::anthropic::create_client();
std::cout << "✓ Anthropic client created successfully\n";
std::cout << "✓ Available models: "
<< ai::anthropic::models::kClaudeSonnet45 << ", "
<< ai::anthropic::models::kClaudeSonnet46 << ", "
<< ai::anthropic::models::kClaudeHaiku45 << "\n";
} catch (const std::exception& e) {
std::cout << "✗ Anthropic client failed: " << e.what() << "\n";
Expand Down
4 changes: 2 additions & 2 deletions examples/components/anthropic/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ int main() {
// Test core functionality
std::cout << "Testing core functionality...\n";
ai::GenerateOptions options;
options.model = "claude-sonnet-4-5-20250929";
options.model = "claude-sonnet-4-6";
options.prompt = "Hello world";
std::cout << "✓ Core types work fine\n\n";

Expand All @@ -37,7 +37,7 @@ int main() {
auto client = ai::anthropic::create_client();
std::cout << "✓ Anthropic client created successfully\n";
std::cout << "✓ Available models: "
<< ai::anthropic::models::kClaudeSonnet45 << ", "
<< ai::anthropic::models::kClaudeSonnet46 << ", "
<< ai::anthropic::models::kClaudeHaiku45 << "\n";
} catch (const std::exception& e) {
std::cout << "✗ Anthropic client failed: " << e.what() << "\n";
Expand Down
6 changes: 3 additions & 3 deletions examples/components/openai/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ int main() {
// Test core functionality
std::cout << "Testing core functionality...\n";
ai::GenerateOptions options;
options.model = "gpt-4o";
options.model = "gpt-5.4";
options.prompt = "Hello world";
std::cout << "✓ Core types work fine\n\n";

Expand All @@ -36,8 +36,8 @@ int main() {
try {
auto client = ai::openai::create_client();
std::cout << "✓ OpenAI client created successfully\n";
std::cout << "✓ Available models: " << ai::openai::models::kGpt4o << ", "
<< ai::openai::models::kGpt4oMini << "\n";
std::cout << "✓ Available models: " << ai::openai::models::kGpt54 << ", "
<< ai::openai::models::kGpt54Mini << "\n";
} catch (const std::exception& e) {
std::cout << "✗ OpenAI client failed: " << e.what() << "\n";
}
Expand Down
8 changes: 4 additions & 4 deletions examples/error_handling.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ void demonstrate_api_errors() {
// Test with empty prompt
std::cout << "Testing with empty prompt:\n";
ai::GenerateOptions options2;
options2.model = ai::openai::models::kGpt4o;
options2.model = ai::openai::models::kGpt54;
options2.prompt = "";
auto result2 = client.generate_text(options2);

Expand Down Expand Up @@ -81,7 +81,7 @@ void demonstrate_validation() {

// Test valid options
ai::GenerateOptions valid_options;
valid_options.model = ai::openai::models::kGpt4o;
valid_options.model = ai::openai::models::kGpt54;
valid_options.prompt = "Hello";

if (valid_options.is_valid()) {
Expand Down Expand Up @@ -181,8 +181,8 @@ void demonstrate_recovery_patterns() {

std::vector<std::string> fallback_models = {
"primary-model-v3", // This will fail
ai::openai::models::kGpt4o, // This should work (if API key is available)
ai::openai::models::kGpt4oMini // Faster fallback
ai::openai::models::kGpt54, // This should work (if API key is available)
ai::openai::models::kGpt54Mini // Faster fallback
};

std::string prompt = "What is machine learning?";
Expand Down
18 changes: 9 additions & 9 deletions examples/multi_provider.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -88,13 +88,13 @@ int main() {

// Test OpenAI models
results1.push_back(
test_provider("OpenAI", ai::openai::models::kGpt4o, simple_question));
test_provider("OpenAI", ai::openai::models::kGpt54, simple_question));
results1.push_back(
test_provider("OpenAI", ai::openai::models::kGpt4oMini, simple_question));
test_provider("OpenAI", ai::openai::models::kGpt54Mini, simple_question));

// Test Anthropic models
results1.push_back(test_provider(
"Anthropic", ai::anthropic::models::kClaudeSonnet45, simple_question));
"Anthropic", ai::anthropic::models::kClaudeSonnet46, simple_question));
results1.push_back(test_provider(
"Anthropic", ai::anthropic::models::kClaudeHaiku45, simple_question));

Expand All @@ -116,9 +116,9 @@ int main() {

// Test with different providers for creativity
results2.push_back(
test_provider("OpenAI", ai::openai::models::kGpt4o, creative_prompt));
test_provider("OpenAI", ai::openai::models::kGpt54, creative_prompt));
results2.push_back(test_provider(
"Anthropic", ai::anthropic::models::kClaudeSonnet45, creative_prompt));
"Anthropic", ai::anthropic::models::kClaudeSonnet46, creative_prompt));

for (const auto& result : results2) {
print_result(result);
Expand All @@ -135,9 +135,9 @@ int main() {
std::vector<ProviderResult> results3;

results3.push_back(
test_provider("OpenAI", ai::openai::models::kGpt4o, technical_prompt));
test_provider("OpenAI", ai::openai::models::kGpt54, technical_prompt));
results3.push_back(test_provider(
"Anthropic", ai::anthropic::models::kClaudeSonnet45, technical_prompt));
"Anthropic", ai::anthropic::models::kClaudeSonnet46, technical_prompt));

for (const auto& result : results3) {
print_result(result);
Expand Down Expand Up @@ -248,8 +248,8 @@ int main() {
std::cout << " - Token usage affects cost - consider model efficiency\n";
std::cout << " - The AI SDK provides a unified interface across providers\n";
std::cout << "\nTip: Choose models based on your specific use case:\n";
std::cout << " - Fast responses: GPT-4o-mini, Claude-3-5-haiku\n";
std::cout << " - High quality: GPT-4o, Claude-3-5-sonnet\n";
std::cout << " - Fast responses: GPT-5.4-mini, Claude-haiku-4.5\n";
std::cout << " - High quality: GPT-5.4, Claude-sonnet-4.6\n";
std::cout << " - Creative tasks: Models with higher temperature settings\n";

return 0;
Expand Down
6 changes: 3 additions & 3 deletions examples/openrouter_example.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,11 @@ int main() {
std::cout << "Testing text generation with OpenRouter...\n\n";

// Using a model that's available on OpenRouter
// Common models: "openai/gpt-4o", "anthropic/claude-sonnet-4-5",
// Common models: "openai/gpt-5.4", "anthropic/claude-sonnet-4-6",
// "meta-llama/llama-3.1-8b-instruct" See https://openrouter.ai/models for
// available models
ai::GenerateOptions options(
"anthropic/claude-sonnet-4-5", "You are a helpful assistant.",
"anthropic/claude-sonnet-4-6", "You are a helpful assistant.",
"What are the benefits of using OpenRouter for AI applications? Give a "
"brief answer.");

Expand All @@ -55,7 +55,7 @@ int main() {
// Test streaming with OpenRouter
std::cout << "\n\nTesting streaming with OpenRouter...\n";

ai::GenerateOptions stream_opts("anthropic/claude-sonnet-4-5",
ai::GenerateOptions stream_opts("anthropic/claude-sonnet-4-6",
"You are a creative writer.",
"Write a haiku about API compatibility.");
ai::StreamOptions stream_options(stream_opts);
Expand Down
4 changes: 2 additions & 2 deletions examples/retry_config_example.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ int main() {
auto default_client = ai::openai::create_client();

ai::GenerateOptions options;
options.model = ai::openai::models::kGpt4oMini;
options.model = ai::openai::models::kGpt54Mini;
options.prompt = "Say 'Hello with default retry config!'";

auto result1 = default_client.generate_text(options);
Expand Down Expand Up @@ -138,7 +138,7 @@ int main() {
options.prompt =
"Generate a list of 5 creative project names for a batch processing "
"system.";
options.model = ai::openai::models::kGpt4o;
options.model = ai::openai::models::kGpt54;

std::cout << "Processing batch request (this might take a while if retries "
"occur)...\n";
Expand Down
8 changes: 4 additions & 4 deletions examples/streaming_chat.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ int main() {

auto client = ai::openai::create_client();
ai::GenerateOptions gen_options1;
gen_options1.model = ai::openai::models::kGpt4o;
gen_options1.model = ai::openai::models::kGpt54;
gen_options1.prompt =
"Write a short story about a robot learning "
"to paint. Keep it under 200 words.";
Expand Down Expand Up @@ -77,7 +77,7 @@ int main() {
};

ai::GenerateOptions gen_options2;
gen_options2.model = ai::openai::models::kGpt4oMini;
gen_options2.model = ai::openai::models::kGpt54Mini;
gen_options2.prompt =
"Explain quantum computing in simple terms that a "
"high school student could understand.";
Expand All @@ -102,7 +102,7 @@ int main() {

std::cout << "Response: ";
ai::GenerateOptions gen_options3;
gen_options3.model = ai::openai::models::kGpt4o;
gen_options3.model = ai::openai::models::kGpt54;
gen_options3.messages = conversation;
ai::StreamOptions options3(std::move(gen_options3));
auto stream3 = client.stream_text(options3);
Expand Down Expand Up @@ -139,7 +139,7 @@ int main() {
std::cout << "Prompt: Write 3 unusual ice cream flavors.\n\n";

ai::GenerateOptions gen_options4;
gen_options4.model = ai::openai::models::kGpt4o;
gen_options4.model = ai::openai::models::kGpt54;
gen_options4.prompt =
"Invent 3 unusual but delicious ice cream flavors with creative names.";
gen_options4.temperature = 1.2; // High creativity
Expand Down
4 changes: 2 additions & 2 deletions examples/test_anthropic.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ int main() {
// Test simple generation
std::cout << "Testing Anthropic text generation...\n\n";

ai::GenerateOptions options(ai::anthropic::models::kClaudeHaiku35,
ai::GenerateOptions options(ai::anthropic::models::kClaudeHaiku45,
"You are a helpful assistant.",
"Why is the sky blue? Give a short answer.");

Expand All @@ -35,7 +35,7 @@ int main() {
std::cout << "\nTesting streaming...\n";

ai::GenerateOptions stream_opts(
ai::anthropic::models::kClaudeHaiku35,
ai::anthropic::models::kClaudeHaiku45,
"Count from 1 to 5 slowly and with each number say 'tick'");
ai::StreamOptions stream_options(stream_opts);

Expand Down
4 changes: 2 additions & 2 deletions examples/test_openai.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ int main() {
// Test simple generation
std::cout << "Testing OpenAI text generation...\n\n";

ai::GenerateOptions options(ai::openai::models::kGpt4oMini,
ai::GenerateOptions options(ai::openai::models::kGpt54Mini,
"You are a friendly assistant!",
"Why is the sky blue? Give a short answer.");

Expand All @@ -35,7 +35,7 @@ int main() {
std::cout << "\nTesting streaming...\n";

ai::GenerateOptions stream_opts(
ai::openai::models::kGpt4oMini,
ai::openai::models::kGpt54Mini,
"Count from 1 to 5 slowly and along with each number say 'tick'");
ai::StreamOptions stream_options(stream_opts);

Expand Down
Loading
Loading