Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 83 additions & 4 deletions cmd/testai/main-testai.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ const (
DefaultAnthropicModel = "claude-sonnet-4-5"
DefaultOpenAIModel = "gpt-5.1"
DefaultOpenRouterModel = "mistralai/mistral-small-3.2-24b-instruct"
DefaultNanoGPTModel = "zai-org/glm-4.7"
DefaultGeminiModel = "gemini-3-pro-preview"
)

Expand Down Expand Up @@ -207,6 +208,12 @@ func testOpenAIComp(ctx context.Context, model, message string, tools []uctypes.
}
}

// testOpenRouter sends the provided message to the OpenRouter chat completions endpoint using WaveAIPostMessageWrap
// and streams the model's output to a test SSE writer.
//
// It requires the OPENROUTER_APIKEY environment variable and will exit the process with a message if the key is missing.
// The given model, message, and tool definitions are included in the chat request. The function prints test metadata
// (model, message, chat ID) and any streaming error to stdout.
func testOpenRouter(ctx context.Context, model, message string, tools []uctypes.ToolDefinition) {
apiKey := os.Getenv("OPENROUTER_APIKEY")
if apiKey == "" {
Expand Down Expand Up @@ -257,6 +264,60 @@ func testOpenRouter(ctx context.Context, model, message string, tools []uctypes.
}
}

// testNanoGPT streams a single test prompt through WaveAIPostMessageWrap against the
// NanoGPT OpenAI-compatible chat completions endpoint, writing the response via a
// TestResponseWriter-backed SSE handler.
// The model, message, and chat ID are echoed to stdout before streaming begins.
// If NANOGPT_KEY is unset in the environment, an error is printed and the process exits.
func testNanoGPT(ctx context.Context, model, message string, tools []uctypes.ToolDefinition) {
	token := os.Getenv("NANOGPT_KEY")
	if token == "" {
		fmt.Println("Error: NANOGPT_KEY environment variable not set")
		os.Exit(1)
	}

	// OpenAI-compatible chat API served by NanoGPT.
	aiOpts := &uctypes.AIOptsType{
		APIType:   uctypes.APIType_OpenAIChat,
		APIToken:  token,
		Endpoint:  "https://nano-gpt.com/api/v1/chat/completions",
		Model:     model,
		MaxTokens: 4096,
	}

	convoID := uuid.New().String()

	userMsg := &uctypes.AIMessage{
		MessageId: uuid.New().String(),
		Parts: []uctypes.AIMessagePart{
			{Type: uctypes.AIMessagePartTypeText, Text: message},
		},
	}

	fmt.Printf("Testing NanoGPT with WaveAIPostMessageWrap, model: %s\n", model)
	fmt.Printf("Message: %s\n", message)
	fmt.Printf("Chat ID: %s\n", convoID)
	fmt.Println("---")

	writer := &TestResponseWriter{}
	handler := sse.MakeSSEHandlerCh(writer, ctx)
	defer handler.Close()

	chatOpts := uctypes.WaveChatOpts{
		ChatId:       convoID,
		ClientId:     uuid.New().String(),
		Config:       *aiOpts,
		Tools:        tools,
		SystemPrompt: []string{"You are a helpful assistant. Be concise and clear in your responses."},
	}
	if err := aiusechat.WaveAIPostMessageWrap(ctx, handler, userMsg, chatOpts); err != nil {
		fmt.Printf("NanoGPT streaming error: %v\n", err)
	}
}

// testAnthropic runs a streaming test against Anthropic using the given model, message, and tools.
// It requires the ANTHROPIC_APIKEY environment variable and will print an error and exit if it is unset.
// The function prints test info (model, message, chat ID) and streams the model's responses to stdout via an SSE test writer using WaveAIPostMessageWrap.
func testAnthropic(ctx context.Context, model, message string, tools []uctypes.ToolDefinition) {
apiKey := os.Getenv("ANTHROPIC_APIKEY")
if apiKey == "" {
Expand Down Expand Up @@ -374,14 +435,16 @@ func testT3(ctx context.Context) {
testOpenAIComp(ctx, "gpt-4o", "what is 2+2? please be brief", nil)
}

// testT4 runs a preset Gemini integration test that asks the model to compute 2+2+8 using the provider adder tool.
func testT4(ctx context.Context) {
tool := aiusechat.GetAdderToolDefinition()
tools := []uctypes.ToolDefinition{tool}
testGemini(ctx, DefaultGeminiModel, "what is 2+2+8, use the provider adder tool", tools)
}

// printUsage prints the program usage help including example invocations, the default model names for each supported provider, and the environment variables required for API keys.
func printUsage() {
fmt.Println("Usage: go run main-testai.go [--anthropic|--openaicomp|--openrouter|--gemini] [--tools] [--model <model>] [message]")
fmt.Println("Usage: go run main-testai.go [--anthropic|--openaicomp|--openrouter|--nanogpt|--gemini] [--tools] [--model <model>] [message]")
fmt.Println("Examples:")
fmt.Println(" go run main-testai.go 'What is 2+2?'")
fmt.Println(" go run main-testai.go --model o4-mini 'What is 2+2?'")
Expand All @@ -390,6 +453,8 @@ func printUsage() {
fmt.Println(" go run main-testai.go --openaicomp --model gpt-4o 'What is 2+2?'")
fmt.Println(" go run main-testai.go --openrouter 'What is 2+2?'")
fmt.Println(" go run main-testai.go --openrouter --model anthropic/claude-3.5-sonnet 'What is 2+2?'")
fmt.Println(" go run main-testai.go --nanogpt 'What is 2+2?'")
fmt.Println(" go run main-testai.go --nanogpt --model gpt-4o 'What is 2+2?'")
fmt.Println(" go run main-testai.go --gemini 'What is 2+2?'")
fmt.Println(" go run main-testai.go --gemini --model gemini-1.5-pro 'What is 2+2?'")
fmt.Println(" go run main-testai.go --tools 'Help me configure GitHub Actions monitoring'")
Expand All @@ -399,24 +464,34 @@ func printUsage() {
fmt.Printf(" Anthropic: %s\n", DefaultAnthropicModel)
fmt.Printf(" OpenAI Completions: gpt-4o\n")
fmt.Printf(" OpenRouter: %s\n", DefaultOpenRouterModel)
fmt.Printf(" NanoGPT: %s\n", DefaultNanoGPTModel)
fmt.Printf(" Google Gemini: %s\n", DefaultGeminiModel)
fmt.Println("")
fmt.Println("Environment variables:")
fmt.Println(" OPENAI_APIKEY (for OpenAI models)")
fmt.Println(" ANTHROPIC_APIKEY (for Anthropic models)")
fmt.Println(" OPENROUTER_APIKEY (for OpenRouter models)")
fmt.Println(" NANOGPT_KEY (for NanoGPT models)")
fmt.Println(" GOOGLE_APIKEY (for Google Gemini models)")
}

// main parses command-line flags, selects an AI provider and model, and runs the corresponding test flow.
//
// Recognized flags allow choosing among Anthropic, OpenAI (chat or completions), OpenRouter, NanoGPT, and Google Gemini,
// enable optional test tools, request usage, or run one of the preset tests (t1–t4). If a preset test is requested the
// program runs it and exits. When no model is provided, a sensible default is chosen based on the selected provider.
// The first non-flag argument is used as the prompt (defaults to "What is 2+2?"). If tools are enabled, tool definitions
// are loaded and passed to the selected provider-specific test function, which performs the streaming test run.
func main() {
var anthropic, openaicomp, openrouter, gemini, tools, help, t1, t2, t3, t4 bool
var anthropic, openaicomp, openrouter, nanogpt, gemini, tools, help, t1, t2, t3, t4 bool
var model string
flag.BoolVar(&anthropic, "anthropic", false, "Use Anthropic API instead of OpenAI")
flag.BoolVar(&openaicomp, "openaicomp", false, "Use OpenAI Completions API")
flag.BoolVar(&openrouter, "openrouter", false, "Use OpenRouter API")
flag.BoolVar(&nanogpt, "nanogpt", false, "Use NanoGPT API")
flag.BoolVar(&gemini, "gemini", false, "Use Google Gemini API")
flag.BoolVar(&tools, "tools", false, "Enable GitHub Actions Monitor tools for testing")
flag.StringVar(&model, "model", "", fmt.Sprintf("AI model to use (defaults: %s for OpenAI, %s for Anthropic, %s for OpenRouter, %s for Gemini)", DefaultOpenAIModel, DefaultAnthropicModel, DefaultOpenRouterModel, DefaultGeminiModel))
flag.StringVar(&model, "model", "", fmt.Sprintf("AI model to use (defaults: %s for OpenAI, %s for Anthropic, %s for OpenRouter, %s for NanoGPT, %s for Gemini)", DefaultOpenAIModel, DefaultAnthropicModel, DefaultOpenRouterModel, DefaultNanoGPTModel, DefaultGeminiModel))
flag.BoolVar(&help, "help", false, "Show usage information")
flag.BoolVar(&t1, "t1", false, fmt.Sprintf("Run preset T1 test (%s with 'what is 2+2')", DefaultAnthropicModel))
flag.BoolVar(&t2, "t2", false, fmt.Sprintf("Run preset T2 test (%s with 'what is 2+2')", DefaultOpenAIModel))
Expand Down Expand Up @@ -457,6 +532,8 @@ func main() {
model = "gpt-4o"
} else if openrouter {
model = DefaultOpenRouterModel
} else if nanogpt {
model = DefaultNanoGPTModel
} else if gemini {
model = DefaultGeminiModel
} else {
Expand All @@ -481,9 +558,11 @@ func main() {
testOpenAIComp(ctx, model, message, toolDefs)
} else if openrouter {
testOpenRouter(ctx, model, message, toolDefs)
} else if nanogpt {
testNanoGPT(ctx, model, message, toolDefs)
} else if gemini {
testGemini(ctx, model, message, toolDefs)
} else {
testOpenAI(ctx, model, message, toolDefs)
}
}
}
19 changes: 18 additions & 1 deletion pkg/aiusechat/usechat-mode.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
OpenAIResponsesEndpoint = "https://api.openai.com/v1/responses"
OpenAIChatEndpoint = "https://api.openai.com/v1/chat/completions"
OpenRouterChatEndpoint = "https://openrouter.ai/api/v1/chat/completions"
NanoGPTChatEndpoint = "https://nano-gpt.com/api/v1/chat/completions"
AzureLegacyEndpointTemplate = "https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=%s"
AzureResponsesEndpointTemplate = "https://%s.openai.azure.com/openai/v1/responses"
AzureChatEndpointTemplate = "https://%s.openai.azure.com/openai/v1/chat/completions"
Expand All @@ -30,6 +31,7 @@

OpenAIAPITokenSecretName = "OPENAI_KEY"
OpenRouterAPITokenSecretName = "OPENROUTER_KEY"
NanoGPTAPITokenSecretName = "NANOGPT_KEY"
AzureOpenAIAPITokenSecretName = "AZURE_OPENAI_KEY"
GoogleAIAPITokenSecretName = "GOOGLE_AI_KEY"
)
Expand All @@ -53,6 +55,10 @@
return mode, config, nil
}

// applyProviderDefaults fills missing fields in an AIModeConfigType with sensible provider-specific defaults.
// It mutates the provided config in-place.
// Defaults set include APIType, Endpoint, APITokenSecretName, Capabilities, Azure API version and endpoints,
// Wave cloud enablement, and provider-specific endpoint templates where applicable.
func applyProviderDefaults(config *wconfig.AIModeConfigType) {
if config.Provider == uctypes.AIProvider_Wave {
config.WaveAICloud = true
Expand Down Expand Up @@ -99,6 +105,17 @@
config.APITokenSecretName = OpenRouterAPITokenSecretName
}
}
if config.Provider == uctypes.AIProvider_NanoGPT {

Check failure on line 108 in pkg/aiusechat/usechat-mode.go

View workflow job for this annotation

GitHub Actions / Build for TestDriver.ai

undefined: uctypes.AIProvider_NanoGPT

Check failure on line 108 in pkg/aiusechat/usechat-mode.go

View workflow job for this annotation

GitHub Actions / Analyze (go)

undefined: uctypes.AIProvider_NanoGPT
if config.APIType == "" {
config.APIType = uctypes.APIType_OpenAIChat
}
if config.Endpoint == "" {
config.Endpoint = NanoGPTChatEndpoint
}
if config.APITokenSecretName == "" {
config.APITokenSecretName = NanoGPTAPITokenSecretName
}
}
if config.Provider == uctypes.AIProvider_AzureLegacy {
if config.AzureAPIVersion == "" {
config.AzureAPIVersion = AzureLegacyDefaultAPIVersion
Expand Down Expand Up @@ -264,4 +281,4 @@
Event: wps.Event_AIModeConfig,
Data: update,
})
}
}
Loading