blob: 5f6b3faf8beacf15d5a73739e59dbbd743e59390 [file] [log] [blame]
package config
import (
	"context"
	"os"
	"path/filepath"
	"testing"

	"github.com/iomodo/staff/llm"
	"github.com/iomodo/staff/llm/openai"
)
// TestOpenAIIntegration tests the OpenAI integration with real API calls
// This test requires OPENAI_API_KEY environment variable to be set
// TestOpenAIIntegration exercises the OpenAI provider against the real API.
// It is skipped unless the OPENAI_API_KEY environment variable is set, so it
// never fails in environments without credentials.
func TestOpenAIIntegration(t *testing.T) {
	key := os.Getenv("OPENAI_API_KEY")
	if key == "" {
		t.Skip("OPENAI_API_KEY not set, skipping OpenAI integration test")
	}

	// Build a provider through the OpenAI factory from an explicit config.
	cfg := llm.Config{
		Provider: llm.ProviderOpenAI,
		APIKey:   key,
		BaseURL:  "https://api.openai.com/v1",
	}
	factory := &openai.OpenAIFactory{}
	provider, err := factory.CreateProvider(cfg)
	if err != nil {
		t.Fatalf("Failed to create OpenAI provider: %v", err)
	}
	defer provider.Close()

	// Verify a round-trip chat completion returns a non-empty message.
	t.Run("ChatCompletion", func(t *testing.T) {
		system := llm.Message{
			Role:    llm.RoleSystem,
			Content: "You are a helpful assistant.",
		}
		user := llm.Message{
			Role:    llm.RoleUser,
			Content: "Hello! Just say 'Hello from OpenAI' and nothing else.",
		}
		resp, err := provider.ChatCompletion(context.Background(), llm.ChatCompletionRequest{
			Model:    "gpt-3.5-turbo",
			Messages: []llm.Message{system, user},
		})
		if err != nil {
			t.Fatalf("ChatCompletion failed: %v", err)
		}
		if len(resp.Choices) == 0 {
			t.Fatal("No choices returned")
		}
		content := resp.Choices[0].Message.Content
		if content == "" {
			t.Fatal("Empty response content")
		}
		t.Logf("OpenAI Response: %s", content)
	})

	// Verify embeddings come back with a non-empty vector.
	t.Run("Embeddings", func(t *testing.T) {
		resp, err := provider.CreateEmbeddings(context.Background(), llm.EmbeddingRequest{
			Model: "text-embedding-ada-002",
			Input: "Hello, world!",
		})
		if err != nil {
			t.Fatalf("CreateEmbeddings failed: %v", err)
		}
		if len(resp.Data) == 0 {
			t.Fatal("No embeddings returned")
		}
		vec := resp.Data[0].Embedding
		if len(vec) == 0 {
			t.Fatal("Empty embedding vector")
		}
		t.Logf("Embedding dimensions: %d", len(vec))
	})
}
// TestConfigurationLoading tests the configuration loading functionality
// TestConfigurationLoading tests the configuration loading functionality by
// writing a YAML config to a temporary directory and asserting that
// LoadConfig surfaces the expected values.
func TestConfigurationLoading(t *testing.T) {
	configContent := `
openai:
  api_key: "test-key"
  model: "gpt-4"
github:
  token: "test-token"
  owner: "test-owner"
  repo: "test-repo"
agents:
  - name: "ceo"
    role: "CEO"
    system_prompt_file: "operations/agents/ceo/system.md"
tasks:
  storage_path: "tasks/"
`

	// t.TempDir is removed automatically when the test finishes, and
	// os.WriteFile avoids the CreateTemp/WriteString/Close handle juggling
	// (which previously leaked the handle on an early Fatalf).
	path := filepath.Join(t.TempDir(), "staff-config.yaml")
	if err := os.WriteFile(path, []byte(configContent), 0o600); err != nil {
		t.Fatalf("Failed to write config: %v", err)
	}

	cfg, err := LoadConfig(path)
	if err != nil {
		t.Fatalf("Failed to load config: %v", err)
	}

	// Validate the loaded values field by field.
	if cfg.OpenAI.APIKey != "test-key" {
		t.Errorf("Expected API key 'test-key', got '%s'", cfg.OpenAI.APIKey)
	}
	if cfg.OpenAI.Model != "gpt-4" {
		t.Errorf("Expected model 'gpt-4', got '%s'", cfg.OpenAI.Model)
	}
	if len(cfg.Agents) != 1 {
		t.Errorf("Expected 1 agent, got %d", len(cfg.Agents))
	}
	if cfg.Agents[0].Name != "ceo" {
		t.Errorf("Expected agent name 'ceo', got '%s'", cfg.Agents[0].Name)
	}
}
// TestEnvironmentOverrides tests environment variable overrides
// TestEnvironmentOverrides tests that environment variables take precedence
// over values from the config file when loading via LoadConfigWithEnvOverrides.
func TestEnvironmentOverrides(t *testing.T) {
	// t.Setenv restores the ORIGINAL values on cleanup; the previous
	// os.Setenv + deferred os.Unsetenv pair destroyed any pre-existing
	// OPENAI_API_KEY/GITHUB_TOKEN in the process environment.
	t.Setenv("OPENAI_API_KEY", "env-openai-key")
	t.Setenv("GITHUB_TOKEN", "env-github-token")

	configContent := `
openai:
  api_key: "config-key"
github:
  token: "config-token"
  owner: "test-owner"
  repo: "test-repo"
agents:
  - name: "ceo"
    role: "CEO"
    system_prompt_file: "operations/agents/ceo/system.md"
`

	// Auto-cleaned temp dir; single WriteFile call replaces the
	// CreateTemp/WriteString/Close sequence.
	path := filepath.Join(t.TempDir(), "staff-config.yaml")
	if err := os.WriteFile(path, []byte(configContent), 0o600); err != nil {
		t.Fatalf("Failed to write config: %v", err)
	}

	cfg, err := LoadConfigWithEnvOverrides(path)
	if err != nil {
		t.Fatalf("Failed to load config: %v", err)
	}

	// The environment values must win over the file values.
	if cfg.OpenAI.APIKey != "env-openai-key" {
		t.Errorf("Expected env API key 'env-openai-key', got '%s'", cfg.OpenAI.APIKey)
	}
	if cfg.GitHub.Token != "env-github-token" {
		t.Errorf("Expected env GitHub token 'env-github-token', got '%s'", cfg.GitHub.Token)
	}
}