task: task-1753636924-a1d4c708 - created

Change-Id: Ic78528c47ae38114b9b7504f1c4a76f95e93eb13
diff --git a/server/config/config.go b/server/config/config.go
new file mode 100644
index 0000000..84441d2
--- /dev/null
+++ b/server/config/config.go
@@ -0,0 +1,234 @@
+package config
+
+import (
+	"fmt"
+	"os"
+	"time"
+
+	"gopkg.in/yaml.v3"
+)
+
+// Config represents the Staff MVP configuration. It is the root structure
+// loaded from a YAML file via LoadConfig, optionally overridden from the
+// environment via LoadConfigWithEnvOverrides.
+type Config struct {
+	OpenAI OpenAIConfig `yaml:"openai"` // LLM provider settings
+	GitHub GitHubConfig `yaml:"github"` // GitHub repository and auth settings
+	Agents []AgentConfig `yaml:"agents"` // one entry per configured agent
+	Tasks  TasksConfig   `yaml:"tasks"` // task storage locations
+	Git    GitConfig     `yaml:"git"` // branch/commit/PR conventions
+}
+
+// OpenAIConfig represents OpenAI provider configuration.
+type OpenAIConfig struct {
+	APIKey     string        `yaml:"api_key"` // required; may be overridden by OPENAI_API_KEY
+	Model      string        `yaml:"model"` // defaults to "gpt-4" (see applyDefaults)
+	BaseURL    string        `yaml:"base_url"` // defaults to "https://api.openai.com/v1"
+	Timeout    time.Duration `yaml:"timeout"` // per-request timeout; defaults to 30s
+	MaxRetries int           `yaml:"max_retries"` // defaults to 3; zero is treated as unset
+}
+
+// GitHubConfig represents GitHub integration configuration. All fields are
+// required (see validateConfig); token, owner, and repo may be overridden by
+// the GITHUB_TOKEN, GITHUB_OWNER, and GITHUB_REPO environment variables.
+type GitHubConfig struct {
+	Token string `yaml:"token"` // personal access token or app token
+	Owner string `yaml:"owner"` // repository owner (user or organization)
+	Repo  string `yaml:"repo"` // repository name
+}
+
+// AgentConfig represents individual agent configuration. Name, Role, and
+// SystemPromptFile are required (see validateConfig).
+type AgentConfig struct {
+	Name             string   `yaml:"name"` // unique agent identifier, used for lookup
+	Role             string   `yaml:"role"` // human-readable role label (e.g. "CEO")
+	Model            string   `yaml:"model"` // per-agent model; falls back to openai.model
+	SystemPromptFile string   `yaml:"system_prompt_file"` // path to the agent's system prompt
+	Capabilities     []string `yaml:"capabilities"`     // For auto-assignment
+	TaskTypes        []string `yaml:"task_types"`       // Types of tasks this agent handles
+	MaxTokens        *int     `yaml:"max_tokens"`       // Model-specific token limits; nil means provider default
+	Temperature      *float64 `yaml:"temperature"`      // Model creativity setting; nil means provider default
+}
+
+// TasksConfig represents task management configuration.
+type TasksConfig struct {
+	StoragePath   string `yaml:"storage_path"` // directory for active tasks; defaults to "tasks/"
+	CompletedPath string `yaml:"completed_path"` // directory for finished tasks; defaults to "tasks/completed/"
+}
+
+// GitConfig represents Git operation configuration. The templates use
+// {placeholder} tokens (e.g. {task_id}, {agent_name}) that are substituted
+// elsewhere — presumably by the Git/PR automation layer; confirm against the
+// template consumers.
+type GitConfig struct {
+	BranchPrefix          string `yaml:"branch_prefix"` // prefix for task branches; defaults to "task/"
+	CommitMessageTemplate string `yaml:"commit_message_template"` // commit message template with {placeholders}
+	PRTemplate            string `yaml:"pr_template"` // pull-request body template with {placeholders}
+}
+
+// LoadConfig reads the YAML file at configPath, fills in defaults for any
+// omitted optional fields, validates the result, and returns the parsed
+// configuration. A non-nil error means the returned Config is nil.
+func LoadConfig(configPath string) (*Config, error) {
+	raw, err := os.ReadFile(configPath)
+	if err != nil {
+		return nil, fmt.Errorf("failed to read config file: %w", err)
+	}
+
+	var cfg Config
+	if err := yaml.Unmarshal(raw, &cfg); err != nil {
+		return nil, fmt.Errorf("failed to parse config YAML: %w", err)
+	}
+
+	// Defaults are applied before validation so that optional fields never
+	// trip the required-field checks.
+	cfg = applyDefaults(cfg)
+	if err := validateConfig(cfg); err != nil {
+		return nil, fmt.Errorf("invalid configuration: %w", err)
+	}
+
+	return &cfg, nil
+}
+
+// LoadConfigWithEnvOverrides loads the config file and then overrides
+// selected fields from environment variables, when set:
+//
+//	OPENAI_API_KEY -> openai.api_key
+//	GITHUB_TOKEN   -> github.token
+//	GITHUB_OWNER   -> github.owner
+//	GITHUB_REPO    -> github.repo
+//
+// Empty environment values are ignored, so the file-provided value survives
+// an unset variable. The configuration is re-validated after the overrides
+// are applied.
+func LoadConfigWithEnvOverrides(configPath string) (*Config, error) {
+	config, err := LoadConfig(configPath)
+	if err != nil {
+		return nil, err
+	}
+
+	// Table-driven overrides: adding a new env-backed field is one line
+	// here instead of another copy-pasted if-block.
+	overrides := []struct {
+		env string
+		dst *string
+	}{
+		{"OPENAI_API_KEY", &config.OpenAI.APIKey},
+		{"GITHUB_TOKEN", &config.GitHub.Token},
+		{"GITHUB_OWNER", &config.GitHub.Owner},
+		{"GITHUB_REPO", &config.GitHub.Repo},
+	}
+	for _, o := range overrides {
+		if v := os.Getenv(o.env); v != "" {
+			*o.dst = v
+		}
+	}
+
+	// Re-validate after env overrides: an env value may have replaced a
+	// previously-valid required field.
+	if err := validateConfig(*config); err != nil {
+		return nil, fmt.Errorf("invalid configuration after env overrides: %w", err)
+	}
+
+	return config, nil
+}
+
+// applyDefaults fills in default values for optional fields that were left
+// empty in the YAML file. It receives and returns the Config by value, so
+// the caller's copy is only updated through the return value.
+func applyDefaults(config Config) Config {
+	// OpenAI defaults
+	if config.OpenAI.Model == "" {
+		config.OpenAI.Model = "gpt-4"
+	}
+	if config.OpenAI.BaseURL == "" {
+		config.OpenAI.BaseURL = "https://api.openai.com/v1"
+	}
+	if config.OpenAI.Timeout == 0 {
+		config.OpenAI.Timeout = 30 * time.Second
+	}
+	// NOTE(review): a zero value is indistinguishable from "unset", so an
+	// explicit max_retries: 0 in the file is silently bumped to 3. Confirm
+	// this is acceptable, or switch the field to *int like MaxTokens.
+	if config.OpenAI.MaxRetries == 0 {
+		config.OpenAI.MaxRetries = 3
+	}
+
+	// Tasks defaults
+	if config.Tasks.StoragePath == "" {
+		config.Tasks.StoragePath = "tasks/"
+	}
+	if config.Tasks.CompletedPath == "" {
+		config.Tasks.CompletedPath = "tasks/completed/"
+	}
+
+	// Git defaults: the {placeholder} tokens are substituted by whatever
+	// consumes these templates — not visible here.
+	if config.Git.BranchPrefix == "" {
+		config.Git.BranchPrefix = "task/"
+	}
+	if config.Git.CommitMessageTemplate == "" {
+		config.Git.CommitMessageTemplate = "Task {task_id}: {task_title}\n\n{solution}\n\nGenerated by Staff AI Agent: {agent_name}"
+	}
+	if config.Git.PRTemplate == "" {
+		config.Git.PRTemplate = `## Task: {task_title}
+
+**Task ID:** {task_id}  
+**Agent:** {agent_name}  
+**Priority:** {priority}
+
+### Description
+{task_description}
+
+### Solution
+{solution}
+
+### Files Changed
+{files_changed}
+
+---
+*Generated by Staff AI Multi-Agent System*`
+	}
+
+	// Agent defaults: agents without an explicit model inherit the
+	// provider-level model (which itself defaulted to "gpt-4" above).
+	for i := range config.Agents {
+		if config.Agents[i].Model == "" {
+			config.Agents[i].Model = config.OpenAI.Model
+		}
+	}
+
+	return config
+}
+
+// validateConfig checks that every required field is present and returns an
+// error naming the first missing one, or nil when the configuration is
+// complete. Defaults are assumed to have been applied already.
+func validateConfig(config Config) error {
+	// Required scalar fields, checked in a fixed order so the first
+	// missing one is reported deterministically.
+	required := []struct {
+		value, field string
+	}{
+		{config.OpenAI.APIKey, "openai.api_key"},
+		{config.OpenAI.Model, "openai.model"},
+		{config.GitHub.Token, "github.token"},
+		{config.GitHub.Owner, "github.owner"},
+		{config.GitHub.Repo, "github.repo"},
+	}
+	for _, r := range required {
+		if r.value == "" {
+			return fmt.Errorf("%s is required", r.field)
+		}
+	}
+
+	if len(config.Agents) == 0 {
+		return fmt.Errorf("at least one agent must be configured")
+	}
+
+	for i, agent := range config.Agents {
+		switch {
+		case agent.Name == "":
+			return fmt.Errorf("agent[%d].name is required", i)
+		case agent.Role == "":
+			return fmt.Errorf("agent[%d].role is required", i)
+		case agent.SystemPromptFile == "":
+			return fmt.Errorf("agent[%d].system_prompt_file is required", i)
+		}
+	}
+
+	return nil
+}
+
+// GetAgentByName returns a pointer to the configuration of the agent with
+// the given name, or an error if no such agent is configured.
+//
+// The returned pointer aliases the entry in c.Agents, so callers can use it
+// to update the stored configuration. (The previous implementation returned
+// a pointer to a loop-variable copy, so any mutation through the result was
+// silently discarded.)
+func (c *Config) GetAgentByName(name string) (*AgentConfig, error) {
+	for i := range c.Agents {
+		if c.Agents[i].Name == name {
+			return &c.Agents[i], nil
+		}
+	}
+	return nil, fmt.Errorf("agent not found: %s", name)
+}
+
+// ListAgentNames returns the names of all configured agents in the order
+// they appear in the configuration.
+func (c *Config) ListAgentNames() []string {
+	names := make([]string, 0, len(c.Agents))
+	for _, agent := range c.Agents {
+		names = append(names, agent.Name)
+	}
+	return names
+}
\ No newline at end of file
diff --git a/server/config/openai_test.go b/server/config/openai_test.go
new file mode 100644
index 0000000..bd53e9c
--- /dev/null
+++ b/server/config/openai_test.go
@@ -0,0 +1,202 @@
+package config
+
+import (
+	"context"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/iomodo/staff/llm"
+	"github.com/iomodo/staff/llm/openai"
+)
+
+// TestOpenAIIntegration tests the OpenAI integration with real API calls.
+// This test requires OPENAI_API_KEY environment variable to be set and is
+// skipped otherwise. NOTE(review): it performs live network requests and
+// therefore incurs real API usage when run.
+func TestOpenAIIntegration(t *testing.T) {
+	apiKey := os.Getenv("OPENAI_API_KEY")
+	if apiKey == "" {
+		t.Skip("OPENAI_API_KEY not set, skipping OpenAI integration test")
+	}
+
+	// Create OpenAI config (llm.Config is the provider-agnostic config
+	// type from the project's llm package).
+	config := llm.Config{
+		Provider: llm.ProviderOpenAI,
+		APIKey:   apiKey,
+		BaseURL:  "https://api.openai.com/v1",
+	}
+
+	// Create OpenAI provider via its factory; Close presumably releases
+	// provider resources — see the openai package for specifics.
+	factory := &openai.OpenAIFactory{}
+	provider, err := factory.CreateProvider(config)
+	if err != nil {
+		t.Fatalf("Failed to create OpenAI provider: %v", err)
+	}
+	defer provider.Close()
+
+	// Test chat completion: a trivial prompt so the response shape (not
+	// its exact content) is what gets verified.
+	t.Run("ChatCompletion", func(t *testing.T) {
+		req := llm.ChatCompletionRequest{
+			Model: "gpt-3.5-turbo",
+			Messages: []llm.Message{
+				{
+					Role:    llm.RoleSystem,
+					Content: "You are a helpful assistant.",
+				},
+				{
+					Role:    llm.RoleUser,
+					Content: "Hello! Just say 'Hello from OpenAI' and nothing else.",
+				},
+			},
+		}
+
+		resp, err := provider.ChatCompletion(context.Background(), req)
+		if err != nil {
+			t.Fatalf("ChatCompletion failed: %v", err)
+		}
+
+		// Only assert that a non-empty choice came back; the model's
+		// exact wording is not guaranteed.
+		if len(resp.Choices) == 0 {
+			t.Fatal("No choices returned")
+		}
+
+		message := resp.Choices[0].Message
+		if message.Content == "" {
+			t.Fatal("Empty response content")
+		}
+
+		t.Logf("OpenAI Response: %s", message.Content)
+	})
+
+	// Test embeddings: verifies a non-empty vector is returned for a
+	// simple input string.
+	t.Run("Embeddings", func(t *testing.T) {
+		req := llm.EmbeddingRequest{
+			Model: "text-embedding-ada-002",
+			Input: "Hello, world!",
+		}
+
+		resp, err := provider.CreateEmbeddings(context.Background(), req)
+		if err != nil {
+			t.Fatalf("CreateEmbeddings failed: %v", err)
+		}
+
+		if len(resp.Data) == 0 {
+			t.Fatal("No embeddings returned")
+		}
+
+		embedding := resp.Data[0]
+		if len(embedding.Embedding) == 0 {
+			t.Fatal("Empty embedding vector")
+		}
+
+		t.Logf("Embedding dimensions: %d", len(embedding.Embedding))
+	})
+}
+
+// TestConfigurationLoading verifies that LoadConfig parses a YAML file and
+// surfaces the expected values.
+func TestConfigurationLoading(t *testing.T) {
+	configContent := `
+openai:
+  api_key: "test-key"
+  model: "gpt-4"
+  
+github:
+  token: "test-token"
+  owner: "test-owner"
+  repo: "test-repo"
+  
+agents:
+  - name: "ceo"
+    role: "CEO"
+    system_prompt_file: "operations/agents/ceo/system.md"
+    
+tasks:
+  storage_path: "tasks/"
+`
+
+	// t.TempDir is removed automatically when the test ends, so no manual
+	// os.Remove/Close bookkeeping is needed and nothing leaks on failure.
+	configPath := filepath.Join(t.TempDir(), "staff-config.yaml")
+	if err := os.WriteFile(configPath, []byte(configContent), 0o600); err != nil {
+		t.Fatalf("Failed to write config: %v", err)
+	}
+
+	// Test loading config
+	config, err := LoadConfig(configPath)
+	if err != nil {
+		t.Fatalf("Failed to load config: %v", err)
+	}
+
+	// Validate loaded config
+	if config.OpenAI.APIKey != "test-key" {
+		t.Errorf("Expected API key 'test-key', got '%s'", config.OpenAI.APIKey)
+	}
+
+	if config.OpenAI.Model != "gpt-4" {
+		t.Errorf("Expected model 'gpt-4', got '%s'", config.OpenAI.Model)
+	}
+
+	if len(config.Agents) != 1 {
+		t.Errorf("Expected 1 agent, got %d", len(config.Agents))
+	}
+
+	if config.Agents[0].Name != "ceo" {
+		t.Errorf("Expected agent name 'ceo', got '%s'", config.Agents[0].Name)
+	}
+}
+
+// TestEnvironmentOverrides tests that environment variables take precedence
+// over values from the config file.
+func TestEnvironmentOverrides(t *testing.T) {
+	// t.Setenv restores any pre-existing value when the test finishes.
+	// The previous os.Setenv + deferred os.Unsetenv pair destroyed a
+	// developer's real OPENAI_API_KEY / GITHUB_TOKEN after the test ran.
+	t.Setenv("OPENAI_API_KEY", "env-openai-key")
+	t.Setenv("GITHUB_TOKEN", "env-github-token")
+
+	configContent := `
+openai:
+  api_key: "config-key"
+  
+github:
+  token: "config-token"
+  owner: "test-owner"
+  repo: "test-repo"
+  
+agents:
+  - name: "ceo"
+    role: "CEO"
+    system_prompt_file: "operations/agents/ceo/system.md"
+`
+
+	// t.TempDir is removed automatically when the test ends; no manual
+	// cleanup required.
+	configPath := filepath.Join(t.TempDir(), "staff-config.yaml")
+	if err := os.WriteFile(configPath, []byte(configContent), 0o600); err != nil {
+		t.Fatalf("Failed to write config: %v", err)
+	}
+
+	// Test loading config with env overrides
+	config, err := LoadConfigWithEnvOverrides(configPath)
+	if err != nil {
+		t.Fatalf("Failed to load config: %v", err)
+	}
+
+	// Verify the env values won over the file values.
+	if config.OpenAI.APIKey != "env-openai-key" {
+		t.Errorf("Expected env API key 'env-openai-key', got '%s'", config.OpenAI.APIKey)
+	}
+
+	if config.GitHub.Token != "env-github-token" {
+		t.Errorf("Expected env GitHub token 'env-github-token', got '%s'", config.GitHub.Token)
+	}
+}
\ No newline at end of file