Add subtasks

Change-Id: Ica6afd9eef38bcf29135bf2c8a2e4bf0407ccfa1
diff --git a/server/subtasks/service.go b/server/subtasks/service.go
new file mode 100644
index 0000000..ad397ff
--- /dev/null
+++ b/server/subtasks/service.go
@@ -0,0 +1,296 @@
+package subtasks
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"log"
+	"strings"
+
+	"github.com/iomodo/staff/llm"
+	"github.com/iomodo/staff/tm"
+)
+
+// SubtaskService handles subtask generation and management
+type SubtaskService struct {
+	llmProvider    llm.LLMProvider // backend used for subtask analysis; closed by Close()
+	taskManager    tm.TaskManager // NOTE(review): stored but not referenced by any method in this file — confirm it is needed
+	agentRoles     []string // Available agent roles for assignment
+}
+
+// NewSubtaskService constructs a SubtaskService wired to the given LLM
+// provider, task manager, and the set of roles subtasks may be assigned to.
+func NewSubtaskService(provider llm.LLMProvider, taskManager tm.TaskManager, agentRoles []string) *SubtaskService {
+	svc := &SubtaskService{}
+	svc.llmProvider = provider
+	svc.taskManager = taskManager
+	svc.agentRoles = agentRoles
+	return svc
+}
+
+// AnalyzeTaskForSubtasks uses the LLM to analyze a task and propose subtasks.
+//
+// It sends the task's title, description, priority and status to the model
+// and parses the JSON answer into a tm.SubtaskAnalysis. An error is returned
+// when the LLM call fails, returns no choices, or the response cannot be
+// parsed as the expected JSON shape.
+func (s *SubtaskService) AnalyzeTaskForSubtasks(ctx context.Context, task *tm.Task) (*tm.SubtaskAnalysis, error) {
+	prompt := s.buildSubtaskAnalysisPrompt(task)
+
+	// Named locals instead of the opaque &[]T{v}[0] trick: the request
+	// fields take *int / *float64, and this keeps the values readable.
+	maxTokens := 4000
+	temperature := 0.3
+
+	req := llm.ChatCompletionRequest{
+		Model: "gpt-4",
+		Messages: []llm.Message{
+			{
+				Role:    llm.RoleSystem,
+				Content: s.getSubtaskAnalysisSystemPrompt(),
+			},
+			{
+				Role:    llm.RoleUser,
+				Content: prompt,
+			},
+		},
+		MaxTokens:   &maxTokens,
+		Temperature: &temperature,
+	}
+
+	resp, err := s.llmProvider.ChatCompletion(ctx, req)
+	if err != nil {
+		return nil, fmt.Errorf("LLM analysis failed: %w", err)
+	}
+
+	if len(resp.Choices) == 0 {
+		return nil, fmt.Errorf("no response from LLM")
+	}
+
+	// Parse the (possibly markdown-wrapped) model output into typed form.
+	analysis, err := s.parseSubtaskAnalysis(resp.Choices[0].Message.Content, task.ID)
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse LLM response: %w", err)
+	}
+
+	return analysis, nil
+}
+
+// getSubtaskAnalysisSystemPrompt returns the system prompt for subtask analysis.
+// The prompt embeds the configured agent roles so the model only assigns
+// subtasks to roles that exist; parseSubtaskAnalysis re-validates the
+// assignments afterwards as a safety net.
+func (s *SubtaskService) getSubtaskAnalysisSystemPrompt() string {
+	availableRoles := strings.Join(s.agentRoles, ", ")
+	
+	// NOTE: the JSON schema described here must stay in sync with the
+	// rawAnalysis struct decoded in parseSubtaskAnalysis.
+	return fmt.Sprintf(`You are an expert project manager and technical architect. Your job is to analyze complex tasks and break them down into well-defined subtasks that can be assigned to specialized team members.
+
+Available team roles: %s
+
+When analyzing a task, you should:
+1. Understand the task requirements and scope
+2. Break it down into logical, manageable subtasks
+3. Assign each subtask to the most appropriate team role
+4. Estimate effort and identify dependencies
+5. Provide a clear execution strategy
+
+Respond with a JSON object in this exact format:
+{
+  "analysis_summary": "Brief analysis of the task and approach",
+  "subtasks": [
+    {
+      "title": "Subtask title",
+      "description": "Detailed description of what needs to be done",
+      "priority": "high|medium|low",
+      "assigned_to": "role_name",
+      "estimated_hours": 8,
+      "dependencies": ["subtask_index_1", "subtask_index_2"]
+    }
+  ],
+  "recommended_approach": "High-level strategy for executing these subtasks",
+  "estimated_total_hours": 40,
+  "risk_assessment": "Potential risks and mitigation strategies"
+}
+
+Only use the available team roles for assignment. Dependencies should reference subtask indices (e.g., ["0", "1"] means depends on first and second subtasks).`, availableRoles)
+}
+
+// buildSubtaskAnalysisPrompt creates the user prompt for LLM analysis.
+// It interpolates the task's title, description, priority and status into a
+// fixed template; the expected response format is defined by the system
+// prompt, not here.
+func (s *SubtaskService) buildSubtaskAnalysisPrompt(task *tm.Task) string {
+	return fmt.Sprintf(`Please analyze the following task and break it down into subtasks:
+
+**Task Title:** %s
+
+**Description:** %s
+
+**Priority:** %s
+
+**Current Status:** %s
+
+Please analyze this task and provide a detailed breakdown into subtasks. Consider:
+- Technical complexity and requirements
+- Logical task dependencies 
+- Appropriate skill sets needed for each subtask
+- Risk factors and potential blockers
+- Estimated effort for each component
+
+Provide the analysis in the JSON format specified in the system prompt.`, 
+		task.Title, 
+		task.Description, 
+		task.Priority, 
+		task.Status)
+}
+
+// parseSubtaskAnalysis parses the LLM response into a SubtaskAnalysis struct.
+//
+// The model may wrap the JSON in markdown fences or prose, so the payload is
+// taken as the span from the first "{" to the last "}". Priorities are
+// normalized to tm.Priority values (defaulting to medium), and invalid role
+// assignments are logged and repaired rather than rejected.
+func (s *SubtaskService) parseSubtaskAnalysis(response string, parentTaskID string) (*tm.SubtaskAnalysis, error) {
+	// Try to extract JSON from the response (LLM might wrap it in markdown)
+	jsonStart := strings.Index(response, "{")
+	jsonEnd := strings.LastIndex(response, "}")
+
+	// Guard against a missing OR inverted brace pair (e.g. "}...{"):
+	// without the jsonEnd < jsonStart check the slice below would panic.
+	if jsonStart == -1 || jsonEnd == -1 || jsonEnd < jsonStart {
+		return nil, fmt.Errorf("no JSON found in LLM response")
+	}
+
+	jsonStr := response[jsonStart : jsonEnd+1]
+
+	// Mirror of the schema promised in the system prompt.
+	var rawAnalysis struct {
+		AnalysisSummary string `json:"analysis_summary"`
+		Subtasks        []struct {
+			Title          string   `json:"title"`
+			Description    string   `json:"description"`
+			Priority       string   `json:"priority"`
+			AssignedTo     string   `json:"assigned_to"`
+			EstimatedHours int      `json:"estimated_hours"`
+			Dependencies   []string `json:"dependencies"`
+		} `json:"subtasks"`
+		RecommendedApproach string `json:"recommended_approach"`
+		EstimatedTotalHours int    `json:"estimated_total_hours"`
+		RiskAssessment      string `json:"risk_assessment"`
+	}
+
+	if err := json.Unmarshal([]byte(jsonStr), &rawAnalysis); err != nil {
+		return nil, fmt.Errorf("failed to unmarshal JSON: %w", err)
+	}
+
+	// Convert to our types
+	analysis := &tm.SubtaskAnalysis{
+		ParentTaskID:        parentTaskID,
+		AnalysisSummary:     rawAnalysis.AnalysisSummary,
+		RecommendedApproach: rawAnalysis.RecommendedApproach,
+		EstimatedTotalHours: rawAnalysis.EstimatedTotalHours,
+		RiskAssessment:      rawAnalysis.RiskAssessment,
+	}
+
+	// Convert subtasks, mapping the free-text priority onto tm constants.
+	for _, st := range rawAnalysis.Subtasks {
+		priority := tm.PriorityMedium // default for unknown/medium values
+		switch strings.ToLower(st.Priority) {
+		case "high":
+			priority = tm.PriorityHigh
+		case "low":
+			priority = tm.PriorityLow
+		}
+
+		subtask := tm.SubtaskProposal{
+			Title:          st.Title,
+			Description:    st.Description,
+			Priority:       priority,
+			AssignedTo:     st.AssignedTo,
+			EstimatedHours: st.EstimatedHours,
+			Dependencies:   st.Dependencies,
+		}
+
+		analysis.Subtasks = append(analysis.Subtasks, subtask)
+	}
+
+	// Validate agent assignments; repair (don't fail) on bad roles so a
+	// mostly-good analysis is still usable.
+	if err := s.validateAgentAssignments(analysis); err != nil {
+		log.Printf("Warning: Invalid agent assignments: %v", err)
+		// Fix assignments by using first available role
+		s.fixAgentAssignments(analysis)
+	}
+
+	return analysis, nil
+}
+
+// validateAgentAssignments returns an error for the first subtask whose
+// assigned role is not one of the configured agent roles, or nil when all
+// assignments are valid.
+func (s *SubtaskService) validateAgentAssignments(analysis *tm.SubtaskAnalysis) error {
+	for idx := range analysis.Subtasks {
+		role := analysis.Subtasks[idx].AssignedTo
+		if s.isValidAgentRole(role) {
+			continue
+		}
+		return fmt.Errorf("subtask %d has invalid agent role: %s", idx, role)
+	}
+	return nil
+}
+
+// fixAgentAssignments rewrites any invalid role assignment to the first
+// configured agent role (or "ceo" when no roles are configured).
+func (s *SubtaskService) fixAgentAssignments(analysis *tm.SubtaskAnalysis) {
+	fallback := "ceo" // used only when no roles were configured
+	if len(s.agentRoles) != 0 {
+		fallback = s.agentRoles[0]
+	}
+
+	for i := range analysis.Subtasks {
+		st := &analysis.Subtasks[i]
+		if s.isValidAgentRole(st.AssignedTo) {
+			continue
+		}
+		st.AssignedTo = fallback
+	}
+}
+
+// isValidAgentRole reports whether role is one of the configured agent roles.
+func (s *SubtaskService) isValidAgentRole(role string) bool {
+	for i := range s.agentRoles {
+		if s.agentRoles[i] == role {
+			return true
+		}
+	}
+	return false
+}
+
+// GenerateSubtaskPR creates a PR with the proposed subtasks and returns its
+// URL. The current implementation is a stub: it renders the markdown body,
+// logs it, and returns a mock PR URL instead of talking to a Git host.
+func (s *SubtaskService) GenerateSubtaskPR(ctx context.Context, analysis *tm.SubtaskAnalysis) (string, error) {
+	// Render the markdown body for the proposal.
+	body := s.generateSubtaskPRContent(analysis)
+
+	// This would typically create a Git branch and PR; for now return a
+	// mock URL derived from the parent task ID.
+	url := fmt.Sprintf("https://github.com/example/repo/pull/subtasks-%s", analysis.ParentTaskID)
+
+	log.Printf("Generated subtask proposal PR: %s", url)
+	log.Printf("PR Content:\n%s", body)
+
+	return url, nil
+}
+
+// generateSubtaskPRContent renders the subtask proposal as markdown for the
+// PR body: summary, approach, hours, optional risk section, one section per
+// subtask, and a fixed instructions footer.
+func (s *SubtaskService) generateSubtaskPRContent(analysis *tm.SubtaskAnalysis) string {
+	var b strings.Builder
+
+	fmt.Fprintf(&b, "# Subtask Proposal for Task %s\n\n", analysis.ParentTaskID)
+	fmt.Fprintf(&b, "## Analysis Summary\n%s\n\n", analysis.AnalysisSummary)
+	fmt.Fprintf(&b, "## Recommended Approach\n%s\n\n", analysis.RecommendedApproach)
+	fmt.Fprintf(&b, "**Estimated Total Hours:** %d\n\n", analysis.EstimatedTotalHours)
+
+	// Risk section is omitted entirely when empty.
+	if analysis.RiskAssessment != "" {
+		fmt.Fprintf(&b, "## Risk Assessment\n%s\n\n", analysis.RiskAssessment)
+	}
+
+	b.WriteString("## Proposed Subtasks\n\n")
+
+	for i, st := range analysis.Subtasks {
+		fmt.Fprintf(&b, "### %d. %s\n", i+1, st.Title)
+		fmt.Fprintf(&b, "- **Assigned to:** %s\n", st.AssignedTo)
+		fmt.Fprintf(&b, "- **Priority:** %s\n", st.Priority)
+		fmt.Fprintf(&b, "- **Estimated Hours:** %d\n", st.EstimatedHours)
+
+		if len(st.Dependencies) > 0 {
+			fmt.Fprintf(&b, "- **Dependencies:** %s\n", strings.Join(st.Dependencies, ", "))
+		}
+
+		fmt.Fprintf(&b, "- **Description:** %s\n\n", st.Description)
+	}
+
+	// Static footer with merge instructions.
+	b.WriteString("---\n" +
+		"*Generated by Staff AI Agent System*\n\n" +
+		"**Instructions:**\n" +
+		"- Review the proposed subtasks\n" +
+		"- Approve or request changes\n" +
+		"- When merged, the subtasks will be automatically created and assigned\n")
+
+	return b.String()
+}
+
+// Close releases the service's resources by closing the underlying LLM
+// provider; it is a no-op when no provider is set.
+func (s *SubtaskService) Close() error {
+	if s.llmProvider == nil {
+		return nil
+	}
+	return s.llmProvider.Close()
+}
\ No newline at end of file