package main

import (
	"bufio"
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"
)
// Configuration - replace these constants with your API credentials.
const (
	// Option 1: OpenAI API
	apiKey    = "YOUR_API_KEY_HERE"
	apiURL    = "https://api.openai.com/v1/chat/completions"
	modelName = "gpt-3.5-turbo" // or gpt-4, etc.

	// Option 2: Anthropic Claude API (uncomment to use). Note that the
	// Anthropic API expects "x-api-key" and "anthropic-version" headers and
	// returns a different JSON shape than the OpenAI-style parsing below, so
	// the request and response handling need adjusting as well.
	// apiKey = "YOUR_API_KEY_HERE"
	// apiURL = "https://api.anthropic.com/v1/messages"
	// modelName = "claude-3-haiku" // or claude-3-opus, etc.

	// Option 3: Local LLM (uncomment to use). Ollama's native /api/chat also
	// uses its own JSON shape; its OpenAI-compatible endpoint is
	// /v1/chat/completions.
	// apiKey = "" // No API key needed for local models
	// apiURL = "http://localhost:11434/api/chat" // Ollama example
	// modelName = "llama2" // or mixtral, phi, etc.
)
// LLM calls the language model. If stream is nil, it blocks and returns the
// complete response via the return value. If stream is non-nil, it sends
// response chunks to the channel (closing it when done) and returns an empty
// string. Input can be a string (wrapped as a user message) or a
// []map[string]string carrying a full message history.
func LLM(input interface{}, stream chan<- string) (string, error) {
	// Build the messages array.
	var messages []map[string]string
	switch v := input.(type) {
	case string:
		messages = []map[string]string{
			{"role": "user", "content": v},
		}
	case []map[string]string:
		messages = v
	default:
		return "", fmt.Errorf("invalid input type %T (want string or []map[string]string)", input)
	}

	// Build the request body.
	requestBody := map[string]interface{}{
		"model":       modelName,
		"messages":    messages,
		"temperature": 0.7,
		"max_tokens":  500,
	}

	if stream != nil {
		requestBody["stream"] = true
		// Close the channel on any return path so the consumer's range loop ends.
		defer close(stream)
	}

	jsonBody, err := json.Marshal(requestBody)
	if err != nil {
		return "", err
	}
	req, err := http.NewRequest("POST", apiURL, bytes.NewBuffer(jsonBody))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	if apiKey != "" {
		req.Header.Set("Authorization", "Bearer "+apiKey)
	}

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("API error (status %d): %s", resp.StatusCode, string(body))
	}
	// Handle a streaming (server-sent events) response: each "data: " line
	// carries a JSON chunk; forward choices[0].delta.content to the channel.
	if stream != nil {
		scanner := bufio.NewScanner(resp.Body)
		for scanner.Scan() {
			line := scanner.Text()
			if strings.HasPrefix(line, "data: ") {
				data := strings.TrimPrefix(line, "data: ")
				if data == "[DONE]" {
					return "", nil
				}

				var chunk map[string]interface{}
				if err := json.Unmarshal([]byte(data), &chunk); err == nil {
					if choices, ok := chunk["choices"].([]interface{}); ok && len(choices) > 0 {
						if choice, ok := choices[0].(map[string]interface{}); ok {
							if delta, ok := choice["delta"].(map[string]interface{}); ok {
								if content, ok := delta["content"].(string); ok {
									stream <- content
								}
							}
						}
					}
				}
			}
		}
		return "", scanner.Err()
	}
	// Handle a non-streaming response: parse choices[0].message.content.
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	var response map[string]interface{}
	if err := json.Unmarshal(body, &response); err != nil {
		return "", err
	}
	if choices, ok := response["choices"].([]interface{}); ok && len(choices) > 0 {
		if choice, ok := choices[0].(map[string]interface{}); ok {
			if message, ok := choice["message"].(map[string]interface{}); ok {
				if content, ok := message["content"].(string); ok {
					return content, nil
				}
			}
		}
	}
	return "", fmt.Errorf("unexpected response format")
}
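
// Example usage - a minimal sketch added for illustration, not part of the
// original listing; the prompts are hypothetical placeholders. It shows the
// three call patterns LLM supports: a plain string, a full message history,
// and a streaming channel.
func main() {
	// Non-streaming: pass nil for the channel and read the full reply.
	reply, err := LLM("Say hello in one sentence.", nil)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(reply)

	// A full message history can be passed instead of a plain string.
	history := []map[string]string{
		{"role": "system", "content": "You are a concise assistant."},
		{"role": "user", "content": "Name one Go concurrency primitive."},
	}
	if reply, err := LLM(history, nil); err == nil {
		fmt.Println(reply)
	}

	// Streaming: LLM sends chunks on the channel and closes it when done,
	// so ranging over the channel terminates on its own.
	stream := make(chan string)
	go func() {
		if _, err := LLM("Count from 1 to 5.", stream); err != nil {
			fmt.Println("stream error:", err)
		}
	}()
	for chunk := range stream {
		fmt.Print(chunk)
	}
	fmt.Println()
}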