| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372 |
- package main
- import (
- "encoding/json"
- "fmt"
- "html"
- "io"
- "net/http"
- "strings"
- "time"
- )
// htmlHeader is the static top of every HTML page. It is deliberately left
// open at <pre> so the conversation transcript (and streamed answer) can be
// written directly after it.
const htmlHeader = `<!DOCTYPE html>
<html>
<head>
<title>ch.at</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style>
body { text-align: center; margin: 40px; }
pre { text-align: left; max-width: 600px; margin: 20px auto; padding: 20px;
white-space: pre-wrap; word-wrap: break-word; }
input[type="text"] { width: 300px; }
</style>
</head>
<body>
<h1>ch.at</h1>
<p>Universal Basic Chat</p>
<p><small><i>pronounced "ch-dot-at"</i></small></p>
<pre>`

// htmlFooterTemplate closes the transcript opened by htmlHeader and renders
// the input form. Its single %s verb receives the HTML-escaped conversation
// history, carried in a hidden textarea ("h") so the server stays stateless:
// the client posts the whole transcript back with each message.
const htmlFooterTemplate = `</pre>
<form method="POST" action="/">
<input type="text" name="q" placeholder="Type your message..." autofocus>
<input type="submit" value="Send">
<textarea name="h" style="display:none">%s</textarea>
</form>
<p><a href="/">New Chat</a></p>
<p><small>
Also available: ssh ch.at • curl ch.at/?q=hello • dig @ch.at "question" TXT<br>
No logs • No accounts • Free software • <a href="https://github.com/Deep-ai-inc/ch.at">GitHub</a>
</small></p>
</body>
</html>`
- func StartHTTPServer(port int) error {
- http.HandleFunc("/", handleRoot)
- http.HandleFunc("/v1/chat/completions", handleChatCompletions)
- addr := fmt.Sprintf(":%d", port)
- return http.ListenAndServe(addr, nil)
- }
// StartHTTPSServer serves TLS on the given port using the supplied
// certificate and key files. It blocks until the listener fails.
// NOTE(review): this relies on StartHTTPServer having registered the
// handlers on http.DefaultServeMux — confirm both servers are started.
func StartHTTPSServer(port int, certFile, keyFile string) error {
	return http.ListenAndServeTLS(fmt.Sprintf(":%d", port), certFile, keyFile, nil)
}
- func handleRoot(w http.ResponseWriter, r *http.Request) {
- if !rateLimitAllow(r.RemoteAddr) {
- http.Error(w, "Rate limit exceeded", http.StatusTooManyRequests)
- return
- }
- var query, history, prompt string
- content := ""
- jsonResponse := ""
- if r.Method == "POST" {
- if err := r.ParseForm(); err != nil {
- http.Error(w, "Failed to parse form", http.StatusBadRequest)
- return
- }
- query = r.FormValue("q")
- history = r.FormValue("h")
- // Limit history size to ensure compatibility
- if len(history) > 65536 {
- history = history[len(history)-65536:]
- }
- if query == "" {
- body, err := io.ReadAll(io.LimitReader(r.Body, 65536)) // Limit body size
- if err != nil {
- http.Error(w, "Failed to read request body", http.StatusBadRequest)
- return
- }
- query = string(body)
- }
- } else {
- query = r.URL.Query().Get("q")
- // Support path-based queries like /what-is-go
- if query == "" && r.URL.Path != "/" {
- query = strings.ReplaceAll(strings.TrimPrefix(r.URL.Path, "/"), "-", " ")
- }
- }
- accept := r.Header.Get("Accept")
- wantsJSON := strings.Contains(accept, "application/json")
- wantsHTML := strings.Contains(accept, "text/html")
- wantsStream := strings.Contains(accept, "text/event-stream")
- if query != "" {
- prompt = query
- if history != "" {
- prompt = history + "Q: " + query
- }
- if wantsHTML && r.Header.Get("Accept") != "application/json" {
- w.Header().Set("Content-Type", "text/html; charset=utf-8")
- w.Header().Set("Transfer-Encoding", "chunked")
- w.Header().Set("X-Accel-Buffering", "no")
- w.Header().Set("Cache-Control", "no-cache")
- flusher := w.(http.Flusher)
- displayHistory := history
- headerSize := len(htmlHeader)
- historySize := len(html.EscapeString(history))
- querySize := len(html.EscapeString(query))
- currentSize := headerSize + historySize + querySize + 10
- // Browser streaming needs significant content - working version used 6KB
- const minThreshold = 6144 // 6KB threshold (matching what worked before)
- if currentSize < minThreshold {
- // Each zero-width space is 3 bytes in UTF-8
- paddingNeeded := (minThreshold - currentSize) / 3
- if paddingNeeded > 0 {
- padding := strings.Repeat("\u200B", paddingNeeded)
- displayHistory = padding + history
- }
- }
- fmt.Fprint(w, htmlHeader)
- fmt.Fprintf(w, "%sQ: %s\nA: ", html.EscapeString(displayHistory), html.EscapeString(query))
- flusher.Flush()
- ch := make(chan string)
- go func() {
- if _, err := LLM(prompt, ch); err != nil {
- ch <- err.Error()
- close(ch)
- }
- }()
- response := ""
- for chunk := range ch {
- if _, err := fmt.Fprint(w, html.EscapeString(chunk)); err != nil {
- return
- }
- response += chunk
- flusher.Flush()
- }
- finalHistory := history + fmt.Sprintf("Q: %s\nA: %s\n\n", query, response)
- fmt.Fprintf(w, htmlFooterTemplate, html.EscapeString(finalHistory))
- return
- }
- // Plain text streaming for curl
- userAgent := r.Header.Get("User-Agent")
- isCurl := strings.Contains(userAgent, "curl") && !wantsHTML && !wantsJSON && !wantsStream
- if isCurl {
- w.Header().Set("Content-Type", "text/plain; charset=utf-8")
- w.Header().Set("Transfer-Encoding", "chunked")
- w.Header().Set("X-Accel-Buffering", "no")
- flusher := w.(http.Flusher)
- fmt.Fprintf(w, "Q: %s\nA: ", query)
- flusher.Flush()
- ch := make(chan string)
- go func() {
- if _, err := LLM(prompt, ch); err != nil {
- ch <- err.Error()
- close(ch)
- }
- }()
- response := ""
- for chunk := range ch {
- fmt.Fprint(w, chunk)
- response += chunk
- flusher.Flush()
- }
- fmt.Fprint(w, "\n")
- return
- }
- response, err := LLM(prompt, nil)
- if err != nil {
- content = err.Error()
- errJSON, _ := json.Marshal(map[string]string{"error": err.Error()})
- jsonResponse = string(errJSON)
- } else {
- respJSON, _ := json.Marshal(map[string]string{
- "question": query,
- "answer": response,
- })
- jsonResponse = string(respJSON)
- newExchange := fmt.Sprintf("Q: %s\nA: %s\n\n", query, response)
- if history != "" {
- content = history + newExchange
- } else {
- content = newExchange
- }
- if len(content) > 65536 {
- newExchangeLen := len(newExchange)
- if newExchangeLen > 65536 {
- content = newExchange[:65536]
- } else {
- maxHistory := 65536 - newExchangeLen
- if len(history) > maxHistory {
- content = history[len(history)-maxHistory:] + newExchange
- }
- }
- }
- }
- } else if history != "" {
- content = history
- }
- if wantsStream && query != "" {
- w.Header().Set("Content-Type", "text/event-stream")
- w.Header().Set("Cache-Control", "no-cache")
- w.Header().Set("Connection", "keep-alive")
- flusher, ok := w.(http.Flusher)
- if !ok {
- http.Error(w, "Streaming not supported", http.StatusInternalServerError)
- return
- }
- ch := make(chan string)
- go func() {
- if _, err := LLM(prompt, ch); err != nil {
- fmt.Fprintf(w, "data: Error: %s\n\n", err.Error())
- flusher.Flush()
- }
- }()
- for chunk := range ch {
- fmt.Fprintf(w, "data: %s\n\n", chunk)
- flusher.Flush()
- }
- fmt.Fprintf(w, "data: [DONE]\n\n")
- return
- }
- if wantsJSON && jsonResponse != "" {
- w.Header().Set("Content-Type", "application/json; charset=utf-8")
- fmt.Fprint(w, jsonResponse)
- } else if wantsHTML && query == "" {
- w.Header().Set("Content-Type", "text/html; charset=utf-8")
- fmt.Fprint(w, htmlHeader)
- fmt.Fprint(w, html.EscapeString(content))
- fmt.Fprintf(w, htmlFooterTemplate, html.EscapeString(content))
- } else {
- w.Header().Set("Content-Type", "text/plain; charset=utf-8")
- fmt.Fprint(w, content)
- }
- }
// ChatRequest is the request body for POST /v1/chat/completions,
// mirroring the OpenAI chat-completions schema.
type ChatRequest struct {
	Model    string    `json:"model"`
	Messages []Message `json:"messages"`
	// Stream selects Server-Sent Events chunked output when true.
	Stream bool `json:"stream,omitempty"`
}

// Message is a single chat turn: a role (e.g. "assistant"; presumably also
// "user"/"system" from clients — not enforced here) and its text content.
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// ChatResponse is the non-streaming response envelope, mirroring the
// OpenAI chat-completion object.
type ChatResponse struct {
	ID      string   `json:"id"`
	Object  string   `json:"object"`
	Created int64    `json:"created"`
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
}

// Choice is one completion alternative inside a ChatResponse.
type Choice struct {
	Index   int     `json:"index"`
	Message Message `json:"message"`
}
- func handleChatCompletions(w http.ResponseWriter, r *http.Request) {
- if !rateLimitAllow(r.RemoteAddr) {
- http.Error(w, "Rate limit exceeded", http.StatusTooManyRequests)
- return
- }
- if r.Method != "POST" {
- http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
- return
- }
- var req ChatRequest
- if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
- http.Error(w, "Invalid JSON", http.StatusBadRequest)
- return
- }
- messages := make([]map[string]string, len(req.Messages))
- for i, msg := range req.Messages {
- messages[i] = map[string]string{
- "role": msg.Role,
- "content": msg.Content,
- }
- }
- if req.Stream {
- w.Header().Set("Content-Type", "text/event-stream")
- w.Header().Set("Cache-Control", "no-cache")
- w.Header().Set("Connection", "keep-alive")
- flusher, ok := w.(http.Flusher)
- if !ok {
- http.Error(w, "Streaming not supported", http.StatusInternalServerError)
- return
- }
- ch := make(chan string)
- go LLM(messages, ch)
- for chunk := range ch {
- resp := map[string]interface{}{
- "id": fmt.Sprintf("chatcmpl-%d", time.Now().Unix()),
- "object": "chat.completion.chunk",
- "created": time.Now().Unix(),
- "model": req.Model,
- "choices": []map[string]interface{}{{
- "index": 0,
- "delta": map[string]string{"content": chunk},
- }},
- }
- data, err := json.Marshal(resp)
- if err != nil {
- fmt.Fprintf(w, "data: Failed to marshal response\n\n")
- return
- }
- fmt.Fprintf(w, "data: %s\n\n", data)
- flusher.Flush()
- }
- fmt.Fprintf(w, "data: [DONE]\n\n")
- } else {
- response, err := LLM(messages, nil)
- if err != nil {
- http.Error(w, err.Error(), http.StatusInternalServerError)
- return
- }
- chatResp := ChatResponse{
- ID: fmt.Sprintf("chatcmpl-%d", time.Now().Unix()),
- Object: "chat.completion",
- Created: time.Now().Unix(),
- Model: req.Model,
- Choices: []Choice{{
- Index: 0,
- Message: Message{
- Role: "assistant",
- Content: response,
- },
- }},
- }
- w.Header().Set("Content-Type", "application/json")
- json.NewEncoder(w).Encode(chatResp)
- }
- }
|