Add patch file for server mode changes
parent 3f628f0805
commit ac208fabdd

@@ -0,0 +1,258 @@
diff --git a/go.mod b/go.mod
--- a/go.mod
+++ b/go.mod
@@ -7,4 +7,5 @@ require (
     gitea.r8z.us/stwhite/arxiva v0.1.0
     gitea.r8z.us/stwhite/paperformatter v0.1.3
     gitea.r8z.us/stwhite/paperprocessor v0.1.8
+    github.com/go-chi/chi/v5 v5.0.11
 )

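(Aside: with the new require entry in place, go get github.com/go-chi/chi/v5@v5.0.11 or go mod tidy is the usual way to resolve the module and record its checksums in go.sum before building.)
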
diff --git a/papers.go b/papers.go
--- a/papers.go
+++ b/papers.go
@@ -6,6 +6,7 @@ import (
     "fmt"
     "io"
     "log"
+    "net/http" // Added for server mode
     "os"
     "regexp"
     "strings"
@@ -47,6 +48,8 @@ func main() {
     flag.Usage = func() {
         fmt.Fprintf(os.Stderr, "Usage: %s [options]\n\n", os.Args[0])
         fmt.Fprintf(os.Stderr, "Description:\n")
-        fmt.Fprintf(os.Stderr, " Fetches papers from arXiv (or uses input file), processes them using an LLM, and generates both JSON and Markdown outputs.\n\n")
+        fmt.Fprintf(os.Stderr, " Fetches papers from arXiv (or uses input file), processes them using an LLM,\n")
+        fmt.Fprintf(os.Stderr, " and generates both JSON and Markdown outputs. Can also run as an HTTP server.\n\n")
+        fmt.Fprintf(os.Stderr, "Server Mode:\n")
+        fmt.Fprintf(os.Stderr, " Run as an HTTP server with: %s -serve [-port 8080]\n\n", os.Args[0])
         fmt.Fprintf(os.Stderr, "Pipeline:\n")
         // ... rest of usage function
     }

     // Parse command line arguments
+    serveMode := flag.Bool("serve", false, "Run as HTTP server")
+    port := flag.String("port", "8080", "Port to run server on when using -serve")
     searchOnly := flag.Bool("search-only", false, "Only fetch papers from arXiv and save to JSON file")
     // ... rest of flag declarations

+    // Check if we should run in server mode
+    if *serveMode {
+        server := NewServer(*port)
+        server.apiEndpoint = *apiEndpoint
+        server.apiKey = *apiKey
+        server.model = *model
+
+        log.Printf("Starting server on port %s...", *port)
+        if err := server.Run(); err != nil {
+            log.Fatalf("Server failed: %v", err)
+        }
+        return
+    }
+
     // Rest of main function unchanged
 }

diff --git a/server.go b/server.go
new file mode 100644
--- /dev/null
+++ b/server.go
@@ -0,0 +1,189 @@
+package main
+
+import (
+    "encoding/json"
+    "fmt"
+    "log"
+    "net/http"
+    "time" // needed for RequestDelay in the processor config below
+
+    // module paths per go.mod; required by the handlers below
+    "gitea.r8z.us/stwhite/arxiva"
+    "gitea.r8z.us/stwhite/paperformatter"
+    "gitea.r8z.us/stwhite/paperprocessor"
+
+    "github.com/go-chi/chi/v5"
+    "github.com/go-chi/chi/v5/middleware"
+)
+
+type Server struct {
+    router      *chi.Mux
+    port        string
+    apiEndpoint string
+    apiKey      string
+    model       string
+}
+
+func NewServer(port string) *Server {
+    s := &Server{
+        router: chi.NewRouter(),
+        port:   port,
+    }
+
+    s.setupRoutes()
+    return s
+}
+
+func (s *Server) setupRoutes() {
+    s.router.Use(middleware.Logger)
+    s.router.Use(middleware.Recoverer)
+
+    s.router.Post("/api/papers/search", s.handleSearch)
+    s.router.Post("/api/papers/process", s.handleProcess)
+    s.router.Post("/api/papers/search-process", s.handleSearchAndProcess)
+}
+
+func (s *Server) Run() error {
+    addr := fmt.Sprintf(":%s", s.port)
+    log.Printf("Starting server on %s", addr)
+    return http.ListenAndServe(addr, s.router)
+}
+
+func (s *Server) handleSearch(w http.ResponseWriter, r *http.Request) {
+    var req struct {
+        StartDate  string `json:"start_date"`
+        EndDate    string `json:"end_date"`
+        Query      string `json:"query"`
+        MaxResults int    `json:"max_results"`
+    }
+
+    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+        http.Error(w, "Invalid request body", http.StatusBadRequest)
+        return
+    }
+
+    // Reuse existing validation
+    if !isValidDate(req.StartDate) || !isValidDate(req.EndDate) {
+        http.Error(w, "Invalid date format", http.StatusBadRequest)
+        return
+    }
+
+    papers, err := arxiva.FetchPapers(req.StartDate, req.EndDate, req.Query, req.MaxResults)
+    if err != nil {
+        http.Error(w, err.Error(), http.StatusInternalServerError)
+        return
+    }
+
+    json.NewEncoder(w).Encode(papers)
+}
+
+func (s *Server) handleProcess(w http.ResponseWriter, r *http.Request) {
+    var req struct {
+        InputFile    string `json:"input_file"`
+        CriteriaFile string `json:"criteria_file"`
+        ApiKey       string `json:"api_key"`
+        Model        string `json:"model"`
+    }
+
+    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+        http.Error(w, "Invalid request body", http.StatusBadRequest)
+        return
+    }
+
+    // Create processor configuration
+    config := paperprocessor.Config{
+        APIEndpoint:  s.apiEndpoint,
+        APIKey:       req.ApiKey,
+        Model:        req.Model,
+        RequestDelay: 2 * time.Second,
+    }
+
+    // Process the papers
+    outputJSON := req.InputFile + "-processed.json"
+    if err := paperprocessor.ProcessFile(
+        req.InputFile,
+        outputJSON,
+        req.CriteriaFile,
+        config,
+    ); err != nil {
+        http.Error(w, fmt.Sprintf("Processing failed: %v", err), http.StatusInternalServerError)
+        return
+    }
+
+    // Format to markdown
+    outputMD := req.InputFile + "-processed.md"
+    if err := paperformatter.FormatPapers(outputJSON, outputMD); err != nil {
+        http.Error(w, fmt.Sprintf("Formatting failed: %v", err), http.StatusInternalServerError)
+        return
+    }
+
+    // Return the paths to the generated files
+    json.NewEncoder(w).Encode(struct {
+        JSONOutput string `json:"json_output"`
+        MDOutput   string `json:"md_output"`
+    }{
+        JSONOutput: outputJSON,
+        MDOutput:   outputMD,
+    })
+}
+
+func (s *Server) handleSearchAndProcess(w http.ResponseWriter, r *http.Request) {
+    var req struct {
+        StartDate    string `json:"start_date"`
+        EndDate      string `json:"end_date"`
+        Query        string `json:"query"`
+        MaxResults   int    `json:"max_results"`
+        CriteriaFile string `json:"criteria_file"`
+        ApiKey       string `json:"api_key"`
+        Model        string `json:"model"`
+    }
+
+    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+        http.Error(w, "Invalid request body", http.StatusBadRequest)
+        return
+    }
+
+    // Validate dates
+    if !isValidDate(req.StartDate) || !isValidDate(req.EndDate) {
+        http.Error(w, "Invalid date format", http.StatusBadRequest)
+        return
+    }
+
+    // Fetch papers
+    papers, err := arxiva.FetchPapers(req.StartDate, req.EndDate, req.Query, req.MaxResults)
+    if err != nil {
+        http.Error(w, err.Error(), http.StatusInternalServerError)
+        return
+    }
+
+    // Save papers to temporary JSON file
+    baseFilename := fmt.Sprintf("%s-%s-%s", req.StartDate, req.EndDate, sanitizeFilename(req.Query))
+    inputJSON := baseFilename + ".json"
+    if err := arxiva.SaveToFile(papers, req.StartDate, req.EndDate, req.Query); err != nil {
+        http.Error(w, fmt.Sprintf("Failed to save papers: %v", err), http.StatusInternalServerError)
+        return
+    }
+
+    // Create processor configuration
+    config := paperprocessor.Config{
+        APIEndpoint:  s.apiEndpoint,
+        APIKey:       req.ApiKey,
+        Model:        req.Model,
+        RequestDelay: 2 * time.Second,
+    }
+
+    // Process the papers
+    outputJSON := baseFilename + "-processed.json"
+    if err := paperprocessor.ProcessFile(
+        inputJSON,
+        outputJSON,
+        req.CriteriaFile,
+        config,
+    ); err != nil {
+        http.Error(w, fmt.Sprintf("Processing failed: %v", err), http.StatusInternalServerError)
+        return
+    }
+
+    // Format to markdown
+    outputMD := baseFilename + "-processed.md"
+    if err := paperformatter.FormatPapers(outputJSON, outputMD); err != nil {
+        http.Error(w, fmt.Sprintf("Formatting failed: %v", err), http.StatusInternalServerError)
+        return
+    }
+
+    // Return the paths to the generated files
+    json.NewEncoder(w).Encode(struct {
+        JSONOutput string `json:"json_output"`
+        MDOutput   string `json:"md_output"`
+    }{
+        JSONOutput: outputJSON,
+        MDOutput:   outputMD,
+    })
+}
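
For reference, a minimal client sketch against the new endpoints follows. It is illustrative only: it assumes the binary was started locally with -serve on the default port 8080, the date and query values are placeholders whose exact format is whatever isValidDate and arxiva.FetchPapers accept, and the JSON keys mirror the request struct tags in handleSearch.

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "log"
    "net/http"
)

func main() {
    // Body for POST /api/papers/search; keys match the struct tags in handleSearch.
    // The date and query values below are placeholders, not known-good inputs.
    body, err := json.Marshal(map[string]any{
        "start_date":  "2024-01-01",
        "end_date":    "2024-01-31",
        "query":       "cat:cs.AI",
        "max_results": 10,
    })
    if err != nil {
        log.Fatal(err)
    }

    // Assumes the server is reachable at the default -port of 8080 on localhost.
    resp, err := http.Post("http://localhost:8080/api/papers/search", "application/json", bytes.NewReader(body))
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        log.Fatalf("search failed: %s", resp.Status)
    }

    // handleSearch encodes the fetched papers as a JSON array; decode generically here
    // since the paper schema lives in the arxiva package.
    var papers []map[string]any
    if err := json.NewDecoder(resp.Body).Decode(&papers); err != nil {
        log.Fatal(err)
    }
    fmt.Printf("fetched %d papers\n", len(papers))
}

The /api/papers/process and /api/papers/search-process endpoints follow the same pattern, taking the additional criteria_file, api_key, and model fields (plus input_file for /api/papers/process) shown in their request structs, and both respond with json_output and md_output paths.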