forked from Ivasoft/opds-proxy
fix: single conversion at a time
See the notes for a full explanation, but in short: Kobo makes simultaneous requests, and with two downloads / conversions running at the same time the temporary files conflicted, which resulted in unreliable behavior. The fix is to guard the conversion section with a mutex so that one conversion completes before the next is allowed to start.
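For reference, a minimal sketch of the pattern the commit describes — a single mutex shared by the handler closure, serializing the convert-and-send section. This is illustrative only: the identifiers below (`handleDownload`, `converter`, the input path) are hypothetical, and the actual change is the mutex added to `handleFeed` in server.go, shown in the diff further down.

```go
// Sketch of the serialization pattern described in the commit message.
// Not the project's actual handler; names and paths are placeholders.
package sketch

import (
	"net/http"
	"sync"
)

// converter is a stand-in for the project's convert.Converter interface.
type converter interface {
	Convert(input string) (string, error)
}

func handleDownload(conv converter) http.HandlerFunc {
	var mu sync.Mutex // shared across every request served by this handler

	return func(w http.ResponseWriter, r *http.Request) {
		// Kobo fires two parallel requests per link click; without the lock
		// both would convert to, and then delete, the same temporary file.
		mu.Lock()
		defer mu.Unlock()

		out, err := conv.Convert("tmp/book.epub") // placeholder input path
		if err != nil {
			http.Error(w, "conversion failed", http.StatusInternalServerError)
			return
		}
		http.ServeFile(w, r, out)
	}
}
```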
@@ -2,7 +2,6 @@ package convert

import (
	"os/exec"
	"path/filepath"
	"strings"
	"sync"
)

@@ -26,8 +25,7 @@ func (kc *KepubConverter) Convert(input string) (string, error) {
	kc.mutex.Lock()
	defer kc.mutex.Unlock()

	dir := filepath.Dir(input)
	kepubFile := filepath.Join(dir, strings.Replace(filepath.Base(input), ".epub", ".kepub.epub", 1))
	kepubFile := strings.Replace(input, ".epub", ".kepub.epub", 1)

	cmd := exec.Command("kepubify", "-v", "-u", "-o", kepubFile, input)
	if err := cmd.Run(); err != nil {
notes.md (new file, +18)

@@ -0,0 +1,18 @@
Kobo Browser Quirks

Apparently the browser is using a very old version of WebKit.

- Doesn't support `fetch`
- No HTMX
- Doesn't support `secure` or `httpOnly` cookies
  - They just silently fail to be set with these flags
- Makes 2 parallel requests whenever an `<a>` link is clicked
  - This doesn't seem to apply to URL bar navigation
  - This poses issues when a request is not idempotent. Need to figure out a solution
    for these cases... For example, when converting to Kepub we encounter failures
    for the second request due to file conflicts / deletions happening at the same time.
    I've fixed this by locking the conversion to a single request at a time with a mutex,
    but we still do the conversion twice, just one after the other.
    I was planning on creating an OPDS interface for OpenBooks, but that would make all
    search / download requests be sent twice, which is no good. The fix isn't as simple
    in that case.
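Aside (not part of this commit): for the non-idempotent duplicate-request concern raised in the notes above, one possible direction is to coalesce the two parallel requests into a single unit of work, for example with golang.org/x/sync/singleflight, so the action runs once and the duplicate caller simply reuses the result. The handler and `doSearch` names below are hypothetical.

```go
// Sketch only: collapse Kobo's duplicate requests so a non-idempotent action
// runs once per key, using golang.org/x/sync/singleflight.
package dedupsketch

import (
	"net/http"

	"golang.org/x/sync/singleflight"
)

var group singleflight.Group

// handleSearch and doSearch are hypothetical names, not part of opds-proxy.
func handleSearch(doSearch func(query string) (string, error)) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		query := r.URL.Query().Get("q")

		// Both of Kobo's parallel requests share the same key, so doSearch
		// runs once; the second caller blocks and receives the same result.
		result, err, _ := group.Do(query, func() (interface{}, error) {
			return doSearch(query)
		})
		if err != nil {
			http.Error(w, "search failed", http.StatusInternalServerError)
			return
		}
		w.Write([]byte(result.(string)))
	}
}
```

Note that this only collapses duplicates that arrive while the first request is still in flight, which matches the described Kobo behavior of firing two requests at once; it would not guard against a retry sent later.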
server.go (30 changed lines)

@@ -5,12 +5,14 @@ import (
	"fmt"
	"io"
	"log/slog"
	"math/rand"
	"mime"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/evan-buss/opds-proxy/convert"

@@ -55,9 +57,9 @@ func NewServer(config *ProxyConfig) (*Server, error) {
	s := securecookie.New(hashKey, blockKey)

	router := http.NewServeMux()
	router.HandleFunc("GET /{$}", handleHome(config.Feeds))
	router.HandleFunc("GET /feed", handleFeed("tmp/", config.Feeds, s))
	router.HandleFunc("/auth", handleAuth(s))
	router.Handle("GET /{$}", logger(handleHome(config.Feeds)))
	router.Handle("GET /feed", logger(handleFeed("tmp/", config.Feeds, s)))
	router.Handle("/auth", logger(handleAuth(s)))
	router.Handle("GET /static/", http.FileServer(http.FS(html.StaticFiles())))

	return &Server{

@@ -67,6 +69,15 @@ func NewServer(config *ProxyConfig) (*Server, error) {
	}, nil
}

func logger(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		start := time.Now()
		id := rand.Intn(1000)
		next.ServeHTTP(w, r)
		slog.Info("Request", slog.Int("id", id), slog.String("path", r.URL.Path), slog.String("query", r.URL.RawQuery), slog.Duration("duration", time.Since(start)))
	})
}

func (s *Server) Serve() error {
	slog.Info("Starting server", slog.String("port", s.addr))
	return http.ListenAndServe(s.addr, s.router)

@@ -90,6 +101,8 @@ func handleFeed(outputDir string, feeds []FeedConfig, s *securecookie.SecureCook
	kepubConverter := &convert.KepubConverter{}
	mobiConverter := &convert.MobiConverter{}

	mutex := sync.Mutex{}

	return func(w http.ResponseWriter, r *http.Request) {
		queryURL := r.URL.Query().Get("q")
		if queryURL == "" {

@@ -145,6 +158,9 @@ func handleFeed(outputDir string, feeds []FeedConfig, s *securecookie.SecureCook
			}
		}

		mutex.Lock()
		defer mutex.Unlock()

		var converter convert.Converter
		if strings.Contains(r.UserAgent(), "Kobo") && kepubConverter.Available() {
			converter = kepubConverter

@@ -291,7 +307,7 @@ func fetchFromUrl(url string, credentials *Credentials) (*http.Response, error)
}

func handleError(r *http.Request, w http.ResponseWriter, message string, err error) {
	slog.Error(message, slog.String("path", r.URL.RawPath), slog.Any("error", err))
	slog.Error(message, slog.String("path", r.URL.Path), slog.String("query", r.URL.RawQuery), slog.Any("error", err))
	http.Error(w, "An unexpected error occurred", http.StatusInternalServerError)
}

@@ -329,11 +345,15 @@ func forwardResponse(w http.ResponseWriter, resp *http.Response) {
}

func sendConvertedFile(w http.ResponseWriter, filePath string) error {
	defer os.Remove(filePath)
	file, err := os.Open(filePath)
	if err != nil {
		os.Remove(filePath)
		return err
	}
	defer func() {
		file.Close()
		os.Remove(filePath)
	}()

	info, err := file.Stat()
	if err != nil {