Compare commits
2 Commits
1.2.1
...
ab1e723446
| Author | SHA1 | Date | |
|---|---|---|---|
| ab1e723446 | |||
| 7428f068c7 |
28
Makefile
28
Makefile
@@ -1,10 +1,9 @@
|
||||
BIN := yoink
|
||||
BUILD_DIR := build
|
||||
REGISTRY := git.brizzle.dev/bryan/yoink-go
|
||||
VERSION ?= $(shell git describe --tags --always --dirty)
|
||||
NOTES ?= ""
|
||||
VERSION := $(shell git describe --tags --always --dirty)
|
||||
|
||||
.PHONY: all windows linux darwin clean docker-build docker-push tag gitea-release release
|
||||
.PHONY: all windows linux darwin clean docker-build docker-push
|
||||
|
||||
all: windows linux darwin
|
||||
|
||||
@@ -29,28 +28,5 @@ docker-push: docker-build
|
||||
podman push $(REGISTRY):$(VERSION)
|
||||
podman push $(REGISTRY):latest
|
||||
|
||||
tag:
|
||||
@if [ -z "$(VERSION)" ]; then echo "Usage: make tag VERSION=1.2.0"; exit 1; fi
|
||||
git tag $(VERSION)
|
||||
git push origin $(VERSION)
|
||||
|
||||
gitea-release:
|
||||
tea release create \
|
||||
--tag $(VERSION) \
|
||||
--title "$(VERSION)" \
|
||||
--note $(NOTES) \
|
||||
--asset $(BUILD_DIR)/$(BIN)-windows-amd64.exe \
|
||||
--asset $(BUILD_DIR)/$(BIN)-linux-amd64 \
|
||||
--asset $(BUILD_DIR)/$(BIN)-linux-arm64 \
|
||||
--asset $(BUILD_DIR)/$(BIN)-darwin-amd64 \
|
||||
--asset $(BUILD_DIR)/$(BIN)-darwin-arm64
|
||||
|
||||
release:
|
||||
@if [ -z "$(VERSION)" ]; then echo "Usage: make release VERSION=1.3.0 NOTES='...'"; exit 1; fi
|
||||
$(MAKE) tag VERSION=$(VERSION)
|
||||
$(MAKE) clean all
|
||||
$(MAKE) gitea-release VERSION=$(VERSION) NOTES=$(NOTES)
|
||||
$(MAKE) docker-push VERSION=$(VERSION)
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILD_DIR)
|
||||
|
||||
17
README.md
17
README.md
@@ -1,6 +1,6 @@
|
||||
# yoink
|
||||
|
||||
A tool for downloading comics from readallcomics.com and packaging them as `.cbz` archives. Available as a CLI command or a self-hosted web application. The web UI also lets you package local image folders into `.cbz` archives directly from your browser.
|
||||
A tool for downloading comics from readallcomics.com and packaging them as `.cbz` archives. Available as a CLI command or a self-hosted web application.
|
||||
|
||||
## How it works
|
||||
|
||||
@@ -92,23 +92,10 @@ The web UI is then available at `http://localhost:8080`.
|
||||
### Features
|
||||
|
||||
- **Download queue** — paste a comic URL into the input bar and track download progress in real time
|
||||
- **Local packaging** — drag and drop a folder of images (or use the file picker) to package them as a `.cbz` archive and add it to your library without downloading anything
|
||||
- **Library grid** — browse your comics as a 150×300 cover grid with title-initial placeholders for missing covers
|
||||
- **Library grid** — browse your downloaded comics as a 150×300 cover grid
|
||||
- **Filter & sort** — filter by title and sort by newest, oldest, A–Z, or Z–A
|
||||
- **One-click download** — click any cover to download the `.cbz` archive directly
|
||||
|
||||
#### Packaging local images
|
||||
|
||||

|
||||
|
||||
Click the upload icon (↑) in the header to open the packaging panel. Enter a title, then either:
|
||||
|
||||
- **Drag and drop** a folder or image files onto the drop zone
|
||||
- **Select folder** to pick an entire directory at once
|
||||
- **Select files** to pick individual images
|
||||
|
||||
Images are sorted by filename, the first image is used as the cover, and the result is saved to your library as `<Title>/<Title>.cbz`.
|
||||
|
||||
### Library volume
|
||||
|
||||
Downloaded comics are stored at the path set by `YOINK_LIBRARY`. When using Docker, mount this as a volume to persist your library across container restarts:
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 560 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 270 KiB |
@@ -1,110 +0,0 @@
|
||||
package comic
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestArchiveError(t *testing.T) {
|
||||
err := ArchiveError{Message: "archive failed", Code: 1}
|
||||
if err.Error() != "archive failed" {
|
||||
t.Errorf("Error() = %q, want %q", err.Error(), "archive failed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestArchive(t *testing.T) {
|
||||
t.Run("creates cbz with image files", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
title := "TestComic"
|
||||
comicDir := filepath.Join(tmpDir, title)
|
||||
os.MkdirAll(comicDir, os.ModePerm)
|
||||
|
||||
// Create fake image files
|
||||
for _, name := range []string{"TestComic 001.jpg", "TestComic 002.jpg", "TestComic 003.png"} {
|
||||
os.WriteFile(filepath.Join(comicDir, name), []byte("fake image"), 0644)
|
||||
}
|
||||
|
||||
c := &Comic{
|
||||
Title: title,
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := c.Archive()
|
||||
if err != nil {
|
||||
t.Fatalf("Archive() unexpected error: %v", err)
|
||||
}
|
||||
|
||||
archivePath := filepath.Join(comicDir, title+".cbz")
|
||||
if _, err := os.Stat(archivePath); os.IsNotExist(err) {
|
||||
t.Fatalf("expected archive %s to exist", archivePath)
|
||||
}
|
||||
|
||||
// Verify the zip contains the image files
|
||||
reader, err := zip.OpenReader(archivePath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to open archive: %v", err)
|
||||
}
|
||||
defer reader.Close()
|
||||
|
||||
if len(reader.File) != 3 {
|
||||
t.Errorf("archive contains %d files, want 3", len(reader.File))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("excludes non-image files from archive", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
title := "TestComic"
|
||||
comicDir := filepath.Join(tmpDir, title)
|
||||
os.MkdirAll(comicDir, os.ModePerm)
|
||||
|
||||
// Create mixed files
|
||||
os.WriteFile(filepath.Join(comicDir, "page-001.jpg"), []byte("image"), 0644)
|
||||
os.WriteFile(filepath.Join(comicDir, "readme.txt"), []byte("text"), 0644)
|
||||
os.WriteFile(filepath.Join(comicDir, "data.json"), []byte("json"), 0644)
|
||||
|
||||
c := &Comic{
|
||||
Title: title,
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := c.Archive()
|
||||
if err != nil {
|
||||
t.Fatalf("Archive() unexpected error: %v", err)
|
||||
}
|
||||
|
||||
archivePath := filepath.Join(comicDir, title+".cbz")
|
||||
reader, err := zip.OpenReader(archivePath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to open archive: %v", err)
|
||||
}
|
||||
defer reader.Close()
|
||||
|
||||
if len(reader.File) != 1 {
|
||||
t.Errorf("archive contains %d files, want 1 (only .jpg)", len(reader.File))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("handles empty directory", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
title := "EmptyComic"
|
||||
comicDir := filepath.Join(tmpDir, title)
|
||||
os.MkdirAll(comicDir, os.ModePerm)
|
||||
|
||||
c := &Comic{
|
||||
Title: title,
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := c.Archive()
|
||||
if err != nil {
|
||||
t.Fatalf("Archive() unexpected error: %v", err)
|
||||
}
|
||||
|
||||
archivePath := filepath.Join(comicDir, title+".cbz")
|
||||
if _, err := os.Stat(archivePath); os.IsNotExist(err) {
|
||||
t.Fatalf("expected archive %s to exist even if empty", archivePath)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,93 +0,0 @@
|
||||
package comic
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCleanup(t *testing.T) {
|
||||
t.Run("keeps cover image 001 and removes others", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
title := "TestComic"
|
||||
comicDir := filepath.Join(tmpDir, title)
|
||||
os.MkdirAll(comicDir, os.ModePerm)
|
||||
|
||||
files := map[string]bool{
|
||||
"TestComic 001.jpg": true, // should be kept
|
||||
"TestComic 002.jpg": false, // should be removed
|
||||
"TestComic 003.jpg": false, // should be removed
|
||||
}
|
||||
|
||||
for name := range files {
|
||||
os.WriteFile(filepath.Join(comicDir, name), []byte("fake"), 0644)
|
||||
}
|
||||
|
||||
c := &Comic{
|
||||
Title: title,
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := c.Cleanup()
|
||||
if err != nil {
|
||||
t.Fatalf("Cleanup() unexpected error: %v", err)
|
||||
}
|
||||
|
||||
for name, shouldExist := range files {
|
||||
path := filepath.Join(comicDir, name)
|
||||
_, err := os.Stat(path)
|
||||
exists := !os.IsNotExist(err)
|
||||
|
||||
if shouldExist && !exists {
|
||||
t.Errorf("expected %s to be kept, but it was removed", name)
|
||||
}
|
||||
if !shouldExist && exists {
|
||||
t.Errorf("expected %s to be removed, but it still exists", name)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("keeps non-image files", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
title := "TestComic"
|
||||
comicDir := filepath.Join(tmpDir, title)
|
||||
os.MkdirAll(comicDir, os.ModePerm)
|
||||
|
||||
os.WriteFile(filepath.Join(comicDir, "TestComic.cbz"), []byte("archive"), 0644)
|
||||
os.WriteFile(filepath.Join(comicDir, "metadata.json"), []byte("data"), 0644)
|
||||
|
||||
c := &Comic{
|
||||
Title: title,
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := c.Cleanup()
|
||||
if err != nil {
|
||||
t.Fatalf("Cleanup() unexpected error: %v", err)
|
||||
}
|
||||
|
||||
for _, name := range []string{"TestComic.cbz", "metadata.json"} {
|
||||
path := filepath.Join(comicDir, name)
|
||||
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
t.Errorf("expected non-image file %s to be kept", name)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("handles empty directory", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
title := "EmptyComic"
|
||||
comicDir := filepath.Join(tmpDir, title)
|
||||
os.MkdirAll(comicDir, os.ModePerm)
|
||||
|
||||
c := &Comic{
|
||||
Title: title,
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := c.Cleanup()
|
||||
if err != nil {
|
||||
t.Fatalf("Cleanup() unexpected error for empty dir: %v", err)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -26,52 +26,21 @@ type Comic struct {
|
||||
// Returns the extracted title as a string.
|
||||
func extractTitleFromMarkup(c Comic) string {
|
||||
yearFormat := `^(.*?)\s+\(\d{4}(?:\s+.+)?\)`
|
||||
regex := regexp.MustCompile(yearFormat)
|
||||
|
||||
extractFrom := func(text string) string {
|
||||
matches := regex.FindStringSubmatch(text)
|
||||
if len(matches) != 2 {
|
||||
return ""
|
||||
}
|
||||
return strings.ReplaceAll(matches[1], ":", "")
|
||||
}
|
||||
|
||||
title := extractFrom(c.Markup.Find("title").First().Text())
|
||||
|
||||
if strings.HasPrefix(title, "#") {
|
||||
if h1 := extractFrom(c.Markup.Find("h1").First().Text()); h1 != "" && !strings.HasPrefix(h1, "#") {
|
||||
return h1
|
||||
}
|
||||
if slug := titleFromSlug(c.URL); slug != "" {
|
||||
return slug
|
||||
}
|
||||
}
|
||||
|
||||
if title != "" {
|
||||
return title
|
||||
}
|
||||
selection := c.Markup.Find("title")
|
||||
|
||||
if selection.Length() == 0 {
|
||||
return "Untitled"
|
||||
}
|
||||
}
|
||||
|
||||
// titleFromSlug derives a comic title from the last path segment of a URL.
|
||||
// It strips a trailing year (-YYYY), replaces hyphens with spaces, and title-cases the result.
|
||||
func titleFromSlug(url string) string {
|
||||
slug := strings.TrimRight(url, "/")
|
||||
if i := strings.LastIndex(slug, "/"); i >= 0 {
|
||||
slug = slug[i+1:]
|
||||
content := selection.First().Text()
|
||||
regex := regexp.MustCompile(yearFormat)
|
||||
matches := regex.FindStringSubmatch(content)
|
||||
|
||||
if len(matches) != 2 {
|
||||
return "Untitled"
|
||||
}
|
||||
slug = regexp.MustCompile(`-\d{4}$`).ReplaceAllString(slug, "")
|
||||
if slug == "" {
|
||||
return ""
|
||||
}
|
||||
words := strings.Split(slug, "-")
|
||||
for i, w := range words {
|
||||
if len(w) > 0 {
|
||||
words[i] = strings.ToUpper(w[:1]) + w[1:]
|
||||
}
|
||||
}
|
||||
return strings.Join(words, " ")
|
||||
|
||||
return strings.ReplaceAll(matches[1], ":", "")
|
||||
}
|
||||
|
||||
// NewComic creates a new Comic instance from the provided URL and library path.
|
||||
@@ -92,21 +61,13 @@ func NewComic(
|
||||
LibraryPath: libraryPath,
|
||||
}
|
||||
|
||||
if strings.Contains(url, "batcave.biz") {
|
||||
go BatcaveBizMarkup(url, markupChannel)
|
||||
} else {
|
||||
go Markup(url, markupChannel)
|
||||
}
|
||||
go Markup(c.URL, markupChannel)
|
||||
|
||||
markup := <-markupChannel
|
||||
c.Markup = markup
|
||||
c.Title = extractTitleFromMarkup(*c)
|
||||
|
||||
if strings.Contains(url, "batcave.biz") {
|
||||
go ParseBatcaveBizImageLinks(markup, imageChannel)
|
||||
} else {
|
||||
go ParseImageLinks(markup, imageChannel)
|
||||
}
|
||||
links := <-imageChannel
|
||||
|
||||
c.Filelist = links
|
||||
|
||||
@@ -1,170 +0,0 @@
|
||||
package comic
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
)
|
||||
|
||||
func newDocFromHTML(html string) *goquery.Document {
|
||||
doc, _ := goquery.NewDocumentFromReader(strings.NewReader(html))
|
||||
return doc
|
||||
}
|
||||
|
||||
func TestExtractTitleFromMarkup(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
html string
|
||||
url string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "standard title with year",
|
||||
html: `<html><head><title>Ultraman X Avengers 001 (2024)</title></head></html>`,
|
||||
expected: "Ultraman X Avengers 001",
|
||||
},
|
||||
{
|
||||
name: "title with year and extra text",
|
||||
html: `<html><head><title>Batman 042 (2023 Digital)</title></head></html>`,
|
||||
expected: "Batman 042",
|
||||
},
|
||||
{
|
||||
name: "title with colon removed",
|
||||
html: `<html><head><title>Spider-Man: No Way Home 001 (2022)</title></head></html>`,
|
||||
expected: "Spider-Man No Way Home 001",
|
||||
},
|
||||
{
|
||||
name: "no title tag",
|
||||
html: `<html><head></head></html>`,
|
||||
expected: "Untitled",
|
||||
},
|
||||
{
|
||||
name: "title without year pattern",
|
||||
html: `<html><head><title>Some Random Page</title></head></html>`,
|
||||
expected: "Untitled",
|
||||
},
|
||||
{
|
||||
name: "empty title",
|
||||
html: `<html><head><title></title></head></html>`,
|
||||
expected: "Untitled",
|
||||
},
|
||||
{
|
||||
name: "title starts with # falls back to h1",
|
||||
html: `<html><head><title>#018 (2026)</title></head><body><h1>Absolute Batman #018 (2026)</h1></body></html>`,
|
||||
expected: "Absolute Batman #018",
|
||||
},
|
||||
{
|
||||
name: "title starts with # but h1 also starts with #, falls back to slug",
|
||||
html: `<html><head><title>#018 (2026)</title></head><body><h1>#018 (2026)</h1></body></html>`,
|
||||
url: "https://readallcomics.com/absolute-batman-018-2026/",
|
||||
expected: "Absolute Batman 018",
|
||||
},
|
||||
{
|
||||
name: "title starts with # falls back to slug when no h1",
|
||||
html: `<html><head><title>#018 (2026)</title></head></html>`,
|
||||
url: "https://readallcomics.com/absolute-batman-018-2026/",
|
||||
expected: "Absolute Batman 018",
|
||||
},
|
||||
{
|
||||
name: "title starts with # no h1 no url",
|
||||
html: `<html><head><title>#018 (2026)</title></head></html>`,
|
||||
expected: "#018",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
doc := newDocFromHTML(tt.html)
|
||||
c := Comic{Markup: doc, URL: tt.url}
|
||||
result := extractTitleFromMarkup(c)
|
||||
if result != tt.expected {
|
||||
t.Errorf("extractTitleFromMarkup() = %q, want %q", result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTitleFromSlug(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
url string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "standard comic URL",
|
||||
url: "https://readallcomics.com/absolute-batman-018-2026/",
|
||||
expected: "Absolute Batman 018",
|
||||
},
|
||||
{
|
||||
name: "no trailing slash",
|
||||
url: "https://readallcomics.com/absolute-batman-018-2026",
|
||||
expected: "Absolute Batman 018",
|
||||
},
|
||||
{
|
||||
name: "no year in slug",
|
||||
url: "https://readallcomics.com/absolute-batman-018/",
|
||||
expected: "Absolute Batman 018",
|
||||
},
|
||||
{
|
||||
name: "single word slug",
|
||||
url: "https://readallcomics.com/batman/",
|
||||
expected: "Batman",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := titleFromSlug(tt.url)
|
||||
if result != tt.expected {
|
||||
t.Errorf("titleFromSlug() = %q, want %q", result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestCover(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
filelist []string
|
||||
wantSuffix string
|
||||
expectErr bool
|
||||
}{
|
||||
{
|
||||
name: "finds cover ending in 001.jpg",
|
||||
filelist: []string{"https://example.com/image-002.jpg", "https://example.com/image-001.jpg", "https://example.com/image-003.jpg"},
|
||||
wantSuffix: "image-001.jpg",
|
||||
},
|
||||
{
|
||||
name: "finds cover ending in 000.jpg",
|
||||
filelist: []string{"https://example.com/image-000.jpg", "https://example.com/image-001.jpg"},
|
||||
wantSuffix: "image-000.jpg",
|
||||
},
|
||||
{
|
||||
name: "returns error when no cover found",
|
||||
filelist: []string{"https://example.com/image-002.jpg", "https://example.com/image-003.jpg"},
|
||||
expectErr: true,
|
||||
},
|
||||
{
|
||||
name: "returns error for empty filelist",
|
||||
filelist: []string{},
|
||||
expectErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
c := &Comic{Filelist: tt.filelist}
|
||||
cover, err := c.Cover()
|
||||
if tt.expectErr && err == nil {
|
||||
t.Error("Cover() expected error, got nil")
|
||||
}
|
||||
if !tt.expectErr && err != nil {
|
||||
t.Errorf("Cover() unexpected error: %v", err)
|
||||
}
|
||||
if tt.wantSuffix != "" && !strings.HasSuffix(cover, tt.wantSuffix) {
|
||||
t.Errorf("Cover() = %q, want path ending in %q", cover, tt.wantSuffix)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,145 +0,0 @@
|
||||
package comic
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestComicDownloadError(t *testing.T) {
|
||||
err := ComicDownloadError{Message: "download failed", Code: 1}
|
||||
if err.Error() != "download failed" {
|
||||
t.Errorf("Error() = %q, want %q", err.Error(), "download failed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleRequest(t *testing.T) {
|
||||
t.Run("successful request", func(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Header.Get("User-Agent") == "" {
|
||||
t.Error("expected User-Agent header to be set")
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte("image data"))
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
resp, err := handleRequest(server.URL)
|
||||
if err != nil {
|
||||
t.Fatalf("handleRequest() unexpected error: %v", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
t.Errorf("handleRequest() status = %d, want %d", resp.StatusCode, http.StatusOK)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("non-200 response", func(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusNotFound)
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
_, err := handleRequest(server.URL)
|
||||
if err == nil {
|
||||
t.Error("handleRequest() expected error for 404 response, got nil")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("invalid URL", func(t *testing.T) {
|
||||
_, err := handleRequest("http://invalid.localhost:0/bad")
|
||||
if err == nil {
|
||||
t.Error("handleRequest() expected error for invalid URL, got nil")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestDownloadFile(t *testing.T) {
|
||||
t.Run("successful download", func(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte("fake image content"))
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
c := &Comic{
|
||||
Title: "TestComic",
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := downloadFile(server.URL+"/image.jpg", 1, c)
|
||||
if err != nil {
|
||||
t.Fatalf("downloadFile() unexpected error: %v", err)
|
||||
}
|
||||
|
||||
expectedPath := filepath.Join(tmpDir, "TestComic", "TestComic 001.jpg")
|
||||
if _, err := os.Stat(expectedPath); os.IsNotExist(err) {
|
||||
t.Errorf("expected file %s to exist", expectedPath)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("formats page number with leading zeros", func(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte("fake image content"))
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
c := &Comic{
|
||||
Title: "TestComic",
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := downloadFile(server.URL+"/image.jpg", 42, c)
|
||||
if err != nil {
|
||||
t.Fatalf("downloadFile() unexpected error: %v", err)
|
||||
}
|
||||
|
||||
expectedPath := filepath.Join(tmpDir, "TestComic", "TestComic 042.jpg")
|
||||
if _, err := os.Stat(expectedPath); os.IsNotExist(err) {
|
||||
t.Errorf("expected file %s to exist", expectedPath)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("server error returns error", func(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
c := &Comic{
|
||||
Title: "TestComic",
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := downloadFile(server.URL+"/image.jpg", 1, c)
|
||||
if err == nil {
|
||||
t.Error("downloadFile() expected error for server error, got nil")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("empty response body returns error", func(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
// write nothing
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
c := &Comic{
|
||||
Title: "TestComic",
|
||||
LibraryPath: tmpDir,
|
||||
}
|
||||
|
||||
err := downloadFile(server.URL+"/image.jpg", 1, c)
|
||||
if err == nil {
|
||||
t.Error("downloadFile() expected error for empty body, got nil")
|
||||
}
|
||||
})
|
||||
}
|
||||
192
comic/parser.go
192
comic/parser.go
@@ -3,9 +3,6 @@ package comic
|
||||
import (
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/cookiejar"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
@@ -50,143 +47,6 @@ func Markup(url string, c chan *goquery.Document) *goquery.Document {
|
||||
return markup
|
||||
}
|
||||
|
||||
func BatcaveBizMarkup(referer string, c chan *goquery.Document) *goquery.Document {
|
||||
jar, _ := cookiejar.New(nil)
|
||||
client := &http.Client{
|
||||
Jar: jar,
|
||||
CheckRedirect: func(req *http.Request, via []*http.Request) error {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
headers := map[string]string{
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
||||
"Accept-Language": "en-US,en;q=0.9",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
|
||||
}
|
||||
|
||||
// GET the challange page to obtain cookies and any necessary tokens
|
||||
req, err := http.NewRequest("GET", referer, nil)
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
for k, v := range headers {
|
||||
req.Header.Set(k, v)
|
||||
}
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
|
||||
tokenRegex := regexp.MustCompile(`token:\s*"([^"]+)"`)
|
||||
matches := tokenRegex.FindSubmatch(body)
|
||||
|
||||
if matches == nil {
|
||||
// no challenge, parse directly
|
||||
doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(body)))
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
if c != nil {
|
||||
c <- doc
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
encodedToken := string(matches[1])
|
||||
token, err := url.QueryUnescape(encodedToken)
|
||||
if err != nil {
|
||||
token = encodedToken
|
||||
}
|
||||
|
||||
// Step 3: POST to /_v with fake browser metrics
|
||||
params := url.Values{}
|
||||
params.Set("token", token)
|
||||
params.Set("mode", "modern")
|
||||
params.Set("workTime", "462")
|
||||
params.Set("iterations", "183")
|
||||
params.Set("webdriver", "0")
|
||||
params.Set("touch", "0")
|
||||
params.Set("screen_w", "1920")
|
||||
params.Set("screen_h", "1080")
|
||||
params.Set("screen_cd", "24")
|
||||
|
||||
postReq, err := http.NewRequest("POST", "https://batcave.biz/_v", strings.NewReader(params.Encode()))
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
for k, v := range headers {
|
||||
postReq.Header.Set(k, v)
|
||||
}
|
||||
postReq.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
||||
postReq.Header.Set("Referer", referer)
|
||||
|
||||
postRes, err := client.Do(postReq)
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
defer postRes.Body.Close()
|
||||
io.ReadAll(postRes.Body)
|
||||
|
||||
// GET the real page with the set cookie
|
||||
realReq, err := http.NewRequest("GET", referer, nil)
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
for k, v := range headers {
|
||||
realReq.Header.Set(k, v)
|
||||
}
|
||||
|
||||
realRes, err := client.Do(realReq)
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
defer realRes.Body.Close()
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(realRes.Body)
|
||||
if err != nil {
|
||||
if c != nil {
|
||||
c <- &goquery.Document{}
|
||||
}
|
||||
return &goquery.Document{}
|
||||
}
|
||||
if c != nil {
|
||||
c <- doc
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
// ParseImageLinks parses a goquery document to extract image links.
|
||||
//
|
||||
// markup is the goquery document to parse for image links.
|
||||
@@ -209,55 +69,3 @@ func ParseImageLinks(markup *goquery.Document, c chan []string) ([]string, error
|
||||
|
||||
return links, ImageParseError{Message: "No images found", Code: 1}
|
||||
}
|
||||
|
||||
func ParseReadAllComicsLinks(markup *goquery.Document, c chan []string) ([]string, error) {
|
||||
var links []string
|
||||
markup.Find("img").Each(func(_ int, image *goquery.Selection) {
|
||||
link, _ := image.Attr("src")
|
||||
if !strings.Contains(link, "logo") && (strings.Contains(link, "bp.blogspot.com") || strings.Contains(link, "blogger.googleusercontent") || strings.Contains(link, "covers")) {
|
||||
links = append(links, link)
|
||||
}
|
||||
})
|
||||
|
||||
c <- links
|
||||
|
||||
if len(links) > 0 {
|
||||
return links, nil
|
||||
}
|
||||
|
||||
return links, ImageParseError{Message: "No images found", Code: 1}
|
||||
}
|
||||
|
||||
// ParseBatcaveBizImageLinks extracts image URLs from the __DATA__.images JavaScript
|
||||
// variable embedded in a batcave.biz page.
|
||||
func ParseBatcaveBizImageLinks(markup *goquery.Document, c chan []string) ([]string, error) {
|
||||
var links []string
|
||||
|
||||
markup.Find("script").Each(func(_ int, s *goquery.Selection) {
|
||||
text := s.Text()
|
||||
if !strings.Contains(text, "__DATA__") {
|
||||
return
|
||||
}
|
||||
|
||||
arrayRegex := regexp.MustCompile(`"images"\s*:\s*\[([^\]]+)\]`)
|
||||
arrayMatch := arrayRegex.FindStringSubmatch(text)
|
||||
if len(arrayMatch) < 2 {
|
||||
return
|
||||
}
|
||||
|
||||
urlRegex := regexp.MustCompile(`"([^"]+)"`)
|
||||
for _, m := range urlRegex.FindAllStringSubmatch(arrayMatch[1], -1) {
|
||||
if len(m) >= 2 {
|
||||
links = append(links, m[1])
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
c <- links
|
||||
|
||||
if len(links) > 0 {
|
||||
return links, nil
|
||||
}
|
||||
|
||||
return links, ImageParseError{Message: "No images found", Code: 1}
|
||||
}
|
||||
|
||||
@@ -1,183 +0,0 @@
|
||||
package comic
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
)
|
||||
|
||||
func TestParseBatcaveBizImageLinks(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
html string
|
||||
expectCount int
|
||||
expectErr bool
|
||||
expectURLs []string
|
||||
}{
|
||||
{
|
||||
name: "extracts images from __DATA__",
|
||||
html: `<html><body><script>
|
||||
var __DATA__ = {"images":["https://cdn.batcave.biz/img/001.jpg","https://cdn.batcave.biz/img/002.jpg"]};
|
||||
</script></body></html>`,
|
||||
expectCount: 2,
|
||||
expectErr: false,
|
||||
expectURLs: []string{"https://cdn.batcave.biz/img/001.jpg", "https://cdn.batcave.biz/img/002.jpg"},
|
||||
},
|
||||
{
|
||||
name: "extracts images with spaces around colon and bracket",
|
||||
html: `<html><body><script>
|
||||
var __DATA__ = {"images" : [ "https://cdn.batcave.biz/img/001.jpg" ]};
|
||||
</script></body></html>`,
|
||||
expectCount: 1,
|
||||
expectErr: false,
|
||||
expectURLs: []string{"https://cdn.batcave.biz/img/001.jpg"},
|
||||
},
|
||||
{
|
||||
name: "no __DATA__ script",
|
||||
html: `<html><body><script>
|
||||
var foo = "bar";
|
||||
</script></body></html>`,
|
||||
expectCount: 0,
|
||||
expectErr: true,
|
||||
},
|
||||
{
|
||||
name: "__DATA__ present but no images key",
|
||||
html: `<html><body><script>
|
||||
var __DATA__ = {"title":"Nightwing"};
|
||||
</script></body></html>`,
|
||||
expectCount: 0,
|
||||
expectErr: true,
|
||||
},
|
||||
{
|
||||
name: "no script tags",
|
||||
html: `<html><body><p>nothing here</p></body></html>`,
|
||||
expectCount: 0,
|
||||
expectErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
doc, _ := goquery.NewDocumentFromReader(strings.NewReader(tt.html))
|
||||
ch := make(chan []string, 1)
|
||||
|
||||
links, err := ParseBatcaveBizImageLinks(doc, ch)
|
||||
|
||||
if tt.expectErr && err == nil {
|
||||
t.Error("ParseBatcaveBizImageLinks() expected error, got nil")
|
||||
}
|
||||
if !tt.expectErr && err != nil {
|
||||
t.Errorf("ParseBatcaveBizImageLinks() unexpected error: %v", err)
|
||||
}
|
||||
if len(links) != tt.expectCount {
|
||||
t.Errorf("ParseBatcaveBizImageLinks() returned %d links, want %d", len(links), tt.expectCount)
|
||||
}
|
||||
for i, expected := range tt.expectURLs {
|
||||
if i >= len(links) {
|
||||
t.Errorf("missing link at index %d: want %q", i, expected)
|
||||
continue
|
||||
}
|
||||
if links[i] != expected {
|
||||
t.Errorf("links[%d] = %q, want %q", i, links[i], expected)
|
||||
}
|
||||
}
|
||||
|
||||
channelLinks := <-ch
|
||||
if len(channelLinks) != tt.expectCount {
|
||||
t.Errorf("channel received %d links, want %d", len(channelLinks), tt.expectCount)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageParseError(t *testing.T) {
|
||||
err := ImageParseError{Message: "test error", Code: 1}
|
||||
if err.Error() != "test error" {
|
||||
t.Errorf("Error() = %q, want %q", err.Error(), "test error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseImageLinks(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
html string
|
||||
expectCount int
|
||||
expectErr bool
|
||||
}{
|
||||
{
|
||||
name: "extracts blogspot images",
|
||||
html: `<html><body>
|
||||
<img src="https://bp.blogspot.com/page-001.jpg" />
|
||||
<img src="https://bp.blogspot.com/page-002.jpg" />
|
||||
</body></html>`,
|
||||
expectCount: 2,
|
||||
expectErr: false,
|
||||
},
|
||||
{
|
||||
name: "extracts blogger googleusercontent images",
|
||||
html: `<html><body>
|
||||
<img src="https://blogger.googleusercontent.com/page-001.jpg" />
|
||||
</body></html>`,
|
||||
expectCount: 1,
|
||||
expectErr: false,
|
||||
},
|
||||
{
|
||||
name: "extracts covers images",
|
||||
html: `<html><body>
|
||||
<img src="https://example.com/covers/cover-001.jpg" />
|
||||
</body></html>`,
|
||||
expectCount: 1,
|
||||
expectErr: false,
|
||||
},
|
||||
{
|
||||
name: "excludes logo images",
|
||||
html: `<html><body>
|
||||
<img src="https://bp.blogspot.com/logo-site.jpg" />
|
||||
<img src="https://bp.blogspot.com/page-001.jpg" />
|
||||
</body></html>`,
|
||||
expectCount: 1,
|
||||
expectErr: false,
|
||||
},
|
||||
{
|
||||
name: "excludes non-matching images",
|
||||
html: `<html><body>
|
||||
<img src="https://other-site.com/image.jpg" />
|
||||
<img src="https://cdn.example.com/banner.png" />
|
||||
</body></html>`,
|
||||
expectCount: 0,
|
||||
expectErr: true,
|
||||
},
|
||||
{
|
||||
name: "no images at all",
|
||||
html: `<html><body><p>No images here</p></body></html>`,
|
||||
expectCount: 0,
|
||||
expectErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
doc, _ := goquery.NewDocumentFromReader(strings.NewReader(tt.html))
|
||||
ch := make(chan []string, 1)
|
||||
|
||||
links, err := ParseImageLinks(doc, ch)
|
||||
|
||||
if tt.expectErr && err == nil {
|
||||
t.Error("ParseImageLinks() expected error, got nil")
|
||||
}
|
||||
if !tt.expectErr && err != nil {
|
||||
t.Errorf("ParseImageLinks() unexpected error: %v", err)
|
||||
}
|
||||
if len(links) != tt.expectCount {
|
||||
t.Errorf("ParseImageLinks() returned %d links, want %d", len(links), tt.expectCount)
|
||||
}
|
||||
|
||||
// Verify the channel also received the links
|
||||
channelLinks := <-ch
|
||||
if len(channelLinks) != tt.expectCount {
|
||||
t.Errorf("channel received %d links, want %d", len(channelLinks), tt.expectCount)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
1
go.mod
1
go.mod
@@ -5,7 +5,6 @@ go 1.22.3
|
||||
require (
|
||||
github.com/DaRealFreak/cloudflare-bp-go v1.0.4
|
||||
github.com/PuerkitoBio/goquery v1.9.2
|
||||
github.com/andybalholm/brotli v1.2.0
|
||||
github.com/spf13/cobra v1.8.1
|
||||
)
|
||||
|
||||
|
||||
4
go.sum
4
go.sum
@@ -4,8 +4,6 @@ github.com/EDDYCJY/fake-useragent v0.2.0 h1:Jcnkk2bgXmDpX0z+ELlUErTkoLb/mxFBNd2Y
|
||||
github.com/EDDYCJY/fake-useragent v0.2.0/go.mod h1:5wn3zzlDxhKW6NYknushqinPcAqZcAPHy8lLczCdJdc=
|
||||
github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE=
|
||||
github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
|
||||
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
||||
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
@@ -22,8 +20,6 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
|
||||
111
web/server.go
111
web/server.go
@@ -1,11 +1,9 @@
|
||||
package web
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"embed"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"net/url"
|
||||
@@ -74,7 +72,6 @@ func (s *Server) Handler() http.Handler {
|
||||
|
||||
// API
|
||||
mux.HandleFunc("/api/download", s.handleDownload)
|
||||
mux.HandleFunc("/api/upload", s.handleUpload)
|
||||
mux.HandleFunc("/api/comics", s.handleComics)
|
||||
mux.HandleFunc("/api/jobs", s.handleJobs)
|
||||
mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -243,114 +240,6 @@ func (s *Server) handleJobs(w http.ResponseWriter, r *http.Request) {
|
||||
json.NewEncoder(w).Encode(jobs)
|
||||
}
|
||||
|
||||
func (s *Server) handleUpload(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodPost {
|
||||
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
// 500 MB limit
|
||||
if err := r.ParseMultipartForm(500 << 20); err != nil {
|
||||
http.Error(w, "request too large", http.StatusRequestEntityTooLarge)
|
||||
return
|
||||
}
|
||||
|
||||
title := strings.TrimSpace(r.FormValue("title"))
|
||||
if title == "" {
|
||||
http.Error(w, "title required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
// Sanitize: no path separators or shell-special characters
|
||||
title = filepath.Base(title)
|
||||
title = strings.Map(func(r rune) rune {
|
||||
if strings.ContainsRune(`/\:*?"<>|`, r) {
|
||||
return '_'
|
||||
}
|
||||
return r
|
||||
}, title)
|
||||
|
||||
fileHeaders := r.MultipartForm.File["images"]
|
||||
if len(fileHeaders) == 0 {
|
||||
http.Error(w, "no images provided", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Sort by original filename so page order is preserved
|
||||
sort.Slice(fileHeaders, func(i, j int) bool {
|
||||
return fileHeaders[i].Filename < fileHeaders[j].Filename
|
||||
})
|
||||
|
||||
dir := filepath.Join(s.libraryPath, title)
|
||||
if err := os.MkdirAll(dir, 0o755); err != nil {
|
||||
http.Error(w, "failed to create directory", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
cbzPath := filepath.Join(dir, title+".cbz")
|
||||
cbzFile, err := os.Create(cbzPath)
|
||||
if err != nil {
|
||||
http.Error(w, "failed to create archive", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer cbzFile.Close()
|
||||
|
||||
zw := zip.NewWriter(cbzFile)
|
||||
defer zw.Close()
|
||||
|
||||
imageExts := map[string]bool{".jpg": true, ".jpeg": true, ".png": true, ".webp": true}
|
||||
idx := 1
|
||||
|
||||
for _, fh := range fileHeaders {
|
||||
ext := strings.ToLower(filepath.Ext(fh.Filename))
|
||||
if !imageExts[ext] {
|
||||
continue
|
||||
}
|
||||
if ext == ".jpeg" {
|
||||
ext = ".jpg"
|
||||
}
|
||||
|
||||
entryName := fmt.Sprintf("%03d%s", idx, ext)
|
||||
|
||||
src, err := fh.Open()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Save first image as cover: "<Title> 001.jpg"
|
||||
if idx == 1 {
|
||||
coverPath := filepath.Join(dir, title+" "+entryName)
|
||||
if cf, err := os.Create(coverPath); err == nil {
|
||||
io.Copy(cf, src)
|
||||
cf.Close()
|
||||
src.Close()
|
||||
src, err = fh.Open()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ze, err := zw.Create(entryName)
|
||||
if err != nil {
|
||||
src.Close()
|
||||
continue
|
||||
}
|
||||
io.Copy(ze, src)
|
||||
src.Close()
|
||||
idx++
|
||||
}
|
||||
|
||||
if idx == 1 {
|
||||
// Nothing was written — no valid images
|
||||
os.RemoveAll(dir)
|
||||
http.Error(w, "no valid images in upload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(map[string]string{"title": title, "status": "complete"})
|
||||
}
|
||||
|
||||
func Listen(addr string, libraryPath string) error {
|
||||
srv := NewServer(libraryPath)
|
||||
fmt.Printf("Yoink web server listening on %s\n", addr)
|
||||
|
||||
@@ -587,206 +587,6 @@
|
||||
.toast-msg.is-error { border-left-color: var(--error); }
|
||||
.toast-msg.fade { opacity: 0; }
|
||||
|
||||
/* ── Upload button ───────────────────────────────────────────────────── */
|
||||
.upload-btn {
|
||||
height: 36px;
|
||||
width: 36px;
|
||||
background: var(--surface2);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: var(--radius);
|
||||
color: var(--text2);
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-shrink: 0;
|
||||
transition: background 0.15s, border-color 0.15s, color 0.15s;
|
||||
}
|
||||
|
||||
.upload-btn:hover { background: var(--border); border-color: var(--border2); color: var(--text); }
|
||||
|
||||
/* ── Upload modal ────────────────────────────────────────────────────── */
|
||||
.modal-backdrop {
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: rgba(0,0,0,0.6);
|
||||
backdrop-filter: blur(4px);
|
||||
-webkit-backdrop-filter: blur(4px);
|
||||
z-index: 200;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
opacity: 0;
|
||||
pointer-events: none;
|
||||
transition: opacity 0.2s;
|
||||
}
|
||||
|
||||
.modal-backdrop.open { opacity: 1; pointer-events: all; }
|
||||
|
||||
.modal {
|
||||
background: var(--surface);
|
||||
border: 1px solid var(--border2);
|
||||
border-radius: 12px;
|
||||
padding: 28px;
|
||||
width: 480px;
|
||||
max-width: calc(100vw - 32px);
|
||||
box-shadow: 0 24px 80px rgba(0,0,0,0.6);
|
||||
transform: translateY(12px);
|
||||
transition: transform 0.2s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 18px;
|
||||
}
|
||||
|
||||
.modal-backdrop.open .modal { transform: translateY(0); }
|
||||
|
||||
.modal-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.modal-title {
|
||||
font-size: 0.95rem;
|
||||
font-weight: 700;
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
.modal-close {
|
||||
background: none;
|
||||
border: none;
|
||||
color: var(--muted);
|
||||
cursor: pointer;
|
||||
font-size: 1.2rem;
|
||||
line-height: 1;
|
||||
width: 28px;
|
||||
height: 28px;
|
||||
border-radius: var(--radius-sm);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
transition: color 0.12s, background 0.12s;
|
||||
}
|
||||
|
||||
.modal-close:hover { color: var(--text); background: var(--border); }
|
||||
|
||||
.upload-title-input {
|
||||
width: 100%;
|
||||
height: 38px;
|
||||
padding: 0 14px;
|
||||
background: var(--surface2);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: var(--radius);
|
||||
color: var(--text);
|
||||
font-family: inherit;
|
||||
font-size: 0.875rem;
|
||||
outline: none;
|
||||
transition: border-color 0.15s, box-shadow 0.15s;
|
||||
}
|
||||
|
||||
.upload-title-input::placeholder { color: var(--muted); }
|
||||
|
||||
.upload-title-input:focus {
|
||||
border-color: var(--accent);
|
||||
box-shadow: 0 0 0 3px rgba(91,140,245,0.15);
|
||||
}
|
||||
|
||||
.drop-zone {
|
||||
border: 2px dashed var(--border2);
|
||||
border-radius: var(--radius);
|
||||
padding: 36px 20px;
|
||||
text-align: center;
|
||||
cursor: pointer;
|
||||
transition: border-color 0.15s, background 0.15s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
.drop-zone:hover, .drop-zone.drag-over {
|
||||
border-color: var(--accent);
|
||||
background: var(--accent-dim);
|
||||
color: var(--text2);
|
||||
}
|
||||
|
||||
.drop-zone svg { opacity: 0.5; }
|
||||
.drop-zone.drag-over svg { opacity: 0.8; }
|
||||
|
||||
.drop-zone-label {
|
||||
font-size: 0.85rem;
|
||||
font-weight: 500;
|
||||
color: var(--text2);
|
||||
}
|
||||
|
||||
.drop-zone-sub {
|
||||
font-size: 0.75rem;
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
.drop-zone-actions {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.pick-btn {
|
||||
height: 30px;
|
||||
padding: 0 14px;
|
||||
background: var(--surface2);
|
||||
border: 1px solid var(--border2);
|
||||
border-radius: var(--radius-sm);
|
||||
color: var(--text2);
|
||||
font-family: inherit;
|
||||
font-size: 0.75rem;
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
transition: border-color 0.12s, color 0.12s, background 0.12s;
|
||||
}
|
||||
|
||||
.pick-btn:hover { border-color: var(--accent); color: var(--text); background: var(--accent-dim); }
|
||||
|
||||
.file-count {
|
||||
font-size: 0.78rem;
|
||||
color: var(--accent);
|
||||
font-weight: 600;
|
||||
min-height: 1.2em;
|
||||
}
|
||||
|
||||
.upload-progress {
|
||||
height: 3px;
|
||||
background: var(--border);
|
||||
border-radius: 999px;
|
||||
overflow: hidden;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.upload-progress-bar {
|
||||
height: 100%;
|
||||
background: var(--accent);
|
||||
border-radius: 999px;
|
||||
width: 0%;
|
||||
transition: width 0.2s;
|
||||
}
|
||||
|
||||
.modal-submit {
|
||||
height: 40px;
|
||||
background: var(--accent);
|
||||
color: #fff;
|
||||
border: none;
|
||||
border-radius: var(--radius);
|
||||
font-family: inherit;
|
||||
font-size: 0.875rem;
|
||||
font-weight: 700;
|
||||
cursor: pointer;
|
||||
transition: background 0.15s, transform 0.1s;
|
||||
}
|
||||
|
||||
.modal-submit:hover { background: var(--accent-hv); }
|
||||
.modal-submit:active { transform: scale(0.98); }
|
||||
.modal-submit:disabled { opacity: 0.4; cursor: not-allowed; transform: none; }
|
||||
|
||||
.toast-icon {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
@@ -909,46 +709,8 @@
|
||||
/>
|
||||
<button class="url-btn" id="url-btn" type="submit">Download</button>
|
||||
</form>
|
||||
|
||||
<button class="upload-btn" id="upload-open-btn" title="Package local images as CBZ">
|
||||
<svg width="17" height="17" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"/>
|
||||
<polyline points="17 8 12 3 7 8"/>
|
||||
<line x1="12" y1="3" x2="12" y2="15"/>
|
||||
</svg>
|
||||
</button>
|
||||
</header>
|
||||
|
||||
<!-- Upload modal -->
|
||||
<div class="modal-backdrop" id="upload-modal">
|
||||
<div class="modal" role="dialog" aria-modal="true" aria-labelledby="modal-title-label">
|
||||
<div class="modal-header">
|
||||
<span class="modal-title" id="modal-title-label">Package images as CBZ</span>
|
||||
<button class="modal-close" id="upload-close-btn" aria-label="Close">×</button>
|
||||
</div>
|
||||
|
||||
<input class="upload-title-input" id="upload-title" type="text" placeholder="Comic title…" autocomplete="off" />
|
||||
|
||||
<div class="drop-zone" id="drop-zone">
|
||||
<svg width="32" height="32" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round">
|
||||
<rect x="3" y="3" width="18" height="18" rx="2"/>
|
||||
<path d="M3 9h18M9 21V9"/>
|
||||
</svg>
|
||||
<span class="drop-zone-label">Drop images or a folder here</span>
|
||||
<span class="drop-zone-sub">JPG, PNG, WebP — sorted by filename</span>
|
||||
<div class="drop-zone-actions">
|
||||
<button class="pick-btn" id="pick-folder-btn" type="button">Select folder</button>
|
||||
<button class="pick-btn" id="pick-files-btn" type="button">Select files</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="file-count" id="file-count"></div>
|
||||
<div class="upload-progress" id="upload-progress"><div class="upload-progress-bar" id="upload-progress-bar"></div></div>
|
||||
|
||||
<button class="modal-submit" id="upload-submit-btn" disabled>Package & add to library</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="queue"></div>
|
||||
|
||||
<main>
|
||||
@@ -998,10 +760,6 @@
|
||||
|
||||
<div id="toast"></div>
|
||||
|
||||
<!-- Hidden file inputs for upload modal -->
|
||||
<input type="file" id="file-input-folder" style="display:none" multiple webkitdirectory />
|
||||
<input type="file" id="file-input-files" style="display:none" multiple accept="image/*" />
|
||||
|
||||
<script>
|
||||
// ── State ──────────────────────────────────────────────────────────────
|
||||
const PAGE_SIZE = 48;
|
||||
@@ -1356,155 +1114,6 @@
|
||||
fetchComics();
|
||||
setInterval(pollJobs, 2000);
|
||||
setInterval(fetchComics, 10000);
|
||||
|
||||
// ── Upload modal ───────────────────────────────────────────────────────
|
||||
const uploadModal = document.getElementById('upload-modal');
|
||||
const uploadOpenBtn = document.getElementById('upload-open-btn');
|
||||
const uploadCloseBtn = document.getElementById('upload-close-btn');
|
||||
const uploadTitleInput = document.getElementById('upload-title');
|
||||
const dropZone = document.getElementById('drop-zone');
|
||||
const pickFolderBtn = document.getElementById('pick-folder-btn');
|
||||
const pickFilesBtn = document.getElementById('pick-files-btn');
|
||||
const fileInputFolder = document.getElementById('file-input-folder');
|
||||
const fileInputFiles = document.getElementById('file-input-files');
|
||||
const fileCountEl = document.getElementById('file-count');
|
||||
const uploadProgress = document.getElementById('upload-progress');
|
||||
const uploadProgressBar= document.getElementById('upload-progress-bar');
|
||||
const uploadSubmitBtn = document.getElementById('upload-submit-btn');
|
||||
|
||||
let pendingFiles = [];
|
||||
|
||||
const IMAGE_EXTS = new Set(['.jpg', '.jpeg', '.png', '.webp']);
|
||||
|
||||
// isImage reports whether a filename carries one of the supported image
// extensions (case-insensitive). Files without any extension are rejected
// explicitly rather than relying on slice(-1) accidentally missing the set.
function isImage(name) {
  const dot = name.lastIndexOf('.');
  if (dot === -1) return false;
  return IMAGE_EXTS.has(name.slice(dot).toLowerCase());
}
|
||||
|
||||
function setFiles(fileList) {
|
||||
const imgs = Array.from(fileList).filter(f => isImage(f.name));
|
||||
imgs.sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true }));
|
||||
pendingFiles = imgs;
|
||||
fileCountEl.textContent = imgs.length
|
||||
? `${imgs.length} image${imgs.length !== 1 ? 's' : ''} selected`
|
||||
: '';
|
||||
uploadSubmitBtn.disabled = imgs.length === 0 || !uploadTitleInput.value.trim();
|
||||
}
|
||||
|
||||
function openModal() {
|
||||
uploadModal.classList.add('open');
|
||||
uploadTitleInput.focus();
|
||||
}
|
||||
|
||||
function closeModal() {
|
||||
uploadModal.classList.remove('open');
|
||||
pendingFiles = [];
|
||||
fileCountEl.textContent = '';
|
||||
uploadTitleInput.value = '';
|
||||
uploadSubmitBtn.disabled = true;
|
||||
uploadProgress.style.display = 'none';
|
||||
uploadProgressBar.style.width = '0%';
|
||||
fileInputFolder.value = '';
|
||||
fileInputFiles.value = '';
|
||||
}
|
||||
|
||||
uploadOpenBtn.addEventListener('click', openModal);
|
||||
uploadCloseBtn.addEventListener('click', closeModal);
|
||||
uploadModal.addEventListener('click', e => { if (e.target === uploadModal) closeModal(); });
|
||||
document.addEventListener('keydown', e => { if (e.key === 'Escape' && uploadModal.classList.contains('open')) closeModal(); });
|
||||
|
||||
pickFolderBtn.addEventListener('click', () => fileInputFolder.click());
|
||||
pickFilesBtn.addEventListener('click', () => fileInputFiles.click());
|
||||
|
||||
fileInputFolder.addEventListener('change', e => {
|
||||
// Auto-fill title from folder name via webkitRelativePath
|
||||
const files = Array.from(e.target.files);
|
||||
if (files.length && !uploadTitleInput.value.trim()) {
|
||||
const rel = files[0].webkitRelativePath;
|
||||
if (rel) uploadTitleInput.value = rel.split('/')[0];
|
||||
}
|
||||
setFiles(e.target.files);
|
||||
});
|
||||
|
||||
fileInputFiles.addEventListener('change', e => setFiles(e.target.files));
|
||||
|
||||
uploadTitleInput.addEventListener('input', () => {
|
||||
uploadSubmitBtn.disabled = pendingFiles.length === 0 || !uploadTitleInput.value.trim();
|
||||
});
|
||||
|
||||
// Drag & drop: accept loose files or a whole folder. Folder traversal uses
// the webkitGetAsEntry API.
dropZone.addEventListener('dragover', e => {
  e.preventDefault();
  dropZone.classList.add('drag-over');
});
dropZone.addEventListener('dragleave', () => dropZone.classList.remove('drag-over'));
dropZone.addEventListener('drop', async e => {
  e.preventDefault();
  dropZone.classList.remove('drag-over');

  const collected = [];
  const entries = Array.from(e.dataTransfer.items)
    .filter(i => i.kind === 'file')
    .map(i => i.webkitGetAsEntry());

  async function readEntry(entry) {
    if (entry.isFile) {
      await new Promise(res => entry.file(f => { collected.push(f); res(); }));
    } else if (entry.isDirectory) {
      // Auto-fill the title from the first dropped folder's name.
      if (!uploadTitleInput.value.trim()) uploadTitleInput.value = entry.name;
      const reader = entry.createReader();
      // readEntries() returns at most ~100 entries per call (per the File
      // and Directory Entries API); keep calling until an empty batch, or
      // large folders silently lose pages.
      for (;;) {
        const batch = await new Promise((res, rej) => reader.readEntries(res, rej));
        if (!batch.length) break;
        for (const child of batch) await readEntry(child);
      }
    }
  }

  for (const entry of entries) if (entry) await readEntry(entry);
  setFiles(collected);
});
|
||||
|
||||
// Submit: stream the selected images to /api/upload as multipart form data,
// reporting progress on the modal's progress bar.
uploadSubmitBtn.addEventListener('click', async () => {
  const title = uploadTitleInput.value.trim();
  if (!title || pendingFiles.length === 0) return;

  uploadSubmitBtn.disabled = true;
  uploadProgress.style.display = 'block';

  const body = new FormData();
  body.append('title', title);
  for (const file of pendingFiles) body.append('images', file, file.name);

  const req = new XMLHttpRequest();
  req.open('POST', '/api/upload');

  req.upload.addEventListener('progress', ev => {
    if (!ev.lengthComputable) return;
    const pct = Math.round((ev.loaded / ev.total) * 100);
    uploadProgressBar.style.width = pct + '%';
  });

  // Re-enable the form so the user can retry after a failure.
  const resetUI = () => {
    uploadSubmitBtn.disabled = false;
    uploadProgress.style.display = 'none';
  };

  req.addEventListener('load', () => {
    if (req.status !== 200) {
      toast('Upload failed: ' + req.responseText, true);
      resetUI();
      return;
    }
    toast(`"${title}" added to library`);
    closeModal();
    fetchComics();
  });

  req.addEventListener('error', () => {
    toast('Upload failed', true);
    resetUI();
  });

  req.send(body);
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
Reference in New Issue
Block a user