commit cdcc5b5293e2e0d19fb0fdb816274256ac9371ef Author: soup Date: Sat Jan 17 01:09:23 2026 -0500 Initial commit: Lookbook personal collection app Pinterest-like app for saving images, videos, quotes, and embeds. Features: - Go backend with PostgreSQL, SSR templates - Console-based admin auth (login/logout via browser console) - Item types: images, videos (ffmpeg transcoding), quotes, embeds - Media stored as BLOBs in PostgreSQL - OpenGraph metadata extraction for links - Embed detection for YouTube, Vimeo, Twitter/X - Masonry grid layout, item detail pages - Tag system with filtering - Refresh metadata endpoint with change warnings - Replace media endpoint for updating item images/videos diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..62c0224 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use nix --impure diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b1c02c5 --- /dev/null +++ b/.gitignore @@ -0,0 +1,30 @@ +.direnv/ + +# Environment files +.env +.env.local + +# IDE +.idea/ +.vscode/ +*.swp +*.swo + +# macOS +.DS_Store + +# OpenCode +.opencode/ + +# Go +/bin/ +*.exe +*.out +*.test +result + +# Windows zone identifiers +*:Zone.Identifier + +# Inspiration/test files +/inspo/ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..2add9f0 --- /dev/null +++ b/Makefile @@ -0,0 +1,26 @@ +APP := ./cmd/server +VERSION := $(shell git rev-parse --short HEAD 2>/dev/null || echo "dev") +LDFLAGS := -X lookbook/internal/static.Version=$(VERSION) + +.PHONY: dev run migrate rollback fmt test build + +dev: + hivemind + +run: + go run $(APP) web + +migrate: + go run $(APP) -migrate + +rollback: + go run $(APP) -rollback + +fmt: + go fmt ./... + +test: + go test -v ./... + +build: + CGO_ENABLED=0 go build -ldflags "$(LDFLAGS)" -o bin/lookbook $(APP) diff --git a/Procfile b/Procfile new file mode 100644 index 0000000..1a99a11 --- /dev/null +++ b/Procfile @@ -0,0 +1 @@ +web: fd . 
-e go -e html -e css -e js | entr -r go run ./cmd/server web diff --git a/cmd/server/main.go b/cmd/server/main.go new file mode 100644 index 0000000..05aa700 --- /dev/null +++ b/cmd/server/main.go @@ -0,0 +1,191 @@ +package main + +import ( + "context" + "database/sql" + "flag" + "fmt" + "log/slog" + "net/http" + "os" + "os/signal" + "syscall" + "time" + + _ "github.com/jackc/pgx/v5/stdlib" + + "git.soup.land/soup/sxgo/ssr" + "lookbook/internal/handlers" + "lookbook/internal/middleware" + "lookbook/internal/migrations" + "lookbook/internal/static" +) + +const defaultAddr = ":8080" + +func main() { + migrate := flag.Bool("migrate", false, "run database migrations and exit") + rollback := flag.Bool("rollback", false, "roll back migrations and exit (one step by default)") + rollbackTarget := flag.Int64("to", -1, "target version for rollback when using -rollback") + dbURLFlag := flag.String("db-url", "", "database connection URL") + flag.Parse() + + logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{ + Level: slog.LevelInfo, + })) + + dbURL := *dbURLFlag + if dbURL == "" { + dbURL = os.Getenv("DATABASE_URL") + } + if dbURL == "" { + dbURL = migrations.DefaultURL + } + + switch { + case *migrate && *rollback: + logger.Error("choose either -migrate or -rollback, not both") + os.Exit(1) + case *migrate: + if err := migrations.Up(context.Background(), dbURL, logger); err != nil { + logger.Error("migration failed", slog.Any("err", err)) + os.Exit(1) + } + return + case *rollback: + if err := migrations.Down(context.Background(), dbURL, *rollbackTarget, logger); err != nil { + logger.Error("rollback failed", slog.Any("err", err)) + os.Exit(1) + } + return + } + + // Check for subcommand + args := flag.Args() + if len(args) < 1 { + printUsage() + os.Exit(1) + } + + mode := args[0] + switch mode { + case "web": + runWebServer(dbURL, logger) + default: + logger.Error("unknown mode", slog.String("mode", mode)) + printUsage() + os.Exit(1) + } +} + +func 
printUsage() { + fmt.Fprintln(os.Stderr, "Usage:") + fmt.Fprintln(os.Stderr, " lookbook web - Run web server") + fmt.Fprintln(os.Stderr, "") + fmt.Fprintln(os.Stderr, "Flags:") + fmt.Fprintln(os.Stderr, " -migrate Run database migrations") + fmt.Fprintln(os.Stderr, " -rollback Roll back migrations") + fmt.Fprintln(os.Stderr, " -to Target version for rollback") + fmt.Fprintln(os.Stderr, " -db-url Database connection URL") +} + +func runWebServer(dbURL string, logger *slog.Logger) { + // Check for pending migrations + pending, err := migrations.CheckPending(context.Background(), dbURL, logger) + if err != nil { + logger.Warn("could not check migration status", slog.Any("err", err)) + } else if pending > 0 { + logger.Warn("database has pending migrations", + slog.Int("pending", pending), + slog.String("hint", "run 'make migrate' to apply")) + } + + db, err := sql.Open("pgx", dbURL) + if err != nil { + logger.Error("failed to open database", slog.Any("err", err)) + os.Exit(1) + } + defer db.Close() + + if err := db.Ping(); err != nil { + logger.Error("failed to ping database", slog.Any("err", err)) + os.Exit(1) + } + + rc := &handlers.RequestContext{ + DB: db, + Logger: logger, + TmplCache: ssr.NewTmplCache(handlers.TemplateFuncs), + } + router := handlers.NewRouter(rc) + + // Pages + router.Handle("GET /", handlers.HandleHome) + router.Handle("GET /item/{id}", handlers.HandleItemPage) + + // Static files + router.HandleStd("GET /static/{version}/", static.Handler()) + + // Media + router.Handle("GET /media/{id}", handlers.HandleGetMedia) + + // Auth API + router.Handle("POST /api/auth/login", handlers.HandleLogin) + router.Handle("POST /api/auth/logout", handlers.HandleLogout) + router.Handle("GET /api/auth/status", handlers.HandleAuthStatus) + + // Items API + router.Handle("GET /api/items", handlers.HandleListItems) + router.Handle("GET /api/items/{id}", handlers.HandleGetItem) + router.Handle("POST /api/items", handlers.HandleCreateItem) + router.Handle("PUT 
/api/items/{id}", handlers.HandleUpdateItem) + router.Handle("DELETE /api/items/{id}", handlers.HandleDeleteItem) + + // Item creation endpoints + router.Handle("POST /api/preview", handlers.HandlePreviewLink) + router.Handle("POST /api/items/from-link", handlers.HandleCreateFromLink) + router.Handle("POST /api/items/upload", handlers.HandleUpload) + router.Handle("POST /api/items/quote", handlers.HandleCreateQuote) + router.Handle("POST /api/items/{id}/refresh", handlers.HandleRefreshMetadata) + router.Handle("POST /api/items/{id}/media", handlers.HandleReplaceMedia) + + // Tags API + router.Handle("GET /api/tags", handlers.HandleListTags) + router.Handle("GET /api/tags/suggest", handlers.HandleSuggestTags) + + addr := defaultAddr + if envAddr := os.Getenv("ADDR"); envAddr != "" { + addr = envAddr + } + + server := &http.Server{ + Addr: addr, + Handler: middleware.Logging(logger)(router), + ReadHeaderTimeout: 5 * time.Second, + } + + go func() { + logger.Info("listening", slog.String("addr", addr)) + if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed { + logger.Error("http server error", slog.Any("err", err)) + os.Exit(1) + } + }() + + waitForShutdown(server, logger) +} + +func waitForShutdown(server *http.Server, logger *slog.Logger) { + quit := make(chan os.Signal, 1) + signal.Notify(quit, os.Interrupt, syscall.SIGTERM) + + <-quit + logger.Info("shutting down") + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + if err := server.Shutdown(ctx); err != nil { + logger.Error("graceful shutdown failed", slog.Any("err", err)) + } +} diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..1a2b009 --- /dev/null +++ b/default.nix @@ -0,0 +1,90 @@ +{ pkgs ? 
import {} }: + +let + lookbook = pkgs.buildGoModule { + pname = "lookbook"; + version = "0.1.0"; + src = ./.; + + vendorHash = null; # Update after first build + + env.GOPRIVATE = "git.soup.land"; + + # Build the server binary + subPackages = [ "cmd/server" ]; + + postInstall = '' + mv $out/bin/server $out/bin/lookbook + ''; + + meta = with pkgs.lib; { + description = "Lookbook"; + homepage = "https://git.soup.land/soup/lookbook"; + }; + }; +in +{ + package = lookbook; + + nixosModule = { config, lib, pkgs, ... }: + let + cfg = config.services.lookbook; + in { + options.services.lookbook = { + enable = lib.mkEnableOption "lookbook service"; + + address = lib.mkOption { + type = lib.types.str; + default = "127.0.0.1:8080"; + description = "Address to listen on"; + }; + + databaseUrl = lib.mkOption { + type = lib.types.str; + description = "PostgreSQL connection URL"; + }; + + user = lib.mkOption { + type = lib.types.str; + default = "lookbook"; + description = "User to run the service as"; + }; + + group = lib.mkOption { + type = lib.types.str; + default = "lookbook"; + description = "Group to run the service as"; + }; + }; + + config = lib.mkIf cfg.enable { + users.users.${cfg.user} = { + isSystemUser = true; + group = cfg.group; + }; + users.groups.${cfg.group} = {}; + + systemd.services.lookbook = { + description = "Lookbook"; + wantedBy = [ "multi-user.target" ]; + after = [ "network.target" "postgresql.service" ]; + requires = [ "postgresql.service" ]; + + environment = { + DATABASE_URL = cfg.databaseUrl; + ADDR = cfg.address; + }; + + serviceConfig = { + Type = "simple"; + User = cfg.user; + Group = cfg.group; + ExecStartPre = "${lookbook}/bin/lookbook -migrate"; + ExecStart = "${lookbook}/bin/lookbook web"; + Restart = "always"; + RestartSec = 5; + }; + }; + }; + }; +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..f45b382 --- /dev/null +++ b/go.mod @@ -0,0 +1,23 @@ +module lookbook + +go 1.23.0 + +require ( + git.soup.land/soup/sxgo v0.1.1 
+ github.com/jackc/pgx/v5 v5.7.6 + github.com/pressly/goose/v3 v3.26.0 + golang.org/x/crypto v0.40.0 + golang.org/x/net v0.42.0 +) + +require ( + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/mfridman/interpolate v0.0.2 // indirect + github.com/sethvargo/go-retry v0.3.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/sync v0.16.0 // indirect + golang.org/x/text v0.27.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..d938dbb --- /dev/null +++ b/go.sum @@ -0,0 +1,64 @@ +git.soup.land/soup/sxgo v0.1.1 h1:EIEHcb+yptNHy5kd+/YU/+Ov4kNYftDm0+El3jFFmks= +git.soup.land/soup/sxgo v0.1.1/go.mod h1:U8x8wBk6gx4j43wgT8wXX3J6o4KIz92tPCVBeytDTxM= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile 
v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.7.6 h1:rWQc5FwZSPX58r1OQmkuaNicxdmExaEz5A2DO2hUuTk= +github.com/jackc/pgx/v5 v5.7.6/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY= +github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg= +github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= +github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM= +github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE= +github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8= +github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= +golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o= +golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= +golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= +golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= +golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +modernc.org/libc v1.66.3 h1:cfCbjTUcdsKyyZZfEUKfoHcP3S0Wkvz3jgSzByEWVCQ= +modernc.org/libc v1.66.3/go.mod h1:XD9zO8kt59cANKvHPXpx7yS2ELPheAey0vjIuZOhOU8= +modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= 
+modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg= +modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= +modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= +modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek= +modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E= diff --git a/internal/components/page.go b/internal/components/page.go new file mode 100644 index 0000000..82c4d1d --- /dev/null +++ b/internal/components/page.go @@ -0,0 +1,43 @@ +package components + +import ( + "git.soup.land/soup/sxgo/ssr" +) + +type Page struct { + Title string + IsAdmin bool + Content ssr.Renderable +} + +func (p Page) Render(sw *ssr.Writer) error { + sw.Tmpl(p, ` + + + + + + {{if .Title}}{{.Title}} - {{end}}Lookbook + + + +
+ + +
+
+`) + + p.Content.Render(sw) + return sw.Tmpl(p, ` +
+ + + + +`) +} diff --git a/internal/data/admin/queries.go b/internal/data/admin/queries.go new file mode 100644 index 0000000..65aec5c --- /dev/null +++ b/internal/data/admin/queries.go @@ -0,0 +1,41 @@ +package admin + +import ( + "context" + "database/sql" + "time" +) + +type Row struct { + ID int + PasswordHash []byte // nil if not set + CreatedAt time.Time +} + +// QGet returns the single admin row. +func QGet(ctx context.Context, db *sql.DB) (Row, error) { + query := `SELECT id, password_hash, created_at FROM admin WHERE id = 1` + + var row Row + err := db.QueryRowContext(ctx, query).Scan( + &row.ID, + &row.PasswordHash, + &row.CreatedAt, + ) + return row, err +} + +// QSetPassword sets the admin password hash. +func QSetPassword(ctx context.Context, db *sql.DB, hash []byte) error { + query := `UPDATE admin SET password_hash = $1 WHERE id = 1` + _, err := db.ExecContext(ctx, query, hash) + return err +} + +// QHasPassword returns true if a password has been set. +func QHasPassword(ctx context.Context, db *sql.DB) (bool, error) { + query := `SELECT password_hash IS NOT NULL FROM admin WHERE id = 1` + var has bool + err := db.QueryRowContext(ctx, query).Scan(&has) + return has, err +} diff --git a/internal/data/item/queries.go b/internal/data/item/queries.go new file mode 100644 index 0000000..b3719dc --- /dev/null +++ b/internal/data/item/queries.go @@ -0,0 +1,219 @@ +package item + +import ( + "context" + "database/sql" + "fmt" + "time" + + "github.com/jackc/pgx/v5/pgtype" +) + +type Row struct { + ID int64 + PubID string + Title *string + Description *string + LinkURL *string + ItemType string // 'image', 'video', 'quote', 'embed' + EmbedProvider *string + EmbedVideoID *string + EmbedHTML *string + CreatedAt time.Time + DeletedAt *time.Time +} + +type CreateParams struct { + Title *string + Description *string + LinkURL *string + ItemType string + EmbedProvider *string + EmbedVideoID *string + EmbedHTML *string +} + +// QCreate creates a new item. 
+func QCreate(ctx context.Context, db *sql.DB, p CreateParams) (Row, error) { + query := ` + INSERT INTO item (title, description, link_url, item_type, embed_provider, embed_video_id, embed_html) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id, pub_id, title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, created_at, deleted_at + ` + + var row Row + var pubID pgtype.UUID + err := db.QueryRowContext(ctx, query, + p.Title, p.Description, p.LinkURL, p.ItemType, p.EmbedProvider, p.EmbedVideoID, p.EmbedHTML, + ).Scan( + &row.ID, &pubID, &row.Title, &row.Description, &row.LinkURL, + &row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, + &row.CreatedAt, &row.DeletedAt, + ) + if err == nil { + row.PubID = formatUUID(pubID) + } + return row, err +} + +// QFindByPubID finds an item by its public ID. +func QFindByPubID(ctx context.Context, db *sql.DB, pubID string) (*Row, error) { + query := ` + SELECT id, pub_id, title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, created_at, deleted_at + FROM item + WHERE pub_id = $1 + ` + + var row Row + var pubUUID pgtype.UUID + err := db.QueryRowContext(ctx, query, pubID).Scan( + &row.ID, &pubUUID, &row.Title, &row.Description, &row.LinkURL, + &row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, + &row.CreatedAt, &row.DeletedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + row.PubID = formatUUID(pubUUID) + return &row, nil +} + +// QFindByID finds an item by its internal ID. 
+func QFindByID(ctx context.Context, db *sql.DB, id int64) (*Row, error) { + query := ` + SELECT id, pub_id, title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, created_at, deleted_at + FROM item + WHERE id = $1 + ` + + var row Row + var pubUUID pgtype.UUID + err := db.QueryRowContext(ctx, query, id).Scan( + &row.ID, &pubUUID, &row.Title, &row.Description, &row.LinkURL, + &row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, + &row.CreatedAt, &row.DeletedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + row.PubID = formatUUID(pubUUID) + return &row, nil +} + +// QList returns all non-deleted items, newest first. +func QList(ctx context.Context, db *sql.DB) ([]Row, error) { + query := ` + SELECT id, pub_id, title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, created_at, deleted_at + FROM item + WHERE deleted_at IS NULL + ORDER BY created_at DESC + ` + + rows, err := db.QueryContext(ctx, query) + if err != nil { + return nil, err + } + defer rows.Close() + + var items []Row + for rows.Next() { + var row Row + var pubUUID pgtype.UUID + if err := rows.Scan( + &row.ID, &pubUUID, &row.Title, &row.Description, &row.LinkURL, + &row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, + &row.CreatedAt, &row.DeletedAt, + ); err != nil { + return nil, err + } + row.PubID = formatUUID(pubUUID) + items = append(items, row) + } + return items, rows.Err() +} + +// QListByTag returns all non-deleted items with a specific tag, newest first. 
+func QListByTag(ctx context.Context, db *sql.DB, tagName string) ([]Row, error) { + query := ` + SELECT i.id, i.pub_id, i.title, i.description, i.link_url, i.item_type, i.embed_provider, i.embed_video_id, i.embed_html, i.created_at, i.deleted_at + FROM item i + JOIN item_tag it ON i.id = it.item_id + JOIN tag t ON it.tag_id = t.id + WHERE i.deleted_at IS NULL AND t.name = $1 + ORDER BY i.created_at DESC + ` + + rows, err := db.QueryContext(ctx, query, tagName) + if err != nil { + return nil, err + } + defer rows.Close() + + var items []Row + for rows.Next() { + var row Row + var pubUUID pgtype.UUID + if err := rows.Scan( + &row.ID, &pubUUID, &row.Title, &row.Description, &row.LinkURL, + &row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, + &row.CreatedAt, &row.DeletedAt, + ); err != nil { + return nil, err + } + row.PubID = formatUUID(pubUUID) + items = append(items, row) + } + return items, rows.Err() +} + +type UpdateParams struct { + Title *string + Description *string + LinkURL *string +} + +// QUpdate updates an item's editable fields. +func QUpdate(ctx context.Context, db *sql.DB, id int64, p UpdateParams) error { + query := ` + UPDATE item + SET title = $2, description = $3, link_url = $4 + WHERE id = $1 + ` + _, err := db.ExecContext(ctx, query, id, p.Title, p.Description, p.LinkURL) + return err +} + +// QUpdateType updates an item's type. +func QUpdateType(ctx context.Context, db *sql.DB, id int64, itemType string) error { + query := `UPDATE item SET item_type = $2 WHERE id = $1` + _, err := db.ExecContext(ctx, query, id, itemType) + return err +} + +// QSoftDelete soft deletes an item. +func QSoftDelete(ctx context.Context, db *sql.DB, id int64) error { + query := `UPDATE item SET deleted_at = NOW() WHERE id = $1` + _, err := db.ExecContext(ctx, query, id) + return err +} + +// QRestore restores a soft-deleted item. 
+func QRestore(ctx context.Context, db *sql.DB, id int64) error { + query := `UPDATE item SET deleted_at = NULL WHERE id = $1` + _, err := db.ExecContext(ctx, query, id) + return err +} + +func formatUUID(u pgtype.UUID) string { + if !u.Valid { + return "" + } + b := u.Bytes + return fmt.Sprintf("%x-%x-%x-%x-%x", b[0:4], b[4:6], b[6:8], b[8:10], b[10:16]) +} diff --git a/internal/data/media/queries.go b/internal/data/media/queries.go new file mode 100644 index 0000000..c48de2e --- /dev/null +++ b/internal/data/media/queries.go @@ -0,0 +1,158 @@ +package media + +import ( + "context" + "database/sql" + "time" +) + +type Row struct { + ID int64 + ItemID int64 + MediaType string // 'original', 'thumbnail' + ContentType string // MIME type + Data []byte + Width *int + Height *int + SourceURL *string // Original URL the media was fetched from + CreatedAt time.Time +} + +type CreateParams struct { + ItemID int64 + MediaType string + ContentType string + Data []byte + Width *int + Height *int + SourceURL *string +} + +// QCreate creates a new media record. +func QCreate(ctx context.Context, db *sql.DB, p CreateParams) (Row, error) { + query := ` + INSERT INTO media (item_id, media_type, content_type, data, width, height, source_url) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id, item_id, media_type, content_type, data, width, height, source_url, created_at + ` + + var row Row + err := db.QueryRowContext(ctx, query, + p.ItemID, p.MediaType, p.ContentType, p.Data, p.Width, p.Height, p.SourceURL, + ).Scan( + &row.ID, &row.ItemID, &row.MediaType, &row.ContentType, &row.Data, + &row.Width, &row.Height, &row.SourceURL, &row.CreatedAt, + ) + return row, err +} + +// QFindByID finds a media record by ID. 
+func QFindByID(ctx context.Context, db *sql.DB, id int64) (*Row, error) { + query := ` + SELECT id, item_id, media_type, content_type, data, width, height, source_url, created_at + FROM media + WHERE id = $1 + ` + + var row Row + err := db.QueryRowContext(ctx, query, id).Scan( + &row.ID, &row.ItemID, &row.MediaType, &row.ContentType, &row.Data, + &row.Width, &row.Height, &row.SourceURL, &row.CreatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + return &row, nil +} + +// QFindByItemID finds all media for an item. +func QFindByItemID(ctx context.Context, db *sql.DB, itemID int64) ([]Row, error) { + query := ` + SELECT id, item_id, media_type, content_type, data, width, height, source_url, created_at + FROM media + WHERE item_id = $1 + ORDER BY media_type ASC + ` + + rows, err := db.QueryContext(ctx, query, itemID) + if err != nil { + return nil, err + } + defer rows.Close() + + var media []Row + for rows.Next() { + var row Row + if err := rows.Scan( + &row.ID, &row.ItemID, &row.MediaType, &row.ContentType, &row.Data, + &row.Width, &row.Height, &row.SourceURL, &row.CreatedAt, + ); err != nil { + return nil, err + } + media = append(media, row) + } + return media, rows.Err() +} + +// QFindThumbnailByItemID finds the thumbnail for an item. +func QFindThumbnailByItemID(ctx context.Context, db *sql.DB, itemID int64) (*Row, error) { + query := ` + SELECT id, item_id, media_type, content_type, data, width, height, source_url, created_at + FROM media + WHERE item_id = $1 AND media_type = 'thumbnail' + LIMIT 1 + ` + + var row Row + err := db.QueryRowContext(ctx, query, itemID).Scan( + &row.ID, &row.ItemID, &row.MediaType, &row.ContentType, &row.Data, + &row.Width, &row.Height, &row.SourceURL, &row.CreatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + return &row, nil +} + +// QFindOriginalByItemID finds the original media for an item. 
+func QFindOriginalByItemID(ctx context.Context, db *sql.DB, itemID int64) (*Row, error) { + query := ` + SELECT id, item_id, media_type, content_type, data, width, height, source_url, created_at + FROM media + WHERE item_id = $1 AND media_type = 'original' + LIMIT 1 + ` + + var row Row + err := db.QueryRowContext(ctx, query, itemID).Scan( + &row.ID, &row.ItemID, &row.MediaType, &row.ContentType, &row.Data, + &row.Width, &row.Height, &row.SourceURL, &row.CreatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + return &row, nil +} + +// QDelete deletes a media record by ID. +func QDelete(ctx context.Context, db *sql.DB, id int64) error { + query := `DELETE FROM media WHERE id = $1` + _, err := db.ExecContext(ctx, query, id) + return err +} + +// QDeleteByItemID deletes all media for an item. +func QDeleteByItemID(ctx context.Context, db *sql.DB, itemID int64) error { + query := `DELETE FROM media WHERE item_id = $1` + _, err := db.ExecContext(ctx, query, itemID) + return err +} diff --git a/internal/data/session/queries.go b/internal/data/session/queries.go new file mode 100644 index 0000000..15386fc --- /dev/null +++ b/internal/data/session/queries.go @@ -0,0 +1,73 @@ +package session + +import ( + "context" + "database/sql" + "time" +) + +type Row struct { + ID int64 + SessionID string + CreatedAt time.Time + ExpiresAt time.Time +} + +// QCreate creates a new session. +func QCreate(ctx context.Context, db *sql.DB, sessionID string, expiresAt time.Time) (Row, error) { + query := ` + INSERT INTO session (session_id, expires_at) + VALUES ($1, $2) + RETURNING id, session_id, created_at, expires_at + ` + + var row Row + err := db.QueryRowContext(ctx, query, sessionID, expiresAt).Scan( + &row.ID, + &row.SessionID, + &row.CreatedAt, + &row.ExpiresAt, + ) + return row, err +} + +// QFindBySessionID finds a session by its session ID. +// Returns (nil, nil) if the session does not exist. 
+func QFindBySessionID(ctx context.Context, db *sql.DB, sessionID string) (*Row, error) { + query := ` + SELECT id, session_id, created_at, expires_at + FROM session + WHERE session_id = $1 + LIMIT 1 + ` + + var row Row + err := db.QueryRowContext(ctx, query, sessionID).Scan( + &row.ID, + &row.SessionID, + &row.CreatedAt, + &row.ExpiresAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + return &row, nil +} + +// QDelete deletes a session by its session ID. +func QDelete(ctx context.Context, db *sql.DB, sessionID string) error { + query := `DELETE FROM session WHERE session_id = $1` + _, err := db.ExecContext(ctx, query, sessionID) + return err +} + +// QDeleteExpired deletes all expired sessions. +func QDeleteExpired(ctx context.Context, db *sql.DB) error { + query := `DELETE FROM session WHERE expires_at < NOW()` + _, err := db.ExecContext(ctx, query) + return err +} diff --git a/internal/data/tag/queries.go b/internal/data/tag/queries.go new file mode 100644 index 0000000..ca39bc4 --- /dev/null +++ b/internal/data/tag/queries.go @@ -0,0 +1,153 @@ +package tag + +import ( + "context" + "database/sql" +) + +type Row struct { + ID int64 + Name string +} + +// QFindOrCreate finds a tag by name or creates it if it doesn't exist. +func QFindOrCreate(ctx context.Context, db *sql.DB, name string) (Row, error) { + query := ` + INSERT INTO tag (name) + VALUES ($1) + ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name + RETURNING id, name + ` + + var row Row + err := db.QueryRowContext(ctx, query, name).Scan(&row.ID, &row.Name) + return row, err +} + +// QFindByName finds a tag by name. 
+func QFindByName(ctx context.Context, db *sql.DB, name string) (*Row, error) { + query := `SELECT id, name FROM tag WHERE name = $1` + + var row Row + err := db.QueryRowContext(ctx, query, name).Scan(&row.ID, &row.Name) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + return &row, nil +} + +// QList returns all tags ordered by name. +func QList(ctx context.Context, db *sql.DB) ([]Row, error) { + query := `SELECT id, name FROM tag ORDER BY name ASC` + + rows, err := db.QueryContext(ctx, query) + if err != nil { + return nil, err + } + defer rows.Close() + + var tags []Row + for rows.Next() { + var row Row + if err := rows.Scan(&row.ID, &row.Name); err != nil { + return nil, err + } + tags = append(tags, row) + } + return tags, rows.Err() +} + +// QSuggest returns tags matching a prefix. +func QSuggest(ctx context.Context, db *sql.DB, prefix string, limit int) ([]Row, error) { + query := ` + SELECT id, name FROM tag + WHERE name ILIKE $1 || '%' + ORDER BY name ASC + LIMIT $2 + ` + + rows, err := db.QueryContext(ctx, query, prefix, limit) + if err != nil { + return nil, err + } + defer rows.Close() + + var tags []Row + for rows.Next() { + var row Row + if err := rows.Scan(&row.ID, &row.Name); err != nil { + return nil, err + } + tags = append(tags, row) + } + return tags, rows.Err() +} + +// QTagsForItem returns all tags for an item. +func QTagsForItem(ctx context.Context, db *sql.DB, itemID int64) ([]Row, error) { + query := ` + SELECT t.id, t.name + FROM tag t + JOIN item_tag it ON t.id = it.tag_id + WHERE it.item_id = $1 + ORDER BY t.name ASC + ` + + rows, err := db.QueryContext(ctx, query, itemID) + if err != nil { + return nil, err + } + defer rows.Close() + + var tags []Row + for rows.Next() { + var row Row + if err := rows.Scan(&row.ID, &row.Name); err != nil { + return nil, err + } + tags = append(tags, row) + } + return tags, rows.Err() +} + +// QAddTagToItem adds a tag to an item. 
// QAddTagToItem links a tag to an item; linking an already-linked tag is
// a no-op thanks to ON CONFLICT DO NOTHING.
func QAddTagToItem(ctx context.Context, db *sql.DB, itemID, tagID int64) error {
	const query = `
		INSERT INTO item_tag (item_id, tag_id)
		VALUES ($1, $2)
		ON CONFLICT DO NOTHING
	`
	_, err := db.ExecContext(ctx, query, itemID, tagID)
	return err
}

// QRemoveTagFromItem unlinks a tag from an item. Removing a link that
// does not exist is a no-op, not an error.
func QRemoveTagFromItem(ctx context.Context, db *sql.DB, itemID, tagID int64) error {
	_, err := db.ExecContext(ctx, `DELETE FROM item_tag WHERE item_id = $1 AND tag_id = $2`, itemID, tagID)
	return err
}

// QSetTagsForItem replaces all tags for an item with the given tag names,
// atomically: the delete and re-insert run in a single transaction, so a
// mid-way failure can no longer strand the item with a partial or empty
// tag set (the previous implementation deleted first, outside any tx).
func QSetTagsForItem(ctx context.Context, db *sql.DB, itemID int64, tagNames []string) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	// Rollback after a successful Commit is a harmless no-op.
	defer tx.Rollback()

	if _, err := tx.ExecContext(ctx, `DELETE FROM item_tag WHERE item_id = $1`, itemID); err != nil {
		return err
	}

	// Same upsert as QFindOrCreate, inlined here so it participates in
	// the transaction (QFindOrCreate operates on *sql.DB, not *sql.Tx).
	const upsertTag = `
		INSERT INTO tag (name)
		VALUES ($1)
		ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
		RETURNING id
	`
	const linkTag = `
		INSERT INTO item_tag (item_id, tag_id)
		VALUES ($1, $2)
		ON CONFLICT DO NOTHING
	`
	for _, name := range tagNames {
		var tagID int64
		if err := tx.QueryRowContext(ctx, upsertTag, name).Scan(&tagID); err != nil {
			return err
		}
		if _, err := tx.ExecContext(ctx, linkTag, itemID, tagID); err != nil {
			return err
		}
	}

	return tx.Commit()
}
+type VideoInfo struct { + Provider Provider + VideoID string + Title string + Description string + ThumbnailURL string + EmbedHTML string +} + +var ( + youtubeRegex = regexp.MustCompile(`(?:youtube\.com/(?:watch\?v=|embed/|v/|shorts/)|youtu\.be/)([a-zA-Z0-9_-]{11})`) + vimeoRegex = regexp.MustCompile(`(?:vimeo\.com/(?:video/)?|player\.vimeo\.com/video/)(\d+)`) + twitterRegex = regexp.MustCompile(`(?:twitter\.com|x\.com)/([^/]+)/status/(\d+)`) +) + +// Detect checks if a URL is a YouTube, Vimeo, or Twitter/X post and returns its info. +func Detect(ctx context.Context, targetURL string) (*VideoInfo, error) { + // Try YouTube + if matches := youtubeRegex.FindStringSubmatch(targetURL); len(matches) > 1 { + return fetchYouTube(ctx, matches[1]) + } + + // Try Vimeo + if matches := vimeoRegex.FindStringSubmatch(targetURL); len(matches) > 1 { + return fetchVimeo(ctx, matches[1]) + } + + // Try Twitter/X + if matches := twitterRegex.FindStringSubmatch(targetURL); len(matches) > 2 { + return fetchTwitter(ctx, matches[2], targetURL) + } + + return nil, nil // Not a recognized embed +} + +func fetchYouTube(ctx context.Context, videoID string) (*VideoInfo, error) { + // YouTube thumbnails are available without API + thumbnailURL := fmt.Sprintf("https://img.youtube.com/vi/%s/maxresdefault.jpg", videoID) + + // Try to get metadata via oEmbed + oembedURL := fmt.Sprintf("https://www.youtube.com/oembed?url=%s&format=json", + url.QueryEscape("https://www.youtube.com/watch?v="+videoID)) + + var title string + if meta, err := fetchOEmbed(ctx, oembedURL); err == nil { + title = meta.Title + } + + embedHTML := fmt.Sprintf( + ``, + videoID, + ) + + return &VideoInfo{ + Provider: ProviderYouTube, + VideoID: videoID, + Title: title, + ThumbnailURL: thumbnailURL, + EmbedHTML: embedHTML, + }, nil +} + +func fetchVimeo(ctx context.Context, videoID string) (*VideoInfo, error) { + oembedURL := fmt.Sprintf("https://vimeo.com/api/oembed.json?url=%s", + 
url.QueryEscape("https://vimeo.com/"+videoID)) + + meta, err := fetchOEmbed(ctx, oembedURL) + if err != nil { + return nil, fmt.Errorf("vimeo oembed: %w", err) + } + + embedHTML := fmt.Sprintf( + ``, + videoID, + ) + + return &VideoInfo{ + Provider: ProviderVimeo, + VideoID: videoID, + Title: meta.Title, + Description: meta.Description, + ThumbnailURL: meta.ThumbnailURL, + EmbedHTML: embedHTML, + }, nil +} + +// twitterSyndicationResponse represents the Twitter syndication API response +type twitterSyndicationResponse struct { + Text string `json:"text"` + User struct { + Name string `json:"name"` + ScreenName string `json:"screen_name"` + } `json:"user"` + Photos []struct { + URL string `json:"url"` + Width int `json:"width"` + Height int `json:"height"` + } `json:"photos"` + MediaDetails []struct { + MediaURLHTTPS string `json:"media_url_https"` + Type string `json:"type"` + } `json:"mediaDetails"` + Video struct { + Poster string `json:"poster"` + } `json:"video"` +} + +func fetchTwitter(ctx context.Context, tweetID string, originalURL string) (*VideoInfo, error) { + apiURL := fmt.Sprintf("https://cdn.syndication.twimg.com/tweet-result?id=%s&token=0", tweetID) + + req, err := http.NewRequestWithContext(ctx, "GET", apiURL, nil) + if err != nil { + return nil, err + } + req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)") + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("twitter syndication: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("twitter syndication status: %d", resp.StatusCode) + } + + var tweet twitterSyndicationResponse + if err := json.NewDecoder(resp.Body).Decode(&tweet); err != nil { + return nil, fmt.Errorf("twitter syndication decode: %w", err) + } + + // Find thumbnail - prefer photos, then video poster + var thumbnailURL string + if len(tweet.Photos) > 0 { + thumbnailURL = tweet.Photos[0].URL 
+ } else if len(tweet.MediaDetails) > 0 { + thumbnailURL = tweet.MediaDetails[0].MediaURLHTTPS + } else if tweet.Video.Poster != "" { + thumbnailURL = tweet.Video.Poster + } + + // Build embed HTML using Twitter's embed widget + embedHTML := fmt.Sprintf( + ``, + originalURL, + ) + + title := fmt.Sprintf("@%s", tweet.User.ScreenName) + if tweet.User.Name != "" { + title = fmt.Sprintf("%s (@%s)", tweet.User.Name, tweet.User.ScreenName) + } + + return &VideoInfo{ + Provider: ProviderTwitter, + VideoID: tweetID, + Title: title, + Description: tweet.Text, + ThumbnailURL: thumbnailURL, + EmbedHTML: embedHTML, + }, nil +} + +type oembedResponse struct { + Title string `json:"title"` + Description string `json:"description"` + ThumbnailURL string `json:"thumbnail_url"` +} + +func fetchOEmbed(ctx context.Context, oembedURL string) (*oembedResponse, error) { + req, err := http.NewRequestWithContext(ctx, "GET", oembedURL, nil) + if err != nil { + return nil, err + } + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("oembed status: %d", resp.StatusCode) + } + + var meta oembedResponse + if err := json.NewDecoder(resp.Body).Decode(&meta); err != nil { + return nil, err + } + + return &meta, nil +} + +// DownloadThumbnail downloads the thumbnail image for a video. 
+func DownloadThumbnail(ctx context.Context, thumbnailURL string) ([]byte, string, error) { + req, err := http.NewRequestWithContext(ctx, "GET", thumbnailURL, nil) + if err != nil { + return nil, "", err + } + req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)") + + client := &http.Client{Timeout: 30 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, "", err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, "", fmt.Errorf("thumbnail status: %d", resp.StatusCode) + } + + contentType := resp.Header.Get("Content-Type") + + data := make([]byte, 0, 1<<20) // 1MB initial capacity + buf := make([]byte, 32*1024) + for { + n, err := resp.Body.Read(buf) + if n > 0 { + data = append(data, buf[:n]...) + } + if err != nil { + break + } + } + + return data, contentType, nil +} diff --git a/internal/handlers/api_auth.go b/internal/handlers/api_auth.go new file mode 100644 index 0000000..9bc6555 --- /dev/null +++ b/internal/handlers/api_auth.go @@ -0,0 +1,141 @@ +package handlers + +import ( + "context" + "crypto/rand" + "encoding/base64" + "encoding/json" + "net/http" + "time" + + "golang.org/x/crypto/bcrypt" + + "lookbook/internal/data/admin" + "lookbook/internal/data/session" +) + +const sessionDuration = 30 * 24 * time.Hour // 30 days + +type loginRequest struct { + Password string `json:"password"` +} + +type loginResponse struct { + FirstTime bool `json:"firstTime,omitempty"` + Error string `json:"error,omitempty"` +} + +// HandleLogin handles POST /api/auth/login +// If no password is set, it sets the password. Otherwise, it verifies the password. 
+func HandleLogin(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + var req loginRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return writeJSON(w, http.StatusBadRequest, loginResponse{Error: "invalid request"}) + } + + if req.Password == "" { + return writeJSON(w, http.StatusBadRequest, loginResponse{Error: "password required"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second) + defer cancel() + + adm, err := admin.QGet(ctx, rc.DB) + if err != nil { + return err + } + + firstTime := adm.PasswordHash == nil + + if firstTime { + // First login: set the password + hash, err := bcrypt.GenerateFromPassword([]byte(req.Password), bcrypt.DefaultCost) + if err != nil { + return err + } + if err := admin.QSetPassword(ctx, rc.DB, hash); err != nil { + return err + } + } else { + // Verify password + if err := bcrypt.CompareHashAndPassword(adm.PasswordHash, []byte(req.Password)); err != nil { + return writeJSON(w, http.StatusUnauthorized, loginResponse{Error: "invalid password"}) + } + } + + // Create session + sessionID, err := generateSessionID() + if err != nil { + return err + } + + expiresAt := time.Now().Add(sessionDuration) + if _, err := session.QCreate(ctx, rc.DB, sessionID, expiresAt); err != nil { + return err + } + + // Set cookie + http.SetCookie(w, &http.Cookie{ + Name: "session_id", + Value: sessionID, + Path: "/", + Expires: expiresAt, + HttpOnly: true, + SameSite: http.SameSiteLaxMode, + Secure: r.TLS != nil, + }) + + return writeJSON(w, http.StatusOK, loginResponse{FirstTime: firstTime}) +} + +// HandleLogout handles POST /api/auth/logout +func HandleLogout(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + cookie, err := r.Cookie("session_id") + if err == nil { + ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second) + defer cancel() + session.QDelete(ctx, rc.DB, cookie.Value) + } + + // Clear cookie + http.SetCookie(w, &http.Cookie{ + Name: "session_id", + Value: 
// generateSessionID returns a fresh, cryptographically random session
// token: 32 bytes from crypto/rand, URL-safe base64 encoded.
func generateSessionID() (string, error) {
	raw := make([]byte, 32)
	if _, err := rand.Read(raw); err != nil {
		return "", err
	}
	return base64.URLEncoding.EncodeToString(raw), nil
}

// writeJSON serializes v as JSON to w with the given HTTP status code.
// Content-Type is set before WriteHeader, since headers are frozen once
// the status line is written.
func writeJSON(w http.ResponseWriter, status int, v any) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(status)
	return json.NewEncoder(w).Encode(v)
}
+ ItemType string `json:"itemType"` // 'image', 'video', 'quote', 'embed' + EmbedProvider *string `json:"embedProvider"` + EmbedVideoID *string `json:"embedVideoId"` + EmbedHTML *string `json:"embedHtml"` + Tags []string `json:"tags"` +} + +type updateItemRequest struct { + Title *string `json:"title"` + Description *string `json:"description"` + LinkURL *string `json:"linkUrl"` + Tags []string `json:"tags"` +} + +// HandleListItems handles GET /api/items +func HandleListItems(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + tagFilter := r.URL.Query().Get("tag") + + var items []item.Row + var err error + if tagFilter != "" { + items, err = item.QListByTag(ctx, rc.DB, tagFilter) + } else { + items, err = item.QList(ctx, rc.DB) + } + if err != nil { + return err + } + + response := make([]itemResponse, 0, len(items)) + for _, it := range items { + resp, err := buildItemResponse(ctx, rc, it) + if err != nil { + return err + } + response = append(response, resp) + } + + return writeJSON(w, http.StatusOK, response) +} + +// HandleGetItem handles GET /api/items/{id} +func HandleGetItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + pubID := r.PathValue("id") + if pubID == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second) + defer cancel() + + it, err := item.QFindByPubID(ctx, rc.DB, pubID) + if err != nil { + return err + } + if it == nil { + return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"}) + } + + resp, err := buildItemResponse(ctx, rc, *it) + if err != nil { + return err + } + + return writeJSON(w, http.StatusOK, resp) +} + +// HandleCreateItem handles POST /api/items +func HandleCreateItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } 
+ + var req createItemRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"}) + } + + if req.ItemType == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "itemType required"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + it, err := item.QCreate(ctx, rc.DB, item.CreateParams{ + Title: req.Title, + Description: req.Description, + LinkURL: req.LinkURL, + ItemType: req.ItemType, + EmbedProvider: req.EmbedProvider, + EmbedVideoID: req.EmbedVideoID, + EmbedHTML: req.EmbedHTML, + }) + if err != nil { + return err + } + + // Set tags + if len(req.Tags) > 0 { + if err := tag.QSetTagsForItem(ctx, rc.DB, it.ID, req.Tags); err != nil { + return err + } + } + + resp, err := buildItemResponse(ctx, rc, it) + if err != nil { + return err + } + + return writeJSON(w, http.StatusCreated, resp) +} + +// HandleUpdateItem handles PUT /api/items/{id} +func HandleUpdateItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } + + pubID := r.PathValue("id") + if pubID == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"}) + } + + var req updateItemRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + it, err := item.QFindByPubID(ctx, rc.DB, pubID) + if err != nil { + return err + } + if it == nil { + return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"}) + } + + if err := item.QUpdate(ctx, rc.DB, it.ID, item.UpdateParams{ + Title: req.Title, + Description: req.Description, + LinkURL: req.LinkURL, + }); err != nil { + return err + } + + // Update tags + if err := tag.QSetTagsForItem(ctx, 
rc.DB, it.ID, req.Tags); err != nil { + return err + } + + // Refetch to return updated item + it, err = item.QFindByID(ctx, rc.DB, it.ID) + if err != nil { + return err + } + + resp, err := buildItemResponse(ctx, rc, *it) + if err != nil { + return err + } + + return writeJSON(w, http.StatusOK, resp) +} + +// HandleDeleteItem handles DELETE /api/items/{id} +func HandleDeleteItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } + + pubID := r.PathValue("id") + if pubID == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second) + defer cancel() + + it, err := item.QFindByPubID(ctx, rc.DB, pubID) + if err != nil { + return err + } + if it == nil { + return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"}) + } + + if err := item.QSoftDelete(ctx, rc.DB, it.ID); err != nil { + return err + } + + return writeJSON(w, http.StatusOK, map[string]string{"status": "deleted"}) +} + +func buildItemResponse(ctx context.Context, rc *RequestContext, it item.Row) (itemResponse, error) { + tags, err := tag.QTagsForItem(ctx, rc.DB, it.ID) + if err != nil { + return itemResponse{}, err + } + + tagNames := make([]string, len(tags)) + for i, t := range tags { + tagNames[i] = t.Name + } + + resp := itemResponse{ + ID: it.PubID, + Title: it.Title, + Description: it.Description, + LinkURL: it.LinkURL, + ItemType: it.ItemType, + EmbedHTML: it.EmbedHTML, + Tags: tagNames, + CreatedAt: it.CreatedAt.Format(time.RFC3339), + } + + // Get media IDs + mediaList, err := media.QFindByItemID(ctx, rc.DB, it.ID) + if err != nil { + return itemResponse{}, err + } + for _, m := range mediaList { + if m.MediaType == "original" { + resp.MediaID = &m.ID + } else if m.MediaType == "thumbnail" { + resp.ThumbnailID = &m.ID + resp.ThumbnailSourceURL = m.SourceURL + } + } + + return resp, nil +} diff --git 
a/internal/handlers/api_tags.go b/internal/handlers/api_tags.go new file mode 100644 index 0000000..6f6a3fa --- /dev/null +++ b/internal/handlers/api_tags.go @@ -0,0 +1,47 @@ +package handlers + +import ( + "context" + "net/http" + "time" + + "lookbook/internal/data/tag" +) + +// HandleListTags handles GET /api/tags +func HandleListTags(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second) + defer cancel() + + tags, err := tag.QList(ctx, rc.DB) + if err != nil { + return err + } + + names := make([]string, len(tags)) + for i, t := range tags { + names[i] = t.Name + } + + return writeJSON(w, http.StatusOK, names) +} + +// HandleSuggestTags handles GET /api/tags/suggest?q=... +func HandleSuggestTags(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + prefix := r.URL.Query().Get("q") + + ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second) + defer cancel() + + tags, err := tag.QSuggest(ctx, rc.DB, prefix, 10) + if err != nil { + return err + } + + names := make([]string, len(tags)) + for i, t := range tags { + names[i] = t.Name + } + + return writeJSON(w, http.StatusOK, names) +} diff --git a/internal/handlers/api_upload.go b/internal/handlers/api_upload.go new file mode 100644 index 0000000..7df0a83 --- /dev/null +++ b/internal/handlers/api_upload.go @@ -0,0 +1,577 @@ +package handlers + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + "time" + + "lookbook/internal/data/item" + "lookbook/internal/data/media" + "lookbook/internal/data/tag" + "lookbook/internal/embed" + "lookbook/internal/opengraph" + "lookbook/internal/video" +) + +type urlMetadata struct { + Title string + Description string + ImageURL string + SiteName string + IsEmbed bool + Provider string + VideoID string + EmbedHTML string +} + +// fetchURLMetadata fetches metadata for a URL, trying embed detection first, then OpenGraph +func fetchURLMetadata(ctx 
context.Context, url string) (*urlMetadata, error) { + // Check if it's a YouTube/Vimeo/Twitter embed + videoInfo, err := embed.Detect(ctx, url) + if err == nil && videoInfo != nil { + return &urlMetadata{ + Title: videoInfo.Title, + Description: videoInfo.Description, + ImageURL: videoInfo.ThumbnailURL, + IsEmbed: true, + Provider: string(videoInfo.Provider), + VideoID: videoInfo.VideoID, + EmbedHTML: videoInfo.EmbedHTML, + }, nil + } + + // Fetch OpenGraph metadata + meta, err := opengraph.Fetch(ctx, url) + if err != nil { + return nil, err + } + + return &urlMetadata{ + Title: meta.Title, + Description: meta.Description, + ImageURL: meta.ImageURL, + SiteName: meta.SiteName, + IsEmbed: false, + }, nil +} + +type previewResponse struct { + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + ImageURL string `json:"imageUrl,omitempty"` + SiteName string `json:"siteName,omitempty"` + IsEmbed bool `json:"isEmbed"` + Provider string `json:"provider,omitempty"` + VideoID string `json:"videoId,omitempty"` + EmbedHTML string `json:"embedHtml,omitempty"` +} + +// HandlePreviewLink handles POST /api/preview - fetches metadata for a URL +func HandlePreviewLink(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } + + var req struct { + URL string `json:"url"` + } + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"}) + } + + if req.URL == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "url required"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second) + defer cancel() + + meta, err := fetchURLMetadata(ctx, req.URL) + if err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": fmt.Sprintf("failed to fetch: %v", err)}) + } + + return writeJSON(w, http.StatusOK, previewResponse{ + Title: meta.Title, + 
Description: meta.Description, + ImageURL: meta.ImageURL, + SiteName: meta.SiteName, + IsEmbed: meta.IsEmbed, + Provider: meta.Provider, + VideoID: meta.VideoID, + EmbedHTML: meta.EmbedHTML, + }) +} + +type createFromLinkRequest struct { + URL string `json:"url"` + Title *string `json:"title"` + Description *string `json:"description"` + Tags []string `json:"tags"` + // For embeds: + Provider *string `json:"provider"` + VideoID *string `json:"videoId"` + EmbedHTML *string `json:"embedHtml"` + // For downloading hero image: + ImageURL *string `json:"imageUrl"` +} + +// HandleCreateFromLink handles POST /api/items/from-link +func HandleCreateFromLink(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } + + var req createFromLinkRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second) + defer cancel() + + var itemType string + var embedProvider, embedVideoID, embedHTML *string + + if req.Provider != nil && *req.Provider != "" { + // It's an embed + itemType = "embed" + embedProvider = req.Provider + embedVideoID = req.VideoID + embedHTML = req.EmbedHTML + } else if req.ImageURL != nil && *req.ImageURL != "" { + // It's a link with an image + itemType = "image" + } else { + // Just a link (will be shown as a card) + itemType = "link" + } + + // Create the item + it, err := item.QCreate(ctx, rc.DB, item.CreateParams{ + Title: req.Title, + Description: req.Description, + LinkURL: &req.URL, + ItemType: itemType, + EmbedProvider: embedProvider, + EmbedVideoID: embedVideoID, + EmbedHTML: embedHTML, + }) + if err != nil { + return err + } + + // Download and store image if available + var imageURL string + if req.ImageURL != nil { + imageURL = *req.ImageURL + } + + // For embeds, fetch thumbnail + if itemType == "embed" && embedProvider != 
nil { + if videoInfo, err := embed.Detect(ctx, req.URL); err == nil && videoInfo != nil { + imageURL = videoInfo.ThumbnailURL + } + } + + if imageURL != "" { + imgData, contentType, err := opengraph.DownloadImage(ctx, imageURL) + if err != nil { + rc.Logger.Warn("failed to download image", "url", imageURL, "error", err) + } else { + _, err = media.QCreate(ctx, rc.DB, media.CreateParams{ + ItemID: it.ID, + MediaType: "thumbnail", + ContentType: contentType, + Data: imgData, + SourceURL: &imageURL, + }) + if err != nil { + rc.Logger.Warn("failed to store image", "error", err) + } + } + } + + // Set tags + if len(req.Tags) > 0 { + if err := tag.QSetTagsForItem(ctx, rc.DB, it.ID, req.Tags); err != nil { + return err + } + } + + resp, err := buildItemResponse(ctx, rc, it) + if err != nil { + return err + } + + return writeJSON(w, http.StatusCreated, resp) +} + +// HandleUpload handles POST /api/items/upload - multipart file upload +func HandleUpload(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } + + // Parse multipart form (max 500MB) + if err := r.ParseMultipartForm(500 << 20); err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "failed to parse form"}) + } + + file, header, err := r.FormFile("file") + if err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "file required"}) + } + defer file.Close() + + title := r.FormValue("title") + description := r.FormValue("description") + tagsStr := r.FormValue("tags") + + var tags []string + if tagsStr != "" { + tags = strings.Split(tagsStr, ",") + for i := range tags { + tags[i] = strings.TrimSpace(tags[i]) + } + } + + ctx, cancel := context.WithTimeout(r.Context(), 5*time.Minute) + defer cancel() + + // Read file data + data, err := io.ReadAll(file) + if err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "failed to read file"}) + } + + contentType := 
header.Header.Get("Content-Type") + if contentType == "" { + contentType = http.DetectContentType(data) + } + + var itemType string + var originalData, thumbnailData []byte + var originalContentType string + + if video.IsVideo(contentType) { + itemType = "video" + // Process video: transcode and extract thumbnail + transcoded, thumbnail, err := video.ProcessVideo(ctx, data, contentType) + if err != nil { + return writeJSON(w, http.StatusInternalServerError, map[string]string{"error": fmt.Sprintf("video processing failed: %v", err)}) + } + originalData = transcoded + originalContentType = "video/mp4" + thumbnailData = thumbnail + } else if video.IsImage(contentType) { + itemType = "image" + originalData = data + originalContentType = contentType + } else { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "unsupported file type"}) + } + + // Create item + var titlePtr, descPtr *string + if title != "" { + titlePtr = &title + } + if description != "" { + descPtr = &description + } + + it, err := item.QCreate(ctx, rc.DB, item.CreateParams{ + Title: titlePtr, + Description: descPtr, + ItemType: itemType, + }) + if err != nil { + return err + } + + // Store original media + _, err = media.QCreate(ctx, rc.DB, media.CreateParams{ + ItemID: it.ID, + MediaType: "original", + ContentType: originalContentType, + Data: originalData, + }) + if err != nil { + return err + } + + // Store thumbnail for videos + if len(thumbnailData) > 0 { + _, err = media.QCreate(ctx, rc.DB, media.CreateParams{ + ItemID: it.ID, + MediaType: "thumbnail", + ContentType: "image/jpeg", + Data: thumbnailData, + }) + if err != nil { + rc.Logger.Warn("failed to store thumbnail", "error", err) + } + } + + // Set tags + if len(tags) > 0 { + if err := tag.QSetTagsForItem(ctx, rc.DB, it.ID, tags); err != nil { + return err + } + } + + resp, err := buildItemResponse(ctx, rc, it) + if err != nil { + return err + } + + return writeJSON(w, http.StatusCreated, resp) +} + +type 
createQuoteRequest struct { + Text string `json:"text"` + Source *string `json:"source"` // Optional attribution + SourceURL *string `json:"sourceUrl"` + Tags []string `json:"tags"` +} + +// HandleCreateQuote handles POST /api/items/quote +func HandleCreateQuote(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } + + var req createQuoteRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"}) + } + + if req.Text == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "text required"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + it, err := item.QCreate(ctx, rc.DB, item.CreateParams{ + Title: req.Source, + Description: &req.Text, + LinkURL: req.SourceURL, + ItemType: "quote", + }) + if err != nil { + return err + } + + // Set tags + if len(req.Tags) > 0 { + if err := tag.QSetTagsForItem(ctx, rc.DB, it.ID, req.Tags); err != nil { + return err + } + } + + resp, err := buildItemResponse(ctx, rc, it) + if err != nil { + return err + } + + return writeJSON(w, http.StatusCreated, resp) +} + +// HandleReplaceMedia handles POST /api/items/{id}/media - replaces media for an item +func HandleReplaceMedia(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } + + pubID := r.PathValue("id") + if pubID == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"}) + } + + // Parse multipart form (max 500MB) + if err := r.ParseMultipartForm(500 << 20); err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "failed to parse form"}) + } + + file, header, err := r.FormFile("file") + if err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "file required"}) + } + defer file.Close() + + ctx, cancel := 
context.WithTimeout(r.Context(), 5*time.Minute) + defer cancel() + + it, err := item.QFindByPubID(ctx, rc.DB, pubID) + if err != nil { + return err + } + if it == nil { + return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"}) + } + + // Read file data + data, err := io.ReadAll(file) + if err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "failed to read file"}) + } + + contentType := header.Header.Get("Content-Type") + if contentType == "" { + contentType = http.DetectContentType(data) + } + + var originalData, thumbnailData []byte + var originalContentType string + var newItemType string + + if video.IsVideo(contentType) { + newItemType = "video" + transcoded, thumbnail, err := video.ProcessVideo(ctx, data, contentType) + if err != nil { + return writeJSON(w, http.StatusInternalServerError, map[string]string{"error": fmt.Sprintf("video processing failed: %v", err)}) + } + originalData = transcoded + originalContentType = "video/mp4" + thumbnailData = thumbnail + } else if video.IsImage(contentType) { + newItemType = "image" + originalData = data + originalContentType = contentType + } else { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "unsupported file type"}) + } + + // Delete existing media + media.QDeleteByItemID(ctx, rc.DB, it.ID) + + // Store new original media + _, err = media.QCreate(ctx, rc.DB, media.CreateParams{ + ItemID: it.ID, + MediaType: "original", + ContentType: originalContentType, + Data: originalData, + }) + if err != nil { + return err + } + + // Store thumbnail for videos + if len(thumbnailData) > 0 { + _, err = media.QCreate(ctx, rc.DB, media.CreateParams{ + ItemID: it.ID, + MediaType: "thumbnail", + ContentType: "image/jpeg", + Data: thumbnailData, + }) + if err != nil { + rc.Logger.Warn("failed to store thumbnail", "error", err) + } + } + + // Update item type if it changed (e.g., embed -> image) + if it.ItemType != newItemType && (it.ItemType == 
"embed" || it.ItemType == "link") { + item.QUpdateType(ctx, rc.DB, it.ID, newItemType) + } + + // Refetch and return updated item + it, err = item.QFindByID(ctx, rc.DB, it.ID) + if err != nil { + return err + } + + resp, err := buildItemResponse(ctx, rc, *it) + if err != nil { + return err + } + + return writeJSON(w, http.StatusOK, resp) +} + +// HandleRefreshMetadata handles POST /api/items/{id}/refresh +// Re-fetches metadata and thumbnail for an item with a link URL +func HandleRefreshMetadata(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + if !rc.RequireAdmin(w) { + return nil + } + + pubID := r.PathValue("id") + if pubID == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"}) + } + + ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second) + defer cancel() + + it, err := item.QFindByPubID(ctx, rc.DB, pubID) + if err != nil { + return err + } + if it == nil { + return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"}) + } + + if it.LinkURL == nil || *it.LinkURL == "" { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "item has no link URL"}) + } + + meta, err := fetchURLMetadata(ctx, *it.LinkURL) + if err != nil { + return writeJSON(w, http.StatusBadRequest, map[string]string{"error": fmt.Sprintf("failed to fetch: %v", err)}) + } + + // Update title and description + var titlePtr, descPtr *string + if meta.Title != "" { + titlePtr = &meta.Title + } + if meta.Description != "" { + descPtr = &meta.Description + } + item.QUpdate(ctx, rc.DB, it.ID, item.UpdateParams{ + Title: titlePtr, + Description: descPtr, + LinkURL: it.LinkURL, + }) + + // Download and replace thumbnail + if meta.ImageURL != "" { + // Delete existing media for this item + media.QDeleteByItemID(ctx, rc.DB, it.ID) + + imgData, contentType, err := opengraph.DownloadImage(ctx, meta.ImageURL) + if err != nil { + rc.Logger.Warn("failed to download image during refresh", "url", 
meta.ImageURL, "error", err) + } else { + _, err = media.QCreate(ctx, rc.DB, media.CreateParams{ + ItemID: it.ID, + MediaType: "thumbnail", + ContentType: contentType, + Data: imgData, + SourceURL: &meta.ImageURL, + }) + if err != nil { + rc.Logger.Warn("failed to store refreshed image", "error", err) + } + } + } + + // Refetch and return updated item + it, err = item.QFindByID(ctx, rc.DB, it.ID) + if err != nil { + return err + } + + resp, err := buildItemResponse(ctx, rc, *it) + if err != nil { + return err + } + + return writeJSON(w, http.StatusOK, resp) +} diff --git a/internal/handlers/context.go b/internal/handlers/context.go new file mode 100644 index 0000000..9f4ee77 --- /dev/null +++ b/internal/handlers/context.go @@ -0,0 +1,27 @@ +package handlers + +import ( + "database/sql" + "log/slog" + "net/http" + + "git.soup.land/soup/sxgo/ssr" +) + +// RequestContext holds dependencies that are injected into every request handler. +type RequestContext struct { + DB *sql.DB + Logger *slog.Logger + TmplCache *ssr.TmplCache + IsAdmin bool // true if authenticated as admin +} + +// RequireAdmin checks authentication and returns 401 if not admin. +// Returns true if authenticated, false if 401 was sent. +func (rc *RequestContext) RequireAdmin(w http.ResponseWriter) bool { + if !rc.IsAdmin { + http.Error(w, "Unauthorized", http.StatusUnauthorized) + return false + } + return true +} diff --git a/internal/handlers/handler.go b/internal/handlers/handler.go new file mode 100644 index 0000000..4878f12 --- /dev/null +++ b/internal/handlers/handler.go @@ -0,0 +1,49 @@ +package handlers + +import ( + "log/slog" + "net/http" +) + +// Handler is a function that handles HTTP requests and can return an error. +// It receives a RequestContext with injected dependencies (DB, Logger). +type Handler func(rc *RequestContext, w http.ResponseWriter, r *http.Request) error + +// WithErrorHandling wraps a Handler to automatically handle errors. 
+func WithErrorHandling(rc *RequestContext, h Handler) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + wrapper := &responseWrapper{ + ResponseWriter: w, + written: false, + } + + err := h(rc, wrapper, r) + if err != nil { + rc.Logger.Error("handler error", + slog.String("method", r.Method), + slog.String("path", r.URL.Path), + slog.Any("error", err), + ) + + if !wrapper.written { + http.Error(w, "Internal Server Error", http.StatusInternalServerError) + } + } + } +} + +// responseWrapper tracks if response was written +type responseWrapper struct { + http.ResponseWriter + written bool +} + +func (w *responseWrapper) Write(b []byte) (int, error) { + w.written = true + return w.ResponseWriter.Write(b) +} + +func (w *responseWrapper) WriteHeader(statusCode int) { + w.written = true + w.ResponseWriter.WriteHeader(statusCode) +} diff --git a/internal/handlers/home.go b/internal/handlers/home.go new file mode 100644 index 0000000..eb8ba14 --- /dev/null +++ b/internal/handlers/home.go @@ -0,0 +1,213 @@ +package handlers + +import ( + "context" + "net/http" + "time" + + "git.soup.land/soup/sxgo/ssr" + "lookbook/internal/components" + "lookbook/internal/data/item" + "lookbook/internal/data/media" + "lookbook/internal/data/tag" +) + +type homeContent struct { + Items []homeItem + Tags []string + ActiveTag string + IsAdmin bool +} + +type homeItem struct { + ID string + Title *string + Description *string + LinkURL *string + ItemType string + EmbedHTML *string + Tags []string + ThumbnailID *int64 + MediaID *int64 +} + +func (h homeContent) Render(sw *ssr.Writer) error { + return sw.Tmpl(h, ` + + +{{if .IsAdmin}} + +{{end}} +`) +} + +// HandleHome handles GET / +func HandleHome(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + tagFilter := r.URL.Query().Get("tag") + + var items []item.Row + var err error + if tagFilter != "" { + items, err = 
item.QListByTag(ctx, rc.DB, tagFilter) + } else { + items, err = item.QList(ctx, rc.DB) + } + if err != nil { + return err + } + + // Get all tags + allTags, err := tag.QList(ctx, rc.DB) + if err != nil { + return err + } + tagNames := make([]string, len(allTags)) + for i, t := range allTags { + tagNames[i] = t.Name + } + + // Build home items + homeItems := make([]homeItem, 0, len(items)) + for _, it := range items { + hi := homeItem{ + ID: it.PubID, + Title: it.Title, + Description: it.Description, + LinkURL: it.LinkURL, + ItemType: it.ItemType, + EmbedHTML: it.EmbedHTML, + } + + // Get tags + itemTags, err := tag.QTagsForItem(ctx, rc.DB, it.ID) + if err != nil { + return err + } + hi.Tags = make([]string, len(itemTags)) + for i, t := range itemTags { + hi.Tags[i] = t.Name + } + + // Get media + mediaList, err := media.QFindByItemID(ctx, rc.DB, it.ID) + if err != nil { + return err + } + for _, m := range mediaList { + if m.MediaType == "thumbnail" { + hi.ThumbnailID = &m.ID + } else if m.MediaType == "original" { + hi.MediaID = &m.ID + } + } + + homeItems = append(homeItems, hi) + } + + content := homeContent{ + Items: homeItems, + Tags: tagNames, + ActiveTag: tagFilter, + IsAdmin: rc.IsAdmin, + } + + w.Header().Set("Content-Type", "text/html; charset=utf-8") + sw := ssr.NewWriter(w, rc.TmplCache) + + page := components.Page{ + Title: "", + IsAdmin: rc.IsAdmin, + Content: content, + } + + return page.Render(sw) +} diff --git a/internal/handlers/item_page.go b/internal/handlers/item_page.go new file mode 100644 index 0000000..c0bc609 --- /dev/null +++ b/internal/handlers/item_page.go @@ -0,0 +1,205 @@ +package handlers + +import ( + "context" + "net/http" + "time" + + "git.soup.land/soup/sxgo/ssr" + "lookbook/internal/components" + "lookbook/internal/data/item" + "lookbook/internal/data/media" + "lookbook/internal/data/tag" +) + +type itemPageContent struct { + Item itemPageData + IsAdmin bool +} + +type itemPageData struct { + ID string + Title *string + 
Description *string + LinkURL *string + ItemType string + EmbedHTML *string + Tags []string + CreatedAt string + ThumbnailID *int64 + MediaID *int64 +} + +func (c itemPageContent) Render(sw *ssr.Writer) error { + return sw.Tmpl(c, ` +
+ ← BACK + +
+ {{if eq .Item.ItemType "quote"}} +
+
{{.Item.Description}}
+ {{if .Item.Title}}— {{.Item.Title}}{{end}} +
+ {{else if eq .Item.ItemType "video"}} +
+ +
+ {{else if eq .Item.ItemType "embed"}} + {{if .Item.ThumbnailID}} +
+ {{if .Item.Title}}{{.Item.Title}}{{else}}Embed{{end}} +
+ {{else if .Item.MediaID}} +
+ {{if .Item.Title}}{{.Item.Title}}{{else}}Embed{{end}} +
+ {{end}} + {{else if .Item.MediaID}} +
+ {{if .Item.Title}}{{.Item.Title}}{{else}}Image{{end}} +
+ {{else if .Item.ThumbnailID}} +
+ {{if .Item.Title}}{{.Item.Title}}{{else}}Image{{end}} +
+ {{end}} + +
+ {{if .Item.Title}} +

{{.Item.Title}}

+ {{end}} + + {{if and .Item.Description (ne .Item.ItemType "quote")}} +

{{.Item.Description}}

+ {{end}} + + {{if .Item.LinkURL}} + {{.Item.LinkURL}} + {{end}} + + {{if .Item.Tags}} +
+ {{range .Item.Tags}} + {{.}} + {{end}} +
+ {{end}} + + +
+ + {{if .IsAdmin}} +
+ + {{if .Item.LinkURL}}{{end}} + +
+ {{end}} +
+
+ +{{if .IsAdmin}} + +{{end}} +`) +} + +// HandleItemPage handles GET /item/{id} +func HandleItemPage(rc *RequestContext, w http.ResponseWriter, r *http.Request) error { + pubID := r.PathValue("id") + if pubID == "" { + http.NotFound(w, r) + return nil + } + + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + it, err := item.QFindByPubID(ctx, rc.DB, pubID) + if err != nil { + return err + } + if it == nil { + http.NotFound(w, r) + return nil + } + + // Get tags + itemTags, err := tag.QTagsForItem(ctx, rc.DB, it.ID) + if err != nil { + return err + } + tagNames := make([]string, len(itemTags)) + for i, t := range itemTags { + tagNames[i] = t.Name + } + + // Get media + var thumbnailID, mediaID *int64 + mediaList, err := media.QFindByItemID(ctx, rc.DB, it.ID) + if err != nil { + return err + } + for _, m := range mediaList { + if m.MediaType == "thumbnail" { + thumbnailID = &m.ID + } else if m.MediaType == "original" { + mediaID = &m.ID + } + } + + data := itemPageData{ + ID: it.PubID, + Title: it.Title, + Description: it.Description, + LinkURL: it.LinkURL, + ItemType: it.ItemType, + EmbedHTML: it.EmbedHTML, + Tags: tagNames, + CreatedAt: it.CreatedAt.Format("Jan 2, 2006"), + ThumbnailID: thumbnailID, + MediaID: mediaID, + } + + w.Header().Set("Content-Type", "text/html; charset=utf-8") + sw := ssr.NewWriter(w, rc.TmplCache) + + var title string + if it.Title != nil { + title = *it.Title + } + + page := components.Page{ + Title: title, + IsAdmin: rc.IsAdmin, + Content: itemPageContent{Item: data, IsAdmin: rc.IsAdmin}, + } + + return page.Render(sw) +} diff --git a/internal/handlers/media.go b/internal/handlers/media.go new file mode 100644 index 0000000..d46f844 --- /dev/null +++ b/internal/handlers/media.go @@ -0,0 +1,50 @@ +package handlers + +import ( + "context" + "fmt" + "net/http" + "strconv" + "time" + + "lookbook/internal/data/media" +) + +// HandleGetMedia handles GET /media/{id} +func HandleGetMedia(rc *RequestContext, w 
http.ResponseWriter, r *http.Request) error { + idStr := r.PathValue("id") + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + http.Error(w, "invalid id", http.StatusBadRequest) + return nil + } + + ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second) + defer cancel() + + m, err := media.QFindByID(ctx, rc.DB, id) + if err != nil { + return err + } + if m == nil { + http.NotFound(w, r) + return nil + } + + // Set caching headers (media is immutable) + w.Header().Set("Cache-Control", "public, max-age=31536000, immutable") + w.Header().Set("Content-Type", m.ContentType) + w.Header().Set("Content-Length", strconv.Itoa(len(m.Data))) + + // Add ETag for conditional requests + etag := fmt.Sprintf(`"%d"`, m.ID) + w.Header().Set("ETag", etag) + + if r.Header.Get("If-None-Match") == etag { + w.WriteHeader(http.StatusNotModified) + return nil + } + + w.Write(m.Data) + return nil +} diff --git a/internal/handlers/router.go b/internal/handlers/router.go new file mode 100644 index 0000000..81a91f6 --- /dev/null +++ b/internal/handlers/router.go @@ -0,0 +1,75 @@ +package handlers + +import ( + "context" + "log/slog" + "net/http" + "time" + + "lookbook/internal/data/session" +) + +// Router wraps http.ServeMux and automatically injects RequestContext into handlers. +type Router struct { + mux *http.ServeMux + rc *RequestContext +} + +func NewRouter(rc *RequestContext) *Router { + return &Router{ + mux: http.NewServeMux(), + rc: rc, + } +} + +// Handle registers a handler that returns an error. +// The RequestContext is automatically injected and error handling is applied. +func (rt *Router) Handle(pattern string, h Handler) { + rt.mux.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) { + rc := &RequestContext{ + DB: rt.rc.DB, + Logger: rt.rc.Logger, + TmplCache: rt.rc.TmplCache, + IsAdmin: rt.loadAuth(r), + } + + handler := WithErrorHandling(rc, h) + handler(w, r) + }) +} + +// loadAuth checks if the request has a valid session cookie. 
func (rt *Router) loadAuth(r *http.Request) bool {
	// No cookie at all is the common anonymous case; treat as not-admin.
	cookie, err := r.Cookie("session_id")
	if err != nil {
		return false
	}

	// Bound the session lookup so a slow/unavailable DB cannot stall every request.
	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	sess, err := session.QFindBySessionID(ctx, rt.rc.DB, cookie.Value)
	if err != nil {
		// DB failure: fail closed (deny admin) rather than failing the request.
		rt.rc.Logger.Error("failed to find session", slog.Any("err", err))
		return false
	}
	if sess == nil {
		// Cookie present but no matching row (logged out or forged value).
		return false
	}

	// Expired sessions are rejected here but not deleted; row cleanup is left
	// to the session package. NOTE(review): this logs the raw session ID —
	// confirm that is acceptable under the log retention/secrecy policy.
	if time.Now().After(sess.ExpiresAt) {
		rt.rc.Logger.Info("session expired", slog.String("session_id", cookie.Value))
		return false
	}

	return true
}

// HandleStd registers a standard http.Handler (for static files, etc.)
func (rt *Router) HandleStd(pattern string, h http.Handler) {
	rt.mux.Handle(pattern, h)
}

// ServeHTTP makes Router itself an http.Handler by delegating to the inner mux.
func (rt *Router) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	rt.mux.ServeHTTP(w, r)
}
diff --git a/internal/handlers/templates.go b/internal/handlers/templates.go
new file mode 100644
index 0000000..e1b9517
--- /dev/null
+++ b/internal/handlers/templates.go
@@ -0,0 +1,17 @@
package handlers

import (
	"html/template"

	"lookbook/internal/static"
)

// TemplateFuncs is the shared FuncMap made available to SSR templates.
var TemplateFuncs = template.FuncMap{
	// staticURL resolves an asset path to its versioned (cache-busted) URL.
	"staticURL": static.VersionedPath,
	// safeHTML marks a possibly-nil string as pre-trusted HTML. template.HTML
	// bypasses html/template auto-escaping, so callers must pass only
	// server-generated markup (e.g. cached embed iframes), never user input.
	"safeHTML": func(s *string) template.HTML {
		if s == nil {
			return ""
		}
		return template.HTML(*s)
	},
}
diff --git a/internal/middleware/logger.go b/internal/middleware/logger.go
new file mode 100644
index 0000000..d1d2a7c
--- /dev/null
+++ b/internal/middleware/logger.go
@@ -0,0 +1,44 @@
package middleware

import (
	"log/slog"
	"net/http"
	"time"
)

// Logging emits structured request logs.
+func Logging(logger *slog.Logger) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + start := time.Now() + ww := &responseWriter{ResponseWriter: w, status: http.StatusOK} + + next.ServeHTTP(ww, r) + + logger.LogAttrs(r.Context(), slog.LevelInfo, "request", + slog.String("method", r.Method), + slog.String("path", r.URL.Path), + slog.Int("status", ww.status), + slog.Int("bytes", ww.bytes), + slog.Duration("latency", time.Since(start)), + ) + }) + } +} + +type responseWriter struct { + http.ResponseWriter + status int + bytes int +} + +func (w *responseWriter) WriteHeader(statusCode int) { + w.status = statusCode + w.ResponseWriter.WriteHeader(statusCode) +} + +func (w *responseWriter) Write(b []byte) (int, error) { + n, err := w.ResponseWriter.Write(b) + w.bytes += n + return n, err +} diff --git a/internal/migrations/migrations.go b/internal/migrations/migrations.go new file mode 100644 index 0000000..a6dc1a0 --- /dev/null +++ b/internal/migrations/migrations.go @@ -0,0 +1,131 @@ +package migrations + +import ( + "context" + "database/sql" + "embed" + "fmt" + "log/slog" + "os" + + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/pressly/goose/v3" +) + +//go:embed sql/*.sql +var FS embed.FS + +const DefaultURL = "postgres:///lookbook?sslmode=disable" + +// Up applies all available migrations using the provided database URL. +func Up(ctx context.Context, dbURL string, logger *slog.Logger) error { + url := dbURL + if url == "" { + url = DefaultURL + } + + db, err := openDB(url, logger) + if err != nil { + return err + } + defer db.Close() + + if err := goose.UpContext(ctx, db, "sql"); err != nil { + return fmt.Errorf("apply migrations: %w", err) + } + + logger.Info("database migrated") + return nil +} + +// Down rolls back migrations. If targetVersion < 0, it steps back one migration; otherwise it migrates down to the target version. 
func Down(ctx context.Context, dbURL string, targetVersion int64, logger *slog.Logger) error {
	// Fall back to the baked-in local URL when the caller passed none.
	url := dbURL
	if url == "" {
		url = DefaultURL
	}

	db, err := openDB(url, logger)
	if err != nil {
		return err
	}
	defer db.Close()

	// Negative target (the -to flag default) means "undo the most recent one".
	if targetVersion < 0 {
		if err := goose.DownContext(ctx, db, "sql"); err != nil {
			return fmt.Errorf("rollback one: %w", err)
		}
		logger.Info("rolled back one migration")
		return nil
	}

	// Otherwise unwind every migration with a version above targetVersion.
	if err := goose.DownToContext(ctx, db, "sql", targetVersion); err != nil {
		return fmt.Errorf("rollback to version %d: %w", targetVersion, err)
	}
	logger.Info("rolled back to version", slog.Int64("version", targetVersion))
	return nil
}

// CheckPending returns the number of pending migrations without applying them.
func CheckPending(ctx context.Context, dbURL string, logger *slog.Logger) (int, error) {
	url := dbURL
	if url == "" {
		url = DefaultURL
	}

	db, err := openDB(url, logger)
	if err != nil {
		return 0, err
	}
	defer db.Close()

	// Current version recorded in the goose version table.
	current, err := goose.GetDBVersionContext(ctx, db)
	if err != nil {
		return 0, fmt.Errorf("get db version: %w", err)
	}

	// Enumerate every known migration file in the embedded "sql" dir.
	migrations, err := goose.CollectMigrations("sql", 0, goose.MaxVersion)
	if err != nil {
		return 0, fmt.Errorf("collect migrations: %w", err)
	}

	// Anything newer than the recorded version has not been applied yet.
	pending := 0
	for _, m := range migrations {
		if m.Version > current {
			pending++
		}
	}

	return pending, nil
}

// slogLogger adapts slog to goose's minimal logging interface.
+type slogLogger struct { + logger *slog.Logger +} + +func (l slogLogger) Printf(format string, v ...any) { + l.logger.Info(fmt.Sprintf(format, v...)) +} + +func (l slogLogger) Fatalf(format string, v ...any) { + l.logger.Error(fmt.Sprintf(format, v...)) + os.Exit(1) +} + +func openDB(url string, logger *slog.Logger) (*sql.DB, error) { + goose.SetBaseFS(FS) + goose.SetLogger(slogLogger{logger: logger}) + + db, err := sql.Open("pgx", url) + if err != nil { + return nil, fmt.Errorf("open db: %w", err) + } + + if err := db.Ping(); err != nil { + db.Close() + return nil, fmt.Errorf("ping db: %w", err) + } + + return db, nil +} diff --git a/internal/migrations/sql/.gitkeep b/internal/migrations/sql/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/internal/migrations/sql/0001_init.sql b/internal/migrations/sql/0001_init.sql new file mode 100644 index 0000000..0dafb8c --- /dev/null +++ b/internal/migrations/sql/0001_init.sql @@ -0,0 +1,80 @@ +-- +goose Up +-- gen_random_uuid() is built-in since PostgreSQL 13, no extension needed + +-- Admin authentication (single row) +CREATE TABLE admin ( + id SERIAL PRIMARY KEY, + password_hash BYTEA, -- NULL until first login() sets password + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Insert the single admin row +INSERT INTO admin (id) VALUES (1); + +-- Sessions for admin authentication +CREATE TABLE session ( + id BIGSERIAL PRIMARY KEY, + session_id TEXT NOT NULL UNIQUE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + expires_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_session_expires_at ON session(expires_at); + +-- Items (the main content) +CREATE TABLE item ( + id BIGSERIAL PRIMARY KEY, + pub_id UUID NOT NULL DEFAULT gen_random_uuid() UNIQUE, + title TEXT, + description TEXT, + link_url TEXT, -- Source URL (optional) + item_type TEXT NOT NULL, -- 'image', 'video', 'quote', 'embed' + embed_provider TEXT, -- 'youtube', 'vimeo', NULL + embed_video_id TEXT, -- Video ID for embeds + embed_html TEXT, 
-- Cached embed iframe HTML + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + deleted_at TIMESTAMPTZ -- Soft delete +); + +CREATE INDEX idx_item_deleted ON item(deleted_at); +CREATE INDEX idx_item_created ON item(created_at DESC); +CREATE INDEX idx_item_pub_id ON item(pub_id); + +-- Media blobs (stored in DB) +CREATE TABLE media ( + id BIGSERIAL PRIMARY KEY, + item_id BIGINT NOT NULL REFERENCES item(id) ON DELETE CASCADE, + media_type TEXT NOT NULL, -- 'original', 'thumbnail' + content_type TEXT NOT NULL, -- MIME type + data BYTEA NOT NULL, + width INT, + height INT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_media_item_id ON media(item_id); + +-- Tags +CREATE TABLE tag ( + id BIGSERIAL PRIMARY KEY, + name TEXT NOT NULL UNIQUE +); + +CREATE INDEX idx_tag_name ON tag(name); + +-- Item-Tag junction +CREATE TABLE item_tag ( + item_id BIGINT NOT NULL REFERENCES item(id) ON DELETE CASCADE, + tag_id BIGINT NOT NULL REFERENCES tag(id) ON DELETE CASCADE, + PRIMARY KEY (item_id, tag_id) +); + +CREATE INDEX idx_item_tag_tag_id ON item_tag(tag_id); + +-- +goose Down +DROP TABLE IF EXISTS item_tag; +DROP TABLE IF EXISTS tag; +DROP TABLE IF EXISTS media; +DROP TABLE IF EXISTS item; +DROP TABLE IF EXISTS session; +DROP TABLE IF EXISTS admin; diff --git a/internal/migrations/sql/0002_media_source_url.sql b/internal/migrations/sql/0002_media_source_url.sql new file mode 100644 index 0000000..6fa0935 --- /dev/null +++ b/internal/migrations/sql/0002_media_source_url.sql @@ -0,0 +1,5 @@ +-- +goose Up +ALTER TABLE media ADD COLUMN source_url TEXT; + +-- +goose Down +ALTER TABLE media DROP COLUMN source_url; diff --git a/internal/opengraph/fetch.go b/internal/opengraph/fetch.go new file mode 100644 index 0000000..4060537 --- /dev/null +++ b/internal/opengraph/fetch.go @@ -0,0 +1,185 @@ +package opengraph + +import ( + "context" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "golang.org/x/net/html" +) + +// Metadata contains extracted 
OpenGraph and meta data from a URL. +type Metadata struct { + Title string + Description string + ImageURL string + VideoURL string + SiteName string + Type string // og:type +} + +// Fetch fetches and parses OpenGraph metadata from a URL. +func Fetch(ctx context.Context, targetURL string) (*Metadata, error) { + req, err := http.NewRequestWithContext(ctx, "GET", targetURL, nil) + if err != nil { + return nil, fmt.Errorf("create request: %w", err) + } + req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)") + + client := &http.Client{ + Timeout: 10 * time.Second, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + if len(via) >= 5 { + return fmt.Errorf("too many redirects") + } + return nil + }, + } + + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("fetch url: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("unexpected status: %d", resp.StatusCode) + } + + // Limit response body to 1MB + body := io.LimitReader(resp.Body, 1<<20) + + doc, err := html.Parse(body) + if err != nil { + return nil, fmt.Errorf("parse html: %w", err) + } + + meta := &Metadata{} + parseNode(doc, meta) + + // Resolve relative URLs + baseURL, _ := url.Parse(targetURL) + if meta.ImageURL != "" && !strings.HasPrefix(meta.ImageURL, "http") { + if imgURL, err := baseURL.Parse(meta.ImageURL); err == nil { + meta.ImageURL = imgURL.String() + } + } + if meta.VideoURL != "" && !strings.HasPrefix(meta.VideoURL, "http") { + if vidURL, err := baseURL.Parse(meta.VideoURL); err == nil { + meta.VideoURL = vidURL.String() + } + } + + return meta, nil +} + +func parseNode(n *html.Node, meta *Metadata) { + if n.Type == html.ElementNode { + switch n.Data { + case "meta": + parseMeta(n, meta) + case "title": + if meta.Title == "" && n.FirstChild != nil { + meta.Title = strings.TrimSpace(n.FirstChild.Data) + } + } + } + + for c := n.FirstChild; c != nil; c = c.NextSibling { + parseNode(c, meta) + 
} +} + +func parseMeta(n *html.Node, meta *Metadata) { + var property, name, content string + for _, attr := range n.Attr { + switch attr.Key { + case "property": + property = attr.Val + case "name": + name = attr.Val + case "content": + content = attr.Val + } + } + + // OpenGraph properties + switch property { + case "og:title": + meta.Title = content + case "og:description": + if meta.Description == "" { + meta.Description = content + } + case "og:image": + if meta.ImageURL == "" { + meta.ImageURL = content + } + case "og:video", "og:video:url": + if meta.VideoURL == "" { + meta.VideoURL = content + } + case "og:site_name": + meta.SiteName = content + case "og:type": + meta.Type = content + } + + // Twitter cards + switch name { + case "twitter:title": + if meta.Title == "" { + meta.Title = content + } + case "twitter:description": + if meta.Description == "" { + meta.Description = content + } + case "twitter:image": + if meta.ImageURL == "" { + meta.ImageURL = content + } + case "description": + if meta.Description == "" { + meta.Description = content + } + } +} + +// DownloadImage downloads an image from a URL and returns the data and content type. 
+func DownloadImage(ctx context.Context, imageURL string) ([]byte, string, error) { + req, err := http.NewRequestWithContext(ctx, "GET", imageURL, nil) + if err != nil { + return nil, "", fmt.Errorf("create request: %w", err) + } + req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)") + + client := &http.Client{Timeout: 30 * time.Second} + + resp, err := client.Do(req) + if err != nil { + return nil, "", fmt.Errorf("fetch image: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, "", fmt.Errorf("unexpected status: %d", resp.StatusCode) + } + + contentType := resp.Header.Get("Content-Type") + if !strings.HasPrefix(contentType, "image/") { + return nil, "", fmt.Errorf("not an image: %s", contentType) + } + + // Limit to 50MB + data, err := io.ReadAll(io.LimitReader(resp.Body, 50<<20)) + if err != nil { + return nil, "", fmt.Errorf("read image: %w", err) + } + + return data, contentType, nil +} diff --git a/internal/static/css/.gitkeep b/internal/static/css/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/internal/static/css/app.css b/internal/static/css/app.css new file mode 100644 index 0000000..3910d32 --- /dev/null +++ b/internal/static/css/app.css @@ -0,0 +1,562 @@ +/* + * Commit Mono - Neutral programming typeface + * Licensed under SIL Open Font License 1.1 + * https://commitmono.com/ + */ + +@font-face { + font-family: 'CommitMono'; + font-style: normal; + font-weight: 400; + font-display: swap; + src: url("../fonts/CommitMono-450-Regular.woff2") format('woff2'); +} + +@font-face { + font-family: 'CommitMono'; + font-style: italic; + font-weight: 400; + font-display: swap; + src: url("../fonts/CommitMono-450-Italic.woff2") format('woff2'); +} + +@font-face { + font-family: 'CommitMono'; + font-style: normal; + font-weight: 700; + font-display: swap; + src: url("../fonts/CommitMono-700-Regular.woff2") format('woff2'); +} + +@font-face { + font-family: 'CommitMono'; + font-style: 
italic; + font-weight: 700; + font-display: swap; + src: url("../fonts/CommitMono-700-Italic.woff2") format('woff2'); +} + +/* Reset */ +*, *::before, *::after { + box-sizing: border-box; + margin: 0; + padding: 0; +} + +/* Base */ +:root { + --bg: #fff; + --fg: #000; + --gray-1: #f5f5f5; + --gray-2: #e5e5e5; + --gray-3: #888; + --font: 'CommitMono', ui-monospace, 'SF Mono', Menlo, monospace; +} + +html { + font-size: 14px; +} + +body { + font-family: var(--font); + background: var(--bg); + color: var(--fg); + line-height: 1.5; + min-height: 100vh; +} + +a { + color: inherit; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + +img, video { + max-width: 100%; + height: auto; + display: block; +} + +button, input, textarea, select { + font: inherit; + color: inherit; +} + +/* Header */ +.header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 1rem 2rem; + border-bottom: 1px solid var(--gray-2); +} + +.logo { + font-weight: 700; + font-size: 1rem; + letter-spacing: 0.1em; +} + +.nav { + display: flex; + gap: 1.5rem; +} + +.nav a { + font-size: 0.85rem; + text-transform: uppercase; + letter-spacing: 0.05em; +} + +/* Main */ +.main { + max-width: 1600px; + margin: 0 auto; + padding: 2rem; +} + +/* Admin Bar */ +.admin-bar { + margin-bottom: 1.5rem; + display: flex; + gap: 1rem; +} + +/* Tags Bar */ +.tags-bar { + display: flex; + flex-wrap: wrap; + gap: 0.5rem; + margin-bottom: 2rem; + padding-bottom: 1rem; + border-bottom: 1px solid var(--gray-2); +} + +.tag { + font-size: 0.75rem; + text-transform: uppercase; + letter-spacing: 0.05em; + padding: 0.25rem 0.5rem; + border: 1px solid var(--gray-2); + background: var(--bg); + transition: all 0.15s ease; +} + +.tag:hover, .tag.active { + background: var(--fg); + color: var(--bg); + border-color: var(--fg); + text-decoration: none; +} + +/* Grid */ +.grid { + column-count: 4; + column-gap: 1rem; +} + +.grid-item { + display: block; + break-inside: avoid; + 
margin-bottom: 1rem; + background: var(--gray-1); + border: 1px solid var(--gray-2); + overflow: hidden; + position: relative; + transition: transform 0.15s ease, box-shadow 0.15s ease; +} + +.grid-item:hover { + text-decoration: none; + transform: translateY(-2px); + box-shadow: 0 4px 12px rgba(0,0,0,0.1); +} + +.grid-item img { + width: 100%; + display: block; +} + +/* Quote Card */ +.quote-card { + padding: 1.5rem; + min-height: 150px; + display: flex; + flex-direction: column; + justify-content: center; +} + +.quote-card blockquote { + font-size: 1.1rem; + font-style: italic; + line-height: 1.6; + margin-bottom: 0.75rem; +} + +.quote-card blockquote::before { + content: '"'; +} + +.quote-card blockquote::after { + content: '"'; +} + +.quote-card cite { + font-size: 0.85rem; + color: var(--gray-3); + font-style: normal; +} + +/* Link Card */ +.link-card { + padding: 1.5rem; + min-height: 100px; +} + +.link-title { + font-weight: 700; + margin-bottom: 0.5rem; +} + +.link-url { + font-size: 0.75rem; + color: var(--gray-3); + word-break: break-all; +} + +/* Embed Placeholder */ +.embed-placeholder { + aspect-ratio: 16/9; + display: flex; + align-items: center; + justify-content: center; + background: var(--fg); + color: var(--bg); + font-size: 2rem; +} + +/* Item Tags */ +.item-tags { + display: flex; + flex-wrap: wrap; + gap: 0.25rem; + padding: 0.5rem; + background: var(--bg); + border-top: 1px solid var(--gray-2); +} + +.item-tag { + font-size: 0.65rem; + text-transform: uppercase; + letter-spacing: 0.05em; + padding: 0.15rem 0.35rem; + background: var(--gray-1); + border: 1px solid var(--gray-2); +} + +/* Item Page */ +.item-page { + max-width: 900px; + margin: 0 auto; +} + +.back-link { + display: inline-block; + font-size: 0.85rem; + margin-bottom: 2rem; + text-transform: uppercase; + letter-spacing: 0.05em; +} + +.item-detail { + border: 1px solid var(--gray-2); +} + +.image-container img, +.video-container video { + width: 100%; +} + +.embed-container { + 
position: relative; +} + +/* YouTube/Vimeo iframe embeds */ +.embed-container iframe { + max-width: 100%; +} + +/* Twitter embeds */ +.embed-container .twitter-tweet { + margin: 1rem auto !important; +} + +.quote-detail { + padding: 3rem; + text-align: center; +} + +.quote-detail blockquote { + font-size: 1.5rem; + font-style: italic; + line-height: 1.6; + margin-bottom: 1rem; +} + +.quote-detail blockquote::before { + content: '"'; +} + +.quote-detail blockquote::after { + content: '"'; +} + +.quote-detail cite { + color: var(--gray-3); + font-style: normal; +} + +.item-meta { + padding: 1.5rem; + border-top: 1px solid var(--gray-2); +} + +.item-meta h1 { + font-size: 1.25rem; + font-weight: 700; + margin-bottom: 0.75rem; +} + +.item-meta .description { + margin-bottom: 1rem; + color: var(--gray-3); +} + +.item-meta .source-link { + display: block; + font-size: 0.85rem; + color: var(--gray-3); + word-break: break-all; + margin-bottom: 1rem; +} + +.item-meta .item-tags { + background: none; + border: none; + padding: 0; + margin-bottom: 1rem; +} + +.item-meta .timestamp { + display: block; + font-size: 0.75rem; + color: var(--gray-3); + text-transform: uppercase; + letter-spacing: 0.05em; +} + +.item-actions { + padding: 1rem 1.5rem; + border-top: 1px solid var(--gray-2); + display: flex; + gap: 0.5rem; +} + +/* Buttons */ +.btn { + font-size: 0.75rem; + text-transform: uppercase; + letter-spacing: 0.1em; + padding: 0.5rem 1rem; + border: 1px solid var(--fg); + background: var(--bg); + cursor: pointer; + transition: all 0.15s ease; +} + +.btn:hover { + background: var(--fg); + color: var(--bg); +} + +.btn-danger { + border-color: #c00; + color: #c00; +} + +.btn-danger:hover { + background: #c00; + color: #fff; +} + +.btn-close { + background: none; + border: none; + font-size: 1.5rem; + cursor: pointer; + line-height: 1; +} + +/* Modal */ +.modal { + display: none; + position: fixed; + inset: 0; + background: rgba(0,0,0,0.5); + z-index: 1000; + align-items: center; 
+ justify-content: center; +} + +.modal.active { + display: flex; +} + +.modal-content { + background: var(--bg); + width: 90%; + max-width: 500px; + max-height: 90vh; + overflow-y: auto; + border: 1px solid var(--fg); +} + +.modal-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 1rem 1.5rem; + border-bottom: 1px solid var(--gray-2); +} + +.modal-header h2 { + font-size: 1rem; + font-weight: 700; + letter-spacing: 0.1em; +} + +.modal-tabs { + display: flex; + border-bottom: 1px solid var(--gray-2); +} + +.modal-tabs .tab { + flex: 1; + padding: 0.75rem; + background: none; + border: none; + font-size: 0.75rem; + text-transform: uppercase; + letter-spacing: 0.1em; + cursor: pointer; + border-bottom: 2px solid transparent; +} + +.modal-tabs .tab.active { + border-bottom-color: var(--fg); +} + +.modal-body { + padding: 1.5rem; +} + +.tab-content { + display: none; +} + +.tab-content.active { + display: block; +} + +/* Forms */ +form { + display: flex; + flex-direction: column; + gap: 1rem; +} + +input, textarea { + width: 100%; + padding: 0.75rem; + border: 1px solid var(--gray-2); + background: var(--bg); +} + +input:focus, textarea:focus { + outline: none; + border-color: var(--fg); +} + +textarea { + resize: vertical; + min-height: 80px; +} + +input[type="file"] { + padding: 0.5rem; +} + +/* Preview */ +.preview { + padding: 1rem; + background: var(--gray-1); + border: 1px solid var(--gray-2); + display: none; +} + +.preview.active { + display: block; +} + +.preview img { + max-width: 200px; + margin-bottom: 0.5rem; +} + +.preview-title { + font-weight: 700; + margin-bottom: 0.25rem; +} + +.preview-description { + font-size: 0.85rem; + color: var(--gray-3); +} + +/* Responsive */ +@media (max-width: 1200px) { + .grid { + column-count: 3; + } +} + +@media (max-width: 900px) { + .grid { + column-count: 2; + } + + .main { + padding: 1rem; + } +} + +@media (max-width: 600px) { + .grid { + column-count: 1; + } + + .header { + 
padding: 1rem; + } + + .quote-detail { + padding: 2rem 1rem; + } + + .quote-detail blockquote { + font-size: 1.25rem; + } +} diff --git a/internal/static/fonts/CommitMono-400-Italic.woff2 b/internal/static/fonts/CommitMono-400-Italic.woff2 new file mode 100644 index 0000000..7701d5f Binary files /dev/null and b/internal/static/fonts/CommitMono-400-Italic.woff2 differ diff --git a/internal/static/fonts/CommitMono-400-Regular.woff2 b/internal/static/fonts/CommitMono-400-Regular.woff2 new file mode 100644 index 0000000..fc8cb3d Binary files /dev/null and b/internal/static/fonts/CommitMono-400-Regular.woff2 differ diff --git a/internal/static/fonts/CommitMono-450-Italic.woff2 b/internal/static/fonts/CommitMono-450-Italic.woff2 new file mode 100644 index 0000000..ffdf7e8 Binary files /dev/null and b/internal/static/fonts/CommitMono-450-Italic.woff2 differ diff --git a/internal/static/fonts/CommitMono-450-Regular.woff2 b/internal/static/fonts/CommitMono-450-Regular.woff2 new file mode 100644 index 0000000..014538a Binary files /dev/null and b/internal/static/fonts/CommitMono-450-Regular.woff2 differ diff --git a/internal/static/fonts/CommitMono-700-Italic.woff2 b/internal/static/fonts/CommitMono-700-Italic.woff2 new file mode 100644 index 0000000..b2c9d8a Binary files /dev/null and b/internal/static/fonts/CommitMono-700-Italic.woff2 differ diff --git a/internal/static/fonts/CommitMono-700-Regular.woff2 b/internal/static/fonts/CommitMono-700-Regular.woff2 new file mode 100644 index 0000000..202fe56 Binary files /dev/null and b/internal/static/fonts/CommitMono-700-Regular.woff2 differ diff --git a/internal/static/js/.gitkeep b/internal/static/js/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/internal/static/js/app.js b/internal/static/js/app.js new file mode 100644 index 0000000..1863ba9 --- /dev/null +++ b/internal/static/js/app.js @@ -0,0 +1,386 @@ +// Console-based authentication +window.login = async (password) => { + if (!password) { + 
console.error('Usage: login("your-password")'); + return; + } + try { + const res = await fetch('/api/auth/login', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ password }) + }); + const data = await res.json(); + if (res.ok) { + console.log(data.firstTime ? 'Password set! Reloading...' : 'Logged in! Reloading...'); + setTimeout(() => location.reload(), 500); + } else { + console.error(data.error || 'Login failed'); + } + } catch (err) { + console.error('Login error:', err); + } +}; + +window.logout = async () => { + try { + await fetch('/api/auth/logout', { method: 'POST' }); + console.log('Logged out! Reloading...'); + setTimeout(() => location.reload(), 500); + } catch (err) { + console.error('Logout error:', err); + } +}; + +// Modal functions +function showAddModal() { + document.getElementById('add-modal').classList.add('active'); +} + +function hideAddModal() { + document.getElementById('add-modal').classList.remove('active'); +} + +function showEditModal() { + document.getElementById('edit-modal').classList.add('active'); +} + +function hideEditModal() { + document.getElementById('edit-modal').classList.remove('active'); +} + +// Tab switching +document.addEventListener('DOMContentLoaded', () => { + const tabs = document.querySelectorAll('.modal-tabs .tab'); + tabs.forEach(tab => { + tab.addEventListener('click', () => { + const tabId = tab.dataset.tab; + + // Update tab buttons + tabs.forEach(t => t.classList.remove('active')); + tab.classList.add('active'); + + // Update tab content + document.querySelectorAll('.tab-content').forEach(c => c.classList.remove('active')); + document.getElementById('tab-' + tabId).classList.add('active'); + }); + }); + + // URL preview on input + const urlInput = document.querySelector('#link-form input[name="url"]'); + if (urlInput) { + let debounceTimer; + urlInput.addEventListener('input', (e) => { + clearTimeout(debounceTimer); + debounceTimer = setTimeout(() => 
fetchPreview(e.target.value), 500); + }); + } +}); + +// Fetch URL preview +async function fetchPreview(url) { + const preview = document.getElementById('link-preview'); + if (!url) { + preview.classList.remove('active'); + preview.innerHTML = ''; + return; + } + + try { + const res = await fetch('/api/preview', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ url }) + }); + + if (!res.ok) { + const data = await res.json(); + preview.innerHTML = `
${data.error || 'Failed to fetch preview'}
`; + preview.classList.add('active'); + return; + } + + const data = await res.json(); + preview.dataset.preview = JSON.stringify(data); + + let html = ''; + if (data.imageUrl) { + html += `Preview`; + } + if (data.title) { + html += `
${escapeHtml(data.title)}
`; + } + if (data.description) { + html += `
${escapeHtml(data.description)}
`; + } + if (data.isEmbed) { + html += `
${escapeHtml(data.provider.toUpperCase())} VIDEO
`; + } + + preview.innerHTML = html || '
No preview available
'; + preview.classList.add('active'); + + // Auto-fill title if empty + const titleInput = document.querySelector('#link-form input[name="title"]'); + if (titleInput && !titleInput.value && data.title) { + titleInput.value = data.title; + } + } catch (err) { + console.error('Preview error:', err); + preview.innerHTML = '
Failed to fetch preview
'; + preview.classList.add('active'); + } +} + +// Submit link form +async function submitLink(event) { + event.preventDefault(); + const form = event.target; + const url = form.url.value; + const tags = form.tags.value ? form.tags.value.split(',').map(t => t.trim()).filter(Boolean) : []; + + // Get preview data + const preview = document.getElementById('link-preview'); + const previewData = preview.dataset.preview ? JSON.parse(preview.dataset.preview) : {}; + + // Use form values, falling back to preview data + const title = form.title.value || previewData.title || null; + const description = form.description.value || previewData.description || null; + + try { + const body = { + url, + title, + description, + tags, + imageUrl: previewData.imageUrl || null, + }; + + if (previewData.isEmbed) { + body.provider = previewData.provider; + body.videoId = previewData.videoId; + body.embedHtml = previewData.embedHtml; + } + + const res = await fetch('/api/items/from-link', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body) + }); + + if (res.ok) { + location.reload(); + } else { + const data = await res.json(); + alert(data.error || 'Failed to add item'); + } + } catch (err) { + console.error('Submit error:', err); + alert('Failed to add item'); + } + + return false; +} + +// Submit upload form +async function submitUpload(event) { + event.preventDefault(); + const form = event.target; + const formData = new FormData(form); + + try { + const res = await fetch('/api/items/upload', { + method: 'POST', + body: formData + }); + + if (res.ok) { + location.reload(); + } else { + const data = await res.json(); + alert(data.error || 'Failed to upload'); + } + } catch (err) { + console.error('Upload error:', err); + alert('Failed to upload'); + } + + return false; +} + +// Submit quote form +async function submitQuote(event) { + event.preventDefault(); + const form = event.target; + const text = form.text.value; + const source = 
form.source.value || null; + const sourceUrl = form.sourceUrl.value || null; + const tags = form.tags.value ? form.tags.value.split(',').map(t => t.trim()).filter(Boolean) : []; + + try { + const res = await fetch('/api/items/quote', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ text, source, sourceUrl, tags }) + }); + + if (res.ok) { + location.reload(); + } else { + const data = await res.json(); + alert(data.error || 'Failed to add quote'); + } + } catch (err) { + console.error('Submit error:', err); + alert('Failed to add quote'); + } + + return false; +} + +// Edit item +function editItem(id) { + showEditModal(); +} + +// Submit edit form +async function submitEdit(event) { + event.preventDefault(); + const form = event.target; + const id = form.id.value; + const title = form.title.value || null; + const description = form.description.value || null; + const linkUrl = form.linkUrl.value || null; + const tags = form.tags.value ? form.tags.value.split(',').map(t => t.trim()).filter(Boolean) : []; + + try { + const res = await fetch(`/api/items/${id}`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ title, description, linkUrl, tags }) + }); + + if (res.ok) { + location.reload(); + } else { + const data = await res.json(); + alert(data.error || 'Failed to update'); + } + } catch (err) { + console.error('Update error:', err); + alert('Failed to update'); + } + + return false; +} + +// Refresh metadata +async function refreshMetadata(id) { + try { + // Fetch current item data + const itemRes = await fetch(`/api/items/${id}`); + if (!itemRes.ok) { + alert('Failed to fetch item'); + return; + } + const item = await itemRes.json(); + + if (!item.linkUrl) { + alert('Item has no link URL'); + return; + } + + // Fetch fresh metadata + const previewRes = await fetch('/api/preview', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ 
url: item.linkUrl }) + }); + if (!previewRes.ok) { + const data = await previewRes.json(); + alert(data.error || 'Failed to fetch metadata'); + return; + } + const preview = await previewRes.json(); + + // Check if user has made manual edits + const titleChanged = item.title && preview.title && item.title !== preview.title; + const descChanged = item.description && preview.description && item.description !== preview.description; + const imageChanged = item.thumbnailSourceUrl && preview.imageUrl && item.thumbnailSourceUrl !== preview.imageUrl; + + if (titleChanged || descChanged || imageChanged) { + let msg = 'This will overwrite your changes:\n'; + if (titleChanged) msg += `\nTitle: "${item.title}" → "${preview.title}"`; + if (descChanged) msg += `\nDescription will be replaced`; + if (imageChanged) msg += `\nImage will be replaced`; + msg += '\n\nContinue?'; + + if (!confirm(msg)) return; + } + + // Proceed with refresh + const res = await fetch(`/api/items/${id}/refresh`, { method: 'POST' }); + + if (res.ok) { + location.reload(); + } else { + const data = await res.json(); + alert(data.error || 'Failed to refresh'); + } + } catch (err) { + console.error('Refresh error:', err); + alert('Failed to refresh'); + } +} + +// Replace media +async function submitReplaceMedia(event, id) { + event.preventDefault(); + const form = event.target; + const formData = new FormData(form); + + try { + const res = await fetch(`/api/items/${id}/media`, { + method: 'POST', + body: formData + }); + + if (res.ok) { + location.reload(); + } else { + const data = await res.json(); + alert(data.error || 'Failed to replace media'); + } + } catch (err) { + console.error('Replace media error:', err); + alert('Failed to replace media'); + } + + return false; +} + +// Delete item +async function deleteItem(id) { + if (!confirm('Delete this item?')) return; + + try { + const res = await fetch(`/api/items/${id}`, { method: 'DELETE' }); + + if (res.ok) { + location.href = '/'; + } else { + const 
data = await res.json(); + alert(data.error || 'Failed to delete'); + } + } catch (err) { + console.error('Delete error:', err); + alert('Failed to delete'); + } +} + +// Utility +function escapeHtml(str) { + const div = document.createElement('div'); + div.textContent = str; + return div.innerHTML; +} diff --git a/internal/static/static.go b/internal/static/static.go new file mode 100644 index 0000000..f1e45a1 --- /dev/null +++ b/internal/static/static.go @@ -0,0 +1,43 @@ +package static + +import ( + "crypto/sha256" + "embed" + "encoding/hex" + "net/http" + "strings" + "time" +) + +//go:embed css/* js/* fonts/* +var staticFS embed.FS + +// Version is set via -ldflags in production +var Version string + +func init() { + if Version == "" { + h := sha256.Sum256([]byte(time.Now().String())) + Version = hex.EncodeToString(h[:4]) + } +} + +func VersionedPath(path string) string { + return "/static/" + Version + "/" + path +} + +func Handler() http.Handler { + fileServer := http.FileServer(http.FS(staticFS)) + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Strip version prefix: /static/{version}/file.css -> /file.css + path := r.URL.Path + path = strings.TrimPrefix(path, "/static/") + if idx := strings.Index(path, "/"); idx != -1 { + path = path[idx:] + } + r.URL.Path = path + + w.Header().Set("Cache-Control", "public, max-age=31536000, immutable") + fileServer.ServeHTTP(w, r) + }) +} diff --git a/internal/video/process.go b/internal/video/process.go new file mode 100644 index 0000000..6270ce7 --- /dev/null +++ b/internal/video/process.go @@ -0,0 +1,151 @@ +package video + +import ( + "bytes" + "context" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" +) + +// ExtractThumbnail extracts a thumbnail from a video file. +// Returns the thumbnail image data as JPEG. 
// ExtractThumbnail extracts a single frame from a video and returns it as
// JPEG data. It prefers the frame at the 1-second mark, falling back to the
// very first frame for clips shorter than one second (the original code
// failed on such clips: ffmpeg can exit 0 without writing any output when
// the seek point is past EOF, and the subsequent ReadFile then errored).
func ExtractThumbnail(ctx context.Context, videoData []byte) ([]byte, error) {
	// ffmpeg only reads from files, so stage the upload in a temp dir that
	// is removed (input and thumbnail together) when we return.
	tmpDir, err := os.MkdirTemp("", "lookbook-video-*")
	if err != nil {
		return nil, fmt.Errorf("create temp dir: %w", err)
	}
	defer os.RemoveAll(tmpDir)

	inputPath := filepath.Join(tmpDir, "input")
	outputPath := filepath.Join(tmpDir, "thumbnail.jpg")

	if err := os.WriteFile(inputPath, videoData, 0600); err != nil {
		return nil, fmt.Errorf("write temp video: %w", err)
	}

	// Try the 1-second mark first; if that yields nothing (short clip or
	// ffmpeg error), retry at the start of the stream before giving up.
	if err := extractFrame(ctx, inputPath, outputPath, "00:00:01"); err != nil {
		if retryErr := extractFrame(ctx, inputPath, outputPath, "00:00:00"); retryErr != nil {
			// Report the first attempt's error: it carries the more
			// representative ffmpeg stderr.
			return nil, err
		}
	}

	thumbnail, err := os.ReadFile(outputPath)
	if err != nil {
		return nil, fmt.Errorf("read thumbnail: %w", err)
	}

	return thumbnail, nil
}

// extractFrame runs ffmpeg to grab one frame at offset ss, scaled to fit
// within 1280x720 while preserving aspect ratio, writing JPEG to outputPath.
// It returns an error (including ffmpeg's stderr) when no usable thumbnail
// was produced, even if ffmpeg itself exited 0.
func extractFrame(ctx context.Context, inputPath, outputPath, ss string) error {
	cmd := exec.CommandContext(ctx, "ffmpeg",
		"-i", inputPath,
		"-ss", ss,
		"-vframes", "1",
		"-vf", "scale='min(1280,iw)':'min(720,ih)':force_original_aspect_ratio=decrease",
		"-q:v", "2",
		"-y",
		outputPath,
	)

	var stderr bytes.Buffer
	cmd.Stderr = &stderr

	if err := cmd.Run(); err != nil {
		return fmt.Errorf("ffmpeg thumbnail: %w: %s", err, stderr.String())
	}

	// ffmpeg may exit 0 yet emit zero frames when ss lies past end of
	// stream; treat a missing or empty output file as failure.
	if fi, err := os.Stat(outputPath); err != nil || fi.Size() == 0 {
		return fmt.Errorf("ffmpeg produced no thumbnail: %s", stderr.String())
	}
	return nil
}
+func TranscodeToMP4(ctx context.Context, videoData []byte, contentType string) ([]byte, error) { + // If already MP4 with H.264, we might skip transcoding + // For simplicity, we always transcode to ensure compatibility + + tmpDir, err := os.MkdirTemp("", "lookbook-video-*") + if err != nil { + return nil, fmt.Errorf("create temp dir: %w", err) + } + defer os.RemoveAll(tmpDir) + + inputPath := filepath.Join(tmpDir, "input") + outputPath := filepath.Join(tmpDir, "output.mp4") + + if err := os.WriteFile(inputPath, videoData, 0600); err != nil { + return nil, fmt.Errorf("write temp video: %w", err) + } + + // Transcode to H.264 MP4 + cmd := exec.CommandContext(ctx, "ffmpeg", + "-i", inputPath, + "-c:v", "libx264", + "-preset", "medium", + "-crf", "23", + "-c:a", "aac", + "-b:a", "128k", + "-movflags", "+faststart", + "-y", + outputPath, + ) + + var stderr bytes.Buffer + cmd.Stderr = &stderr + + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("ffmpeg transcode: %w: %s", err, stderr.String()) + } + + output, err := os.ReadFile(outputPath) + if err != nil { + return nil, fmt.Errorf("read transcoded: %w", err) + } + + return output, nil +} + +// ProcessVideo processes an uploaded video: transcodes to MP4 and extracts thumbnail. +// Returns (transcodedData, thumbnailData, error). +func ProcessVideo(ctx context.Context, videoData []byte, contentType string) ([]byte, []byte, error) { + // Extract thumbnail first (from original, often has better quality) + thumbnail, err := ExtractThumbnail(ctx, videoData) + if err != nil { + return nil, nil, fmt.Errorf("extract thumbnail: %w", err) + } + + // Transcode to MP4 + transcoded, err := TranscodeToMP4(ctx, videoData, contentType) + if err != nil { + return nil, nil, fmt.Errorf("transcode: %w", err) + } + + return transcoded, thumbnail, nil +} + +// IsVideo checks if a content type is a video type. 
+func IsVideo(contentType string) bool { + // Allowlist of container MIME types the upload handler accepts; anything + // else is rejected rather than passed to ffmpeg. + switch contentType { + case "video/mp4", "video/webm", "video/quicktime", "video/x-msvideo", + "video/x-matroska", "video/mpeg", "video/ogg", "video/3gpp": + return true + } + return false +} + +// IsImage checks if a content type is an image type. +func IsImage(contentType string) bool { + switch contentType { + case "image/jpeg", "image/png", "image/gif", "image/webp", "image/avif": + return true + } + return false +} + +// ReadUpload reads an uploaded file up to maxSize bytes. +// It reads at most maxSize+1 bytes so an over-limit upload is detected +// without buffering an unbounded stream, and is distinguishable from a file +// that is exactly maxSize bytes long. +func ReadUpload(r io.Reader, maxSize int64) ([]byte, error) { + limited := io.LimitReader(r, maxSize+1) + data, err := io.ReadAll(limited) + if err != nil { + return nil, err + } + if int64(len(data)) > maxSize { + return nil, fmt.Errorf("file too large (max %d bytes)", maxSize) + } + return data, nil +} diff --git a/shell.nix b/shell.nix new file mode 100644 index 0000000..70e57fa --- /dev/null +++ b/shell.nix @@ -0,0 +1,14 @@ +let + sx-nix = import ../sx-nix {}; + pkgs = sx-nix.pkgs; +in + +pkgs.mkShell { + packages = [ + (sx-nix.go.goToolchainBin { version = "1.25.5"; sha256 = "sha256-npt1XWOzas8wwSqaP8N5JDcUwcbT3XKGHaY38zbrs1s="; }) + pkgs.postgresql + pkgs.entr + pkgs.fd + pkgs.hivemind + ]; +}