Compare commits

..

No commits in common. "007e1677079b714e2b5c4c03ebf49d438ecb2081" and "0f9ee07092ffdc3a6677b2c6499077e5d112ab43" have entirely different histories.

498 changed files with 194719 additions and 4344 deletions

19
.direnv/bin/nix-direnv-reload Executable file
View file

@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Force-rebuild the nix-direnv cache for this project, then bump mtimes so
# direnv picks up the refreshed environment without rebuilding a second time.
set -e
if [[ ! -d "/home/soup/sx/lookbook" ]]; then
	echo "Cannot find source directory; Did you move it?"
	# Fixed quoting: the original nested double quotes ("(Looking for "/path")")
	# closed the string early, leaving the path unquoted and subject to word
	# splitting/globbing. Output is unchanged.
	echo '(Looking for /home/soup/sx/lookbook)'
	echo 'Cannot force reload with this script - use "direnv reload" manually and then try again'
	exit 1
fi
# rebuild the cache forcefully
_nix_direnv_force_reload=1 direnv exec "/home/soup/sx/lookbook" true
# Update the mtime for .envrc.
# This will cause direnv to reload again - but without re-building.
touch "/home/soup/sx/lookbook/.envrc"
# Also update the timestamp of whatever profile_rc we have.
# This makes sure that we know we are up to date.
touch -r "/home/soup/sx/lookbook/.envrc" "/home/soup/sx/lookbook/.direnv"/*.rc

View file

@ -0,0 +1 @@
/nix/store/fg60fb70c4ji2m0mw7zr6577g4y9fkjy-nix-shell-env

30
.gitignore vendored
View file

@ -1,30 +0,0 @@
.direnv/
# Environment files
.env
.env.local
# IDE
.idea/
.vscode/
*.swp
*.swo
# macOS
.DS_Store
# OpenCode
.opencode/
# Go
/bin/
*.exe
*.out
*.test
result
# Windows zone identifiers
*:Zone.Identifier
# Inspiration/test files
/inspo/

View file

@ -1,6 +1,6 @@
APP := ./cmd/server APP := ./cmd/server
VERSION := $(shell git rev-parse --short HEAD 2>/dev/null || echo "dev") VERSION := $(shell git rev-parse --short HEAD 2>/dev/null || echo "dev")
LDFLAGS := -X lookbook/internal/static.Version=$(VERSION) LDFLAGS := -X git.soup.land/soup/lookbook/internal/static.Version=$(VERSION)
.PHONY: dev run migrate rollback fmt test build .PHONY: dev run migrate rollback fmt test build

View file

@ -14,11 +14,11 @@ import (
_ "github.com/jackc/pgx/v5/stdlib" _ "github.com/jackc/pgx/v5/stdlib"
"git.soup.land/soup/lookbook/internal/handlers"
"git.soup.land/soup/lookbook/internal/middleware"
"git.soup.land/soup/lookbook/internal/migrations"
"git.soup.land/soup/lookbook/internal/static"
"git.soup.land/soup/sxgo/ssr" "git.soup.land/soup/sxgo/ssr"
"lookbook/internal/handlers"
"lookbook/internal/middleware"
"lookbook/internal/migrations"
"lookbook/internal/static"
) )
const defaultAddr = ":8080" const defaultAddr = ":8080"
@ -80,7 +80,7 @@ func main() {
func printUsage() { func printUsage() {
fmt.Fprintln(os.Stderr, "Usage:") fmt.Fprintln(os.Stderr, "Usage:")
fmt.Fprintln(os.Stderr, " lookbook web - Run web server") fmt.Fprintln(os.Stderr, " lookbook web - Run web server")
fmt.Fprintln(os.Stderr, "") fmt.Fprintln(os.Stderr, "")
fmt.Fprintln(os.Stderr, "Flags:") fmt.Fprintln(os.Stderr, "Flags:")
fmt.Fprintln(os.Stderr, " -migrate Run database migrations") fmt.Fprintln(os.Stderr, " -migrate Run database migrations")
@ -90,7 +90,6 @@ func printUsage() {
} }
func runWebServer(dbURL string, logger *slog.Logger) { func runWebServer(dbURL string, logger *slog.Logger) {
// Check for pending migrations
pending, err := migrations.CheckPending(context.Background(), dbURL, logger) pending, err := migrations.CheckPending(context.Background(), dbURL, logger)
if err != nil { if err != nil {
logger.Warn("could not check migration status", slog.Any("err", err)) logger.Warn("could not check migration status", slog.Any("err", err))
@ -119,41 +118,18 @@ func runWebServer(dbURL string, logger *slog.Logger) {
} }
router := handlers.NewRouter(rc) router := handlers.NewRouter(rc)
// Pages router.Handle("GET /", handlers.HandleGetGallery)
router.Handle("GET /", handlers.HandleHome) router.Handle("POST /items", handlers.HandlePostItem)
router.Handle("GET /item/{id}", handlers.HandleItemPage) router.Handle("GET /items/{id}", handlers.HandleGetItem)
router.Handle("POST /items/{id}/tags", handlers.HandlePostItemTags)
// Static files router.Handle("POST /items/{id}/delete", handlers.HandleDeleteItem)
router.Handle("POST /items/{id}/refresh", handlers.HandleRefreshItem)
router.Handle("GET /images/{id}", handlers.HandleGetImage)
router.Handle("POST /auth/login", handlers.HandlePostAuthLogin)
router.Handle("POST /auth/logout", handlers.HandlePostAuthLogout)
router.Handle("GET /auth/status", handlers.HandleGetAuthStatus)
router.HandleStd("GET /static/{version}/", static.Handler()) router.HandleStd("GET /static/{version}/", static.Handler())
// Media
router.Handle("GET /media/{id}", handlers.HandleGetMedia)
router.Handle("GET /proxy/video/{id}", handlers.HandleProxyVideo)
// Auth API
router.Handle("POST /api/auth/login", handlers.HandleLogin)
router.Handle("POST /api/auth/logout", handlers.HandleLogout)
router.Handle("GET /api/auth/status", handlers.HandleAuthStatus)
// Items API
router.Handle("GET /api/items", handlers.HandleListItems)
router.Handle("GET /api/items/{id}", handlers.HandleGetItem)
router.Handle("POST /api/items", handlers.HandleCreateItem)
router.Handle("PUT /api/items/{id}", handlers.HandleUpdateItem)
router.Handle("DELETE /api/items/{id}", handlers.HandleDeleteItem)
// Item creation endpoints
router.Handle("POST /api/preview", handlers.HandlePreviewLink)
router.Handle("POST /api/items/from-link", handlers.HandleCreateFromLink)
router.Handle("POST /api/items/upload", handlers.HandleUpload)
router.Handle("POST /api/items/quote", handlers.HandleCreateQuote)
router.Handle("POST /api/items/{id}/refresh", handlers.HandleRefreshMetadata)
router.Handle("POST /api/items/{id}/media", handlers.HandleReplaceMedia)
// Tags API
router.Handle("GET /api/tags", handlers.HandleListTags)
router.Handle("GET /api/tags/suggest", handlers.HandleSuggestTags)
addr := defaultAddr addr := defaultAddr
if envAddr := os.Getenv("ADDR"); envAddr != "" { if envAddr := os.Getenv("ADDR"); envAddr != "" {
addr = envAddr addr = envAddr

View file

@ -1,90 +1,90 @@
{ pkgs ? import <nixpkgs> {} }: { pkgs ? import <nixpkgs> {} }:
let let
lookbook = pkgs.buildGoModule { lookbook = pkgs.buildGoModule {
pname = "lookbook"; pname = "lookbook";
version = "0.1.0"; version = "0.1.0";
src = ./.; src = ./.;
vendorHash = null; # Update after first build vendorHash = null;
env.GOPRIVATE = "git.soup.land"; env.GOPRIVATE = "git.soup.land";
# Build the server binary subPackages = [ "cmd/server" ];
subPackages = [ "cmd/server" ];
postInstall = '' postInstall = ''
mv $out/bin/server $out/bin/lookbook mv $out/bin/server $out/bin/lookbook
''; '';
meta = with pkgs.lib; {
description = "Lookbook inspiration board";
homepage = "https://git.soup.land/soup/lookbook";
};
};
meta = with pkgs.lib; {
description = "Lookbook";
homepage = "https://git.soup.land/soup/lookbook";
};
};
in in
{ {
package = lookbook; package = lookbook;
nixosModule = { config, lib, pkgs, ... }: nixosModule = { config, lib, pkgs, ... }:
let let
cfg = config.services.lookbook; cfg = config.services.lookbook;
in { in {
options.services.lookbook = { options.services.lookbook = {
enable = lib.mkEnableOption "lookbook service"; enable = lib.mkEnableOption "lookbook service";
address = lib.mkOption { address = lib.mkOption {
type = lib.types.str; type = lib.types.str;
default = "127.0.0.1:8080"; default = "127.0.0.1:8083";
description = "Address to listen on"; description = "Address to listen on";
}; };
databaseUrl = lib.mkOption { databaseUrl = lib.mkOption {
type = lib.types.str; type = lib.types.str;
description = "PostgreSQL connection URL"; description = "PostgreSQL connection URL";
}; };
user = lib.mkOption { user = lib.mkOption {
type = lib.types.str; type = lib.types.str;
default = "lookbook"; default = "lookbook";
description = "User to run the service as"; description = "User to run the service as";
}; };
group = lib.mkOption { group = lib.mkOption {
type = lib.types.str; type = lib.types.str;
default = "lookbook"; default = "lookbook";
description = "Group to run the service as"; description = "Group to run the service as";
}; };
}; };
config = lib.mkIf cfg.enable { config = lib.mkIf cfg.enable {
users.users.${cfg.user} = { users.users.${cfg.user} = {
isSystemUser = true; isSystemUser = true;
group = cfg.group; group = cfg.group;
}; };
users.groups.${cfg.group} = {}; users.groups.${cfg.group} = {};
systemd.services.lookbook = { systemd.services.lookbook = {
description = "Lookbook"; description = "Lookbook App";
wantedBy = [ "multi-user.target" ]; wantedBy = [ "multi-user.target" ];
after = [ "network.target" "postgresql.service" ]; after = [ "network.target" "postgresql.service" ];
requires = [ "postgresql.service" ]; requires = [ "postgresql.service" ];
environment = { environment = {
DATABASE_URL = cfg.databaseUrl; DATABASE_URL = cfg.databaseUrl;
ADDR = cfg.address; ADDR = cfg.address;
}; };
serviceConfig = { serviceConfig = {
Type = "simple"; Type = "simple";
User = cfg.user; User = cfg.user;
Group = cfg.group; Group = cfg.group;
ExecStartPre = "${lookbook}/bin/lookbook -migrate"; ExecStartPre = "${lookbook}/bin/lookbook -migrate";
ExecStart = "${lookbook}/bin/lookbook web"; ExecStart = "${lookbook}/bin/lookbook web";
Restart = "always"; Restart = "always";
RestartSec = 5; RestartSec = 5;
}; };
}; };
}; };
}; };
} }

11
go.mod
View file

@ -1,9 +1,11 @@
module lookbook module git.soup.land/soup/lookbook
go 1.23.0 go 1.25.0
require ( require (
git.soup.land/soup/sxgo v0.1.1 git.soup.land/soup/sxgo v0.1.1
github.com/disintegration/imaging v1.6.2
github.com/google/uuid v1.6.0
github.com/jackc/pgx/v5 v5.7.6 github.com/jackc/pgx/v5 v5.7.6
github.com/pressly/goose/v3 v3.26.0 github.com/pressly/goose/v3 v3.26.0
golang.org/x/crypto v0.40.0 golang.org/x/crypto v0.40.0
@ -18,6 +20,7 @@ require (
github.com/mfridman/interpolate v0.0.2 // indirect github.com/mfridman/interpolate v0.0.2 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect github.com/sethvargo/go-retry v0.3.0 // indirect
go.uber.org/multierr v1.11.0 // indirect go.uber.org/multierr v1.11.0 // indirect
golang.org/x/sync v0.16.0 // indirect golang.org/x/image v0.32.0 // indirect
golang.org/x/text v0.27.0 // indirect golang.org/x/sync v0.17.0 // indirect
golang.org/x/text v0.30.0 // indirect
) )

14
go.sum
View file

@ -5,6 +5,8 @@ github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
@ -42,14 +44,18 @@ golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM=
golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o= golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.32.0 h1:6lZQWq75h7L5IWNk0r+SCpUJ6tUVd3v4ZHnbRKLkUDQ=
golang.org/x/image v0.32.0/go.mod h1:/R37rrQmKXtO6tYXAjtDLwQgFLHmhW+V6ayXlxzP2Pc=
golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs=
golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=

View file

@ -6,8 +6,9 @@ import (
type Page struct { type Page struct {
Title string Title string
IsAdmin bool
Content ssr.Renderable Content ssr.Renderable
ShowNav bool
HasAuth bool
} }
func (p Page) Render(sw *ssr.Writer) error { func (p Page) Render(sw *ssr.Writer) error {
@ -17,26 +18,33 @@ func (p Page) Render(sw *ssr.Writer) error {
<head> <head>
<meta charset="UTF-8"> <meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{if .Title}}{{.Title}} - {{end}}Lookbook</title> <meta name="color-scheme" content="light dark">
<link rel="stylesheet" href="{{staticURL "css/app.css"}}"> <meta name="darkreader-lock">
<title>{{.Title}} - Lookbook</title>
<link rel="stylesheet" href="{{staticURL "css/pico.min.css"}}">
<link rel="stylesheet" href="{{staticURL "css/styles.css"}}">
</head> </head>
<body> <body>
<header class="header"> <header class="site-header">
<a href="/" class="logo">LOOKBOOK</a> <div class="title">Lookbook</div>
<nav class="nav"> {{if .ShowNav}}
<a href="/">ALL</a> <nav class="site-nav">
<a href="/">All</a>
<button type="button" class="ghost" onclick="auth()">Auth</button>
<button type="button" class="ghost" onclick="logout()" {{if not .HasAuth}}hidden{{end}}>Logout</button>
</nav> </nav>
{{end}}
</header> </header>
<main class="main"> <main class="container">
`) `)
p.Content.Render(sw) p.Content.Render(sw)
return sw.Tmpl(p, ` return sw.Tmpl(p, `
</main> </main>
<script src="{{staticURL "js/app.js"}}"></script>
<script> <script>
window.IS_ADMIN = {{.IsAdmin}}; window.LOOKBOOK_AUTH = {{if .HasAuth}}true{{else}}false{{end}};
</script> </script>
<script src="{{staticURL "js/app.js"}}"></script>
</body> </body>
</html> </html>
`) `)

View file

@ -1,41 +0,0 @@
package admin
import (
"context"
"database/sql"
"time"
)
// Row mirrors one record of the admin table; the queries below address
// only the singleton row with id = 1.
type Row struct {
ID int // always queried as id = 1 (see QGet / QSetPassword)
PasswordHash []byte // nil if not set
CreatedAt time.Time // row creation timestamp
}
// QGet fetches the singleton admin row (id = 1). If the row is missing,
// the sql.ErrNoRows from Scan is returned to the caller unchanged.
func QGet(ctx context.Context, db *sql.DB) (Row, error) {
	var r Row
	err := db.QueryRowContext(ctx,
		`SELECT id, password_hash, created_at FROM admin WHERE id = 1`,
	).Scan(&r.ID, &r.PasswordHash, &r.CreatedAt)
	return r, err
}
// QSetPassword stores hash as the password hash of the singleton admin row.
func QSetPassword(ctx context.Context, db *sql.DB, hash []byte) error {
	const query = `UPDATE admin SET password_hash = $1 WHERE id = 1`
	_, err := db.ExecContext(ctx, query, hash)
	return err
}
// QHasPassword reports whether the admin row already carries a password hash.
func QHasPassword(ctx context.Context, db *sql.DB) (bool, error) {
	var has bool
	err := db.QueryRowContext(ctx,
		`SELECT password_hash IS NOT NULL FROM admin WHERE id = 1`,
	).Scan(&has)
	return has, err
}

View file

@ -0,0 +1,45 @@
package auth
import (
"context"
"database/sql"
"time"
)
// Row is a single credential record from the auth table.
type Row struct {
ID int64 // primary key
PasswordHash []byte // hashed password; hashing algorithm not visible in this file
CreatedAt time.Time // row creation timestamp
}
// QGet returns the oldest auth row (lowest id), or (nil, nil) when no
// credentials have been created yet.
func QGet(ctx context.Context, db *sql.DB) (*Row, error) {
	var r Row
	err := db.QueryRowContext(ctx, `
SELECT id, password_hash, created_at
FROM auth
ORDER BY id ASC
LIMIT 1
`).Scan(&r.ID, &r.PasswordHash, &r.CreatedAt)
	switch {
	case err == sql.ErrNoRows:
		return nil, nil
	case err != nil:
		return nil, err
	}
	return &r, nil
}
// QCreate inserts a credential row with the given password hash and
// returns the stored record.
func QCreate(ctx context.Context, db *sql.DB, hash []byte) (Row, error) {
	var r Row
	scanErr := db.QueryRowContext(ctx, `
INSERT INTO auth (password_hash)
VALUES ($1)
RETURNING id, password_hash, created_at
`, hash).Scan(&r.ID, &r.PasswordHash, &r.CreatedAt)
	return r, scanErr
}
// QUpdate replaces the password hash of the auth row identified by id.
func QUpdate(ctx context.Context, db *sql.DB, id int64, hash []byte) error {
	const query = `UPDATE auth SET password_hash = $2 WHERE id = $1`
	_, err := db.ExecContext(ctx, query, id, hash)
	return err
}

View file

@ -0,0 +1,119 @@
package image
import (
"context"
"database/sql"
"time"
)
// Row is a full image record, including the raw encoded payload bytes.
type Row struct {
ID int64 // primary key
ItemID int64 // owning item
OriginalURL string // source URL recorded for the image (may be empty for uploads — confirm with callers)
ContentType string // MIME type of Bytes
Bytes []byte // encoded image data
Width int // pixel width
Height int // pixel height
IsThumb bool // true when this row is the item's thumbnail; sorts first in QListByItem
CreatedAt time.Time // row creation timestamp
}
// Ref is a lightweight reference to an image row without the payload,
// as returned by QListPrimaryRefsByItems.
type Ref struct {
ID int64 // image primary key
ItemID int64 // owning item
IsThumb bool // whether the referenced image is a thumbnail
}
// QListByItem returns every image row belonging to itemID, thumbnails
// first, then by ascending id.
func QListByItem(ctx context.Context, db *sql.DB, itemID int64) ([]Row, error) {
	rows, err := db.QueryContext(ctx, `
SELECT id, item_id, original_url, content_type, bytes, width, height, is_thumb, created_at
FROM image
WHERE item_id = $1
ORDER BY is_thumb DESC, id ASC
`, itemID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var out []Row
	for rows.Next() {
		var r Row
		scanErr := rows.Scan(&r.ID, &r.ItemID, &r.OriginalURL, &r.ContentType, &r.Bytes, &r.Width, &r.Height, &r.IsThumb, &r.CreatedAt)
		if scanErr != nil {
			return nil, scanErr
		}
		out = append(out, r)
	}
	return out, rows.Err()
}
// QListPrimaryRefsByItems returns, for each item in itemIDs that has at
// least one image, a Ref to its primary image — the first row ordered by
// is_thumb DESC, id ASC. Items with no images are simply absent from the
// returned map; an empty input yields an empty (non-nil) map.
func QListPrimaryRefsByItems(ctx context.Context, db *sql.DB, itemIDs []int64) (map[int64]Ref, error) {
	if len(itemIDs) == 0 {
		return map[int64]Ref{}, nil
	}
	rows, err := db.QueryContext(ctx, `
SELECT id, item_id, is_thumb
FROM image
WHERE item_id = ANY($1)
ORDER BY item_id ASC, is_thumb DESC, id ASC
`, itemIDs)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	// Pre-size: the map holds at most one entry per requested item.
	results := make(map[int64]Ref, len(itemIDs))
	for rows.Next() {
		var row Ref
		if err := rows.Scan(&row.ID, &row.ItemID, &row.IsThumb); err != nil {
			return nil, err
		}
		// Rows arrive grouped per item with the primary image first, so
		// keep only the first row seen for each item.
		if _, exists := results[row.ItemID]; !exists {
			results[row.ItemID] = row
		}
	}
	return results, rows.Err()
}
// QFindByID loads a single image row by primary key.
// It returns (nil, nil) when no such image exists.
func QFindByID(ctx context.Context, db *sql.DB, id int64) (*Row, error) {
	var r Row
	scanErr := db.QueryRowContext(ctx, `
SELECT id, item_id, original_url, content_type, bytes, width, height, is_thumb, created_at
FROM image
WHERE id = $1
LIMIT 1
`, id).Scan(&r.ID, &r.ItemID, &r.OriginalURL, &r.ContentType, &r.Bytes, &r.Width, &r.Height, &r.IsThumb, &r.CreatedAt)
	switch {
	case scanErr == sql.ErrNoRows:
		return nil, nil
	case scanErr != nil:
		return nil, scanErr
	}
	return &r, nil
}
// QCreate inserts a new image row and returns the stored record as the
// database materialized it (including generated id and created_at).
func QCreate(ctx context.Context, db *sql.DB, itemID int64, originalURL, contentType string, bytes []byte, width, height int, isThumb bool) (Row, error) {
	var r Row
	scanErr := db.QueryRowContext(ctx, `
INSERT INTO image (item_id, original_url, content_type, bytes, width, height, is_thumb)
VALUES ($1, $2, $3, $4, $5, $6, $7)
RETURNING id, item_id, original_url, content_type, bytes, width, height, is_thumb, created_at
`, itemID, originalURL, contentType, bytes, width, height, isThumb).Scan(
		&r.ID, &r.ItemID, &r.OriginalURL, &r.ContentType, &r.Bytes,
		&r.Width, &r.Height, &r.IsThumb, &r.CreatedAt,
	)
	return r, scanErr
}
// QDeleteByItem removes every image row belonging to itemID.
func QDeleteByItem(ctx context.Context, db *sql.DB, itemID int64) error {
	const query = `DELETE FROM image WHERE item_id = $1`
	_, err := db.ExecContext(ctx, query, itemID)
	return err
}

View file

@ -3,137 +3,30 @@ package item
import ( import (
"context" "context"
"database/sql" "database/sql"
"fmt"
"time" "time"
"github.com/jackc/pgx/v5/pgtype" "github.com/google/uuid"
) )
// Nullable creates a sql.Null[T] from a pointer.
func Nullable[T any](v *T) sql.Null[T] {
if v == nil {
return sql.Null[T]{}
}
return sql.Null[T]{V: *v, Valid: true}
}
// Ptr returns a pointer from sql.Null[T], or nil if not valid.
func Ptr[T any](n sql.Null[T]) *T {
if !n.Valid {
return nil
}
return &n.V
}
type Row struct { type Row struct {
ID int64 ID int64
PubID string PubID uuid.UUID
Title sql.Null[string] SourceURL string
Description sql.Null[string] Title string
LinkURL sql.Null[string] Description string
ItemType string // 'image', 'video', 'quote', 'embed' SiteName string
EmbedProvider sql.Null[string] CreatedAt time.Time
EmbedVideoID sql.Null[string] UpdatedAt time.Time
EmbedHTML sql.Null[string] DeletedAt sql.Null[time.Time]
EmbedVideoURL sql.Null[string]
CreatedAt time.Time
DeletedAt sql.Null[time.Time]
} }
type CreateParams struct {
Title sql.Null[string]
Description sql.Null[string]
LinkURL sql.Null[string]
ItemType string
EmbedProvider sql.Null[string]
EmbedVideoID sql.Null[string]
EmbedHTML sql.Null[string]
EmbedVideoURL sql.Null[string]
}
// QCreate creates a new item.
func QCreate(ctx context.Context, db *sql.DB, p CreateParams) (Row, error) {
query := `
INSERT INTO item (title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, embed_video_url)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
RETURNING id, pub_id, title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, embed_video_url, created_at, deleted_at
`
var row Row
var pubID pgtype.UUID
err := db.QueryRowContext(ctx, query,
p.Title, p.Description, p.LinkURL, p.ItemType, p.EmbedProvider, p.EmbedVideoID, p.EmbedHTML, p.EmbedVideoURL,
).Scan(
&row.ID, &pubID, &row.Title, &row.Description, &row.LinkURL,
&row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, &row.EmbedVideoURL,
&row.CreatedAt, &row.DeletedAt,
)
if err == nil {
row.PubID = formatUUID(pubID)
}
return row, err
}
// QFindByPubID finds an item by its public ID.
func QFindByPubID(ctx context.Context, db *sql.DB, pubID string) (*Row, error) {
query := `
SELECT id, pub_id, title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, embed_video_url, created_at, deleted_at
FROM item
WHERE pub_id = $1
`
var row Row
var pubUUID pgtype.UUID
err := db.QueryRowContext(ctx, query, pubID).Scan(
&row.ID, &pubUUID, &row.Title, &row.Description, &row.LinkURL,
&row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, &row.EmbedVideoURL,
&row.CreatedAt, &row.DeletedAt,
)
if err == sql.ErrNoRows {
return nil, nil
}
if err != nil {
return nil, err
}
row.PubID = formatUUID(pubUUID)
return &row, nil
}
// QFindByID finds an item by its internal ID.
func QFindByID(ctx context.Context, db *sql.DB, id int64) (*Row, error) {
query := `
SELECT id, pub_id, title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, embed_video_url, created_at, deleted_at
FROM item
WHERE id = $1
`
var row Row
var pubUUID pgtype.UUID
err := db.QueryRowContext(ctx, query, id).Scan(
&row.ID, &pubUUID, &row.Title, &row.Description, &row.LinkURL,
&row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, &row.EmbedVideoURL,
&row.CreatedAt, &row.DeletedAt,
)
if err == sql.ErrNoRows {
return nil, nil
}
if err != nil {
return nil, err
}
row.PubID = formatUUID(pubUUID)
return &row, nil
}
// QList returns all non-deleted items, newest first.
func QList(ctx context.Context, db *sql.DB) ([]Row, error) { func QList(ctx context.Context, db *sql.DB) ([]Row, error) {
query := ` rows, err := db.QueryContext(ctx, `
SELECT id, pub_id, title, description, link_url, item_type, embed_provider, embed_video_id, embed_html, embed_video_url, created_at, deleted_at SELECT id, pub_id, source_url, title, description, site_name, created_at, updated_at, deleted_at
FROM item FROM item
WHERE deleted_at IS NULL WHERE deleted_at IS NULL
ORDER BY created_at DESC ORDER BY created_at DESC
` `)
rows, err := db.QueryContext(ctx, query)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -142,103 +35,66 @@ func QList(ctx context.Context, db *sql.DB) ([]Row, error) {
var items []Row var items []Row
for rows.Next() { for rows.Next() {
var row Row var row Row
var pubUUID pgtype.UUID if err := rows.Scan(&row.ID, &row.PubID, &row.SourceURL, &row.Title, &row.Description, &row.SiteName, &row.CreatedAt, &row.UpdatedAt, &row.DeletedAt); err != nil {
if err := rows.Scan(
&row.ID, &pubUUID, &row.Title, &row.Description, &row.LinkURL,
&row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, &row.EmbedVideoURL,
&row.CreatedAt, &row.DeletedAt,
); err != nil {
return nil, err return nil, err
} }
row.PubID = formatUUID(pubUUID)
items = append(items, row) items = append(items, row)
} }
return items, rows.Err() return items, rows.Err()
} }
// QListByTag returns all non-deleted items with a specific tag, newest first. func QFindByID(ctx context.Context, db *sql.DB, id int64) (*Row, error) {
func QListByTag(ctx context.Context, db *sql.DB, tagName string) ([]Row, error) { var row Row
query := ` err := db.QueryRowContext(ctx, `
SELECT i.id, i.pub_id, i.title, i.description, i.link_url, i.item_type, i.embed_provider, i.embed_video_id, i.embed_html, i.embed_video_url, i.created_at, i.deleted_at SELECT id, pub_id, source_url, title, description, site_name, created_at, updated_at, deleted_at
FROM item i FROM item
JOIN item_tag it ON i.id = it.item_id
JOIN tag t ON it.tag_id = t.id
WHERE i.deleted_at IS NULL AND t.name = $1
ORDER BY i.created_at DESC
`
rows, err := db.QueryContext(ctx, query, tagName)
if err != nil {
return nil, err
}
defer rows.Close()
var items []Row
for rows.Next() {
var row Row
var pubUUID pgtype.UUID
if err := rows.Scan(
&row.ID, &pubUUID, &row.Title, &row.Description, &row.LinkURL,
&row.ItemType, &row.EmbedProvider, &row.EmbedVideoID, &row.EmbedHTML, &row.EmbedVideoURL,
&row.CreatedAt, &row.DeletedAt,
); err != nil {
return nil, err
}
row.PubID = formatUUID(pubUUID)
items = append(items, row)
}
return items, rows.Err()
}
type UpdateParams struct {
Title sql.Null[string]
Description sql.Null[string]
LinkURL sql.Null[string]
}
// QUpdate updates an item's editable fields.
func QUpdate(ctx context.Context, db *sql.DB, id int64, p UpdateParams) error {
query := `
UPDATE item
SET title = $2, description = $3, link_url = $4
WHERE id = $1 WHERE id = $1
` LIMIT 1
_, err := db.ExecContext(ctx, query, id, p.Title, p.Description, p.LinkURL) `, id).Scan(&row.ID, &row.PubID, &row.SourceURL, &row.Title, &row.Description, &row.SiteName, &row.CreatedAt, &row.UpdatedAt, &row.DeletedAt)
return err if err == sql.ErrNoRows {
} return nil, nil
// QUpdateType updates an item's type.
func QUpdateType(ctx context.Context, db *sql.DB, id int64, itemType string) error {
query := `UPDATE item SET item_type = $2 WHERE id = $1`
_, err := db.ExecContext(ctx, query, id, itemType)
return err
}
// QUpdateVideoURL updates an item's embed video URL.
func QUpdateVideoURL(ctx context.Context, db *sql.DB, id int64, videoURL string) error {
query := `UPDATE item SET embed_video_url = $2 WHERE id = $1`
_, err := db.ExecContext(ctx, query, id, videoURL)
return err
}
// QSoftDelete soft deletes an item.
func QSoftDelete(ctx context.Context, db *sql.DB, id int64) error {
query := `UPDATE item SET deleted_at = NOW() WHERE id = $1`
_, err := db.ExecContext(ctx, query, id)
return err
}
// QRestore restores a soft-deleted item.
func QRestore(ctx context.Context, db *sql.DB, id int64) error {
query := `UPDATE item SET deleted_at = NULL WHERE id = $1`
_, err := db.ExecContext(ctx, query, id)
return err
}
func formatUUID(u pgtype.UUID) string {
if !u.Valid {
return ""
} }
b := u.Bytes if err != nil {
return fmt.Sprintf("%x-%x-%x-%x-%x", b[0:4], b[4:6], b[6:8], b[8:10], b[10:16]) return nil, err
}
return &row, nil
}
func QCreate(ctx context.Context, db *sql.DB, sourceURL, title, description, siteName string) (Row, error) {
var row Row
err := db.QueryRowContext(ctx, `
INSERT INTO item (source_url, title, description, site_name)
VALUES ($1, $2, $3, $4)
RETURNING id, pub_id, source_url, title, description, site_name, created_at, updated_at, deleted_at
`, sourceURL, title, description, siteName).Scan(
&row.ID,
&row.PubID,
&row.SourceURL,
&row.Title,
&row.Description,
&row.SiteName,
&row.CreatedAt,
&row.UpdatedAt,
&row.DeletedAt,
)
return row, err
}
func QUpdateMeta(ctx context.Context, db *sql.DB, id int64, title, description, siteName string) error {
_, err := db.ExecContext(ctx, `
UPDATE item
SET title = $2, description = $3, site_name = $4, updated_at = NOW()
WHERE id = $1
`, id, title, description, siteName)
return err
}
func QSoftDelete(ctx context.Context, db *sql.DB, id int64) error {
_, err := db.ExecContext(ctx, `UPDATE item SET deleted_at = NOW() WHERE id = $1`, id)
return err
}
func QRestore(ctx context.Context, db *sql.DB, id int64) error {
_, err := db.ExecContext(ctx, `UPDATE item SET deleted_at = NULL WHERE id = $1`, id)
return err
} }

View file

@ -1,159 +0,0 @@
package media
import (
"context"
"database/sql"
"time"
)
// Row is a full media record, including the raw payload bytes.
type Row struct {
ID int64 // primary key
ItemID int64 // owning item
MediaType string // 'original' (user uploads), 'image' (fetched from URLs)
ContentType string // MIME type
Data []byte // raw media payload
Width *int // pixel width; nil when unknown
Height *int // pixel height; nil when unknown
SourceURL *string // Original URL the media was fetched from
CreatedAt time.Time // row creation timestamp
}
// CreateParams carries the fields needed to insert a media row via QCreate.
type CreateParams struct {
ItemID int64 // owning item
MediaType string // 'original' or 'image' (same values as Row.MediaType)
ContentType string // MIME type of Data
Data []byte // raw media payload
Width *int // optional pixel width
Height *int // optional pixel height
SourceURL *string // optional source URL
}
// QCreate inserts a new media record and returns the stored row as the
// database materialized it (including generated id and created_at).
func QCreate(ctx context.Context, db *sql.DB, p CreateParams) (Row, error) {
	const query = `
INSERT INTO media (item_id, media_type, content_type, data, width, height, source_url)
VALUES ($1, $2, $3, $4, $5, $6, $7)
RETURNING id, item_id, media_type, content_type, data, width, height, source_url, created_at
`
	var r Row
	scanErr := db.QueryRowContext(ctx, query,
		p.ItemID, p.MediaType, p.ContentType, p.Data, p.Width, p.Height, p.SourceURL,
	).Scan(
		&r.ID, &r.ItemID, &r.MediaType, &r.ContentType, &r.Data,
		&r.Width, &r.Height, &r.SourceURL, &r.CreatedAt,
	)
	return r, scanErr
}
// QFindByID loads one media record by primary key.
// It returns (nil, nil) when no such record exists.
func QFindByID(ctx context.Context, db *sql.DB, id int64) (*Row, error) {
	const query = `
SELECT id, item_id, media_type, content_type, data, width, height, source_url, created_at
FROM media
WHERE id = $1
`
	var r Row
	scanErr := db.QueryRowContext(ctx, query, id).Scan(
		&r.ID, &r.ItemID, &r.MediaType, &r.ContentType, &r.Data,
		&r.Width, &r.Height, &r.SourceURL, &r.CreatedAt,
	)
	switch {
	case scanErr == sql.ErrNoRows:
		return nil, nil
	case scanErr != nil:
		return nil, scanErr
	}
	return &r, nil
}
// QFindByItemID returns all media rows for itemID, ordered by ascending id.
func QFindByItemID(ctx context.Context, db *sql.DB, itemID int64) ([]Row, error) {
	const query = `
SELECT id, item_id, media_type, content_type, data, width, height, source_url, created_at
FROM media
WHERE item_id = $1
ORDER BY id ASC
`
	rows, err := db.QueryContext(ctx, query, itemID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var out []Row
	for rows.Next() {
		var r Row
		scanErr := rows.Scan(
			&r.ID, &r.ItemID, &r.MediaType, &r.ContentType, &r.Data,
			&r.Width, &r.Height, &r.SourceURL, &r.CreatedAt,
		)
		if scanErr != nil {
			return nil, scanErr
		}
		out = append(out, r)
	}
	return out, rows.Err()
}
// QFindThumbnailByItemID finds the first image (lowest ID) for an item,
// which serves as its thumbnail.
// Returns (nil, nil) when the item has no image media.
func QFindThumbnailByItemID(ctx context.Context, db *sql.DB, itemID int64) (*Row, error) {
	query := `
	SELECT id, item_id, media_type, content_type, data, width, height, source_url, created_at
	FROM media
	WHERE item_id = $1 AND media_type = 'image'
	ORDER BY id ASC
	LIMIT 1
	`
	var row Row
	err := db.QueryRowContext(ctx, query, itemID).Scan(
		&row.ID, &row.ItemID, &row.MediaType, &row.ContentType, &row.Data,
		&row.Width, &row.Height, &row.SourceURL, &row.CreatedAt,
	)
	// errors.Is also matches wrapped errors, unlike a bare == comparison.
	if errors.Is(err, sql.ErrNoRows) {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}
	return &row, nil
}
// QFindOriginalByItemID finds the original (user-uploaded) media for an item.
// Returns (nil, nil) when the item has no original media.
func QFindOriginalByItemID(ctx context.Context, db *sql.DB, itemID int64) (*Row, error) {
	// ORDER BY makes the choice deterministic if an item ever has more than
	// one 'original' row; previously LIMIT 1 without an order was
	// implementation-defined.
	query := `
	SELECT id, item_id, media_type, content_type, data, width, height, source_url, created_at
	FROM media
	WHERE item_id = $1 AND media_type = 'original'
	ORDER BY id ASC
	LIMIT 1
	`
	var row Row
	err := db.QueryRowContext(ctx, query, itemID).Scan(
		&row.ID, &row.ItemID, &row.MediaType, &row.ContentType, &row.Data,
		&row.Width, &row.Height, &row.SourceURL, &row.CreatedAt,
	)
	// errors.Is also matches wrapped errors, unlike a bare == comparison.
	if errors.Is(err, sql.ErrNoRows) {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}
	return &row, nil
}
// QDelete removes the media row with the given ID; deleting a missing row
// is not an error.
func QDelete(ctx context.Context, db *sql.DB, id int64) error {
	_, err := db.ExecContext(ctx, `DELETE FROM media WHERE id = $1`, id)
	return err
}
// QDeleteByItemID removes every media row attached to the given item.
func QDeleteByItemID(ctx context.Context, db *sql.DB, itemID int64) error {
	_, err := db.ExecContext(ctx, `DELETE FROM media WHERE item_id = $1`, itemID)
	return err
}

View file

@ -8,66 +8,44 @@ import (
type Row struct { type Row struct {
ID int64 ID int64
SessionID string Token string
CreatedAt time.Time CreatedAt time.Time
ExpiresAt time.Time ExpiresAt time.Time
} }
// QCreate creates a new session. func QCreate(ctx context.Context, db *sql.DB, token string, expiresAt time.Time) (Row, error) {
func QCreate(ctx context.Context, db *sql.DB, sessionID string, expiresAt time.Time) (Row, error) {
query := `
INSERT INTO session (session_id, expires_at)
VALUES ($1, $2)
RETURNING id, session_id, created_at, expires_at
`
var row Row var row Row
err := db.QueryRowContext(ctx, query, sessionID, expiresAt).Scan( err := db.QueryRowContext(ctx, `
&row.ID, INSERT INTO session (token, expires_at)
&row.SessionID, VALUES ($1, $2)
&row.CreatedAt, RETURNING id, token, created_at, expires_at
&row.ExpiresAt, `, token, expiresAt).Scan(&row.ID, &row.Token, &row.CreatedAt, &row.ExpiresAt)
)
return row, err return row, err
} }
// QFindBySessionID finds a session by its session ID. func QFindByToken(ctx context.Context, db *sql.DB, token string) (*Row, error) {
// Returns (nil, nil) if the session does not exist.
func QFindBySessionID(ctx context.Context, db *sql.DB, sessionID string) (*Row, error) {
query := `
SELECT id, session_id, created_at, expires_at
FROM session
WHERE session_id = $1
LIMIT 1
`
var row Row var row Row
err := db.QueryRowContext(ctx, query, sessionID).Scan( err := db.QueryRowContext(ctx, `
&row.ID, SELECT id, token, created_at, expires_at
&row.SessionID, FROM session
&row.CreatedAt, WHERE token = $1
&row.ExpiresAt, LIMIT 1
) `, token).Scan(&row.ID, &row.Token, &row.CreatedAt, &row.ExpiresAt)
if err == sql.ErrNoRows { if err == sql.ErrNoRows {
return nil, nil return nil, nil
} }
if err != nil { if err != nil {
return nil, err return nil, err
} }
return &row, nil return &row, nil
} }
// QDelete deletes a session by its session ID. func QDelete(ctx context.Context, db *sql.DB, token string) error {
func QDelete(ctx context.Context, db *sql.DB, sessionID string) error { _, err := db.ExecContext(ctx, `DELETE FROM session WHERE token = $1`, token)
query := `DELETE FROM session WHERE session_id = $1`
_, err := db.ExecContext(ctx, query, sessionID)
return err return err
} }
// QDeleteExpired deletes all expired sessions.
func QDeleteExpired(ctx context.Context, db *sql.DB) error { func QDeleteExpired(ctx context.Context, db *sql.DB) error {
query := `DELETE FROM session WHERE expires_at < NOW()` _, err := db.ExecContext(ctx, `DELETE FROM session WHERE expires_at < NOW()`)
_, err := db.ExecContext(ctx, query)
return err return err
} }

View file

@ -3,151 +3,139 @@ package tag
import ( import (
"context" "context"
"database/sql" "database/sql"
"strings"
"time"
) )
type Row struct { type Row struct {
ID int64 ID int64
Name string Name string
CreatedAt time.Time
} }
// QFindOrCreate finds a tag by name or creates it if it doesn't exist. type ItemTag struct {
func QFindOrCreate(ctx context.Context, db *sql.DB, name string) (Row, error) { ItemID int64
query := ` TagID int64
Name string
}
func QList(ctx context.Context, db *sql.DB) ([]Row, error) {
rows, err := db.QueryContext(ctx, `
SELECT id, name, created_at
FROM tag
ORDER BY name ASC
`)
if err != nil {
return nil, err
}
defer rows.Close()
var tags []Row
for rows.Next() {
var row Row
if err := rows.Scan(&row.ID, &row.Name, &row.CreatedAt); err != nil {
return nil, err
}
tags = append(tags, row)
}
return tags, rows.Err()
}
func QListByItem(ctx context.Context, db *sql.DB, itemID int64) ([]Row, error) {
rows, err := db.QueryContext(ctx, `
SELECT t.id, t.name, t.created_at
FROM tag t
JOIN item_tag it ON it.tag_id = t.id
WHERE it.item_id = $1
ORDER BY t.name ASC
`, itemID)
if err != nil {
return nil, err
}
defer rows.Close()
var tags []Row
for rows.Next() {
var row Row
if err := rows.Scan(&row.ID, &row.Name, &row.CreatedAt); err != nil {
return nil, err
}
tags = append(tags, row)
}
return tags, rows.Err()
}
func QListItemTags(ctx context.Context, db *sql.DB) ([]ItemTag, error) {
rows, err := db.QueryContext(ctx, `
SELECT it.item_id, t.id, t.name
FROM item_tag it
JOIN tag t ON t.id = it.tag_id
ORDER BY t.name ASC
`)
if err != nil {
return nil, err
}
defer rows.Close()
var entries []ItemTag
for rows.Next() {
var row ItemTag
if err := rows.Scan(&row.ItemID, &row.TagID, &row.Name); err != nil {
return nil, err
}
entries = append(entries, row)
}
return entries, rows.Err()
}
func QUpsert(ctx context.Context, db *sql.DB, name string) (Row, error) {
cleaned := strings.TrimSpace(strings.ToLower(name))
var row Row
err := db.QueryRowContext(ctx, `
INSERT INTO tag (name) INSERT INTO tag (name)
VALUES ($1) VALUES ($1)
ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
RETURNING id, name RETURNING id, name, created_at
` `, cleaned).Scan(&row.ID, &row.Name, &row.CreatedAt)
var row Row
err := db.QueryRowContext(ctx, query, name).Scan(&row.ID, &row.Name)
return row, err return row, err
} }
// QFindByName finds a tag by name. func QReplaceItemTags(ctx context.Context, db *sql.DB, itemID int64, names []string) error {
func QFindByName(ctx context.Context, db *sql.DB, name string) (*Row, error) { tx, err := db.BeginTx(ctx, nil)
query := `SELECT id, name FROM tag WHERE name = $1`
var row Row
err := db.QueryRowContext(ctx, query, name).Scan(&row.ID, &row.Name)
if err == sql.ErrNoRows {
return nil, nil
}
if err != nil { if err != nil {
return nil, err return err
} }
return &row, nil defer tx.Rollback()
}
// QList returns all tags ordered by name. _, err = tx.ExecContext(ctx, `DELETE FROM item_tag WHERE item_id = $1`, itemID)
func QList(ctx context.Context, db *sql.DB) ([]Row, error) {
query := `SELECT id, name FROM tag ORDER BY name ASC`
rows, err := db.QueryContext(ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
var tags []Row
for rows.Next() {
var row Row
if err := rows.Scan(&row.ID, &row.Name); err != nil {
return nil, err
}
tags = append(tags, row)
}
return tags, rows.Err()
}
// QSuggest returns tags matching a prefix.
func QSuggest(ctx context.Context, db *sql.DB, prefix string, limit int) ([]Row, error) {
query := `
SELECT id, name FROM tag
WHERE name ILIKE $1 || '%'
ORDER BY name ASC
LIMIT $2
`
rows, err := db.QueryContext(ctx, query, prefix, limit)
if err != nil {
return nil, err
}
defer rows.Close()
var tags []Row
for rows.Next() {
var row Row
if err := rows.Scan(&row.ID, &row.Name); err != nil {
return nil, err
}
tags = append(tags, row)
}
return tags, rows.Err()
}
// QTagsForItem returns all tags for an item.
func QTagsForItem(ctx context.Context, db *sql.DB, itemID int64) ([]Row, error) {
query := `
SELECT t.id, t.name
FROM tag t
JOIN item_tag it ON t.id = it.tag_id
WHERE it.item_id = $1
ORDER BY t.name ASC
`
rows, err := db.QueryContext(ctx, query, itemID)
if err != nil {
return nil, err
}
defer rows.Close()
var tags []Row
for rows.Next() {
var row Row
if err := rows.Scan(&row.ID, &row.Name); err != nil {
return nil, err
}
tags = append(tags, row)
}
return tags, rows.Err()
}
// QAddTagToItem adds a tag to an item.
func QAddTagToItem(ctx context.Context, db *sql.DB, itemID, tagID int64) error {
query := `
INSERT INTO item_tag (item_id, tag_id)
VALUES ($1, $2)
ON CONFLICT DO NOTHING
`
_, err := db.ExecContext(ctx, query, itemID, tagID)
return err
}
// QRemoveTagFromItem removes a tag from an item.
func QRemoveTagFromItem(ctx context.Context, db *sql.DB, itemID, tagID int64) error {
query := `DELETE FROM item_tag WHERE item_id = $1 AND tag_id = $2`
_, err := db.ExecContext(ctx, query, itemID, tagID)
return err
}
// QSetTagsForItem replaces all tags for an item with the given tag names.
func QSetTagsForItem(ctx context.Context, db *sql.DB, itemID int64, tagNames []string) error {
// Delete existing tags
_, err := db.ExecContext(ctx, `DELETE FROM item_tag WHERE item_id = $1`, itemID)
if err != nil { if err != nil {
return err return err
} }
// Add new tags for _, name := range names {
for _, name := range tagNames { if strings.TrimSpace(name) == "" {
tag, err := QFindOrCreate(ctx, db, name) continue
}
var tagID int64
err = tx.QueryRowContext(ctx, `
INSERT INTO tag (name)
VALUES ($1)
ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
RETURNING id
`, strings.TrimSpace(strings.ToLower(name))).Scan(&tagID)
if err != nil { if err != nil {
return err return err
} }
if err := QAddTagToItem(ctx, db, itemID, tag.ID); err != nil {
_, err = tx.ExecContext(ctx, `
INSERT INTO item_tag (item_id, tag_id)
VALUES ($1, $2)
ON CONFLICT DO NOTHING
`, itemID, tagID)
if err != nil {
return err return err
} }
} }
return nil
return tx.Commit()
} }

View file

@ -1,286 +0,0 @@
package embed
import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"regexp"
	"strings"
	"time"
)
// Provider identifies the video hosting platform.
type Provider string

const (
	ProviderYouTube Provider = "youtube"
	ProviderVimeo   Provider = "vimeo"
	ProviderTwitter Provider = "twitter"
)

// VideoInfo contains information about an embedded video or social post.
type VideoInfo struct {
	Provider      Provider
	VideoID       string // provider-specific ID (YouTube video ID, Vimeo numeric ID, tweet ID)
	Title         string
	Description   string
	ThumbnailURL  string   // First/primary thumbnail (for backward compatibility)
	ThumbnailURLs []string // All thumbnail URLs (for multi-image tweets)
	EmbedHTML     string   // ready-to-render embed markup
	VideoURL      string   // Direct video URL (for Twitter videos)
}

var (
	// youtubeRegex matches watch/embed/v/shorts paths and youtu.be links;
	// group 1 captures the 11-character video ID.
	youtubeRegex = regexp.MustCompile(`(?:youtube\.com/(?:watch\?v=|embed/|v/|shorts/)|youtu\.be/)([a-zA-Z0-9_-]{11})`)
	// vimeoRegex captures the numeric Vimeo video ID in group 1.
	vimeoRegex = regexp.MustCompile(`(?:vimeo\.com/(?:video/)?|player\.vimeo\.com/video/)(\d+)`)
	// twitterRegex captures (1) the username and (2) the numeric tweet ID.
	twitterRegex = regexp.MustCompile(`(?:twitter\.com|x\.com)/([^/]+)/status/(\d+)`)
	// tcoRegex strips trailing t.co short links appended to tweet text.
	tcoRegex = regexp.MustCompile(`\s*https://t\.co/\S+`)
)
// Detect checks whether targetURL points at a supported provider (YouTube,
// Vimeo, or Twitter/X) and, if so, fetches its metadata.
// Returns (nil, nil) for URLs that match no known provider.
func Detect(ctx context.Context, targetURL string) (*VideoInfo, error) {
	if m := youtubeRegex.FindStringSubmatch(targetURL); len(m) > 1 {
		return fetchYouTube(ctx, m[1])
	}
	if m := vimeoRegex.FindStringSubmatch(targetURL); len(m) > 1 {
		return fetchVimeo(ctx, m[1])
	}
	if m := twitterRegex.FindStringSubmatch(targetURL); len(m) > 2 {
		return fetchTwitter(ctx, m[2], targetURL)
	}
	// Not a recognized embed.
	return nil, nil
}
// fetchYouTube builds VideoInfo for a YouTube video. The thumbnail URL is
// derived directly from the video ID (no API needed); the title comes from
// the oEmbed endpoint and is best-effort — a failure there is non-fatal.
func fetchYouTube(ctx context.Context, videoID string) (*VideoInfo, error) {
	thumb := fmt.Sprintf("https://img.youtube.com/vi/%s/maxresdefault.jpg", videoID)

	oembedURL := fmt.Sprintf("https://www.youtube.com/oembed?url=%s&format=json",
		url.QueryEscape("https://www.youtube.com/watch?v="+videoID))
	var title string
	if meta, err := fetchOEmbed(ctx, oembedURL); err == nil {
		title = meta.Title
	}

	iframe := fmt.Sprintf(
		`<iframe width="560" height="315" src="https://www.youtube.com/embed/%s" frameborder="0" allowfullscreen></iframe>`,
		videoID,
	)

	info := &VideoInfo{
		Provider:     ProviderYouTube,
		VideoID:      videoID,
		Title:        title,
		ThumbnailURL: thumb,
		EmbedHTML:    iframe,
	}
	return info, nil
}
// fetchVimeo builds VideoInfo for a Vimeo video via its oEmbed API.
// Unlike YouTube, the oEmbed call is required: thumbnail and title are
// only available from it, so its failure is an error.
func fetchVimeo(ctx context.Context, videoID string) (*VideoInfo, error) {
	oembedURL := fmt.Sprintf("https://vimeo.com/api/oembed.json?url=%s",
		url.QueryEscape("https://vimeo.com/"+videoID))
	meta, err := fetchOEmbed(ctx, oembedURL)
	if err != nil {
		return nil, fmt.Errorf("vimeo oembed: %w", err)
	}

	iframe := fmt.Sprintf(
		`<iframe src="https://player.vimeo.com/video/%s" width="640" height="360" frameborder="0" allowfullscreen></iframe>`,
		videoID,
	)

	info := &VideoInfo{
		Provider:     ProviderVimeo,
		VideoID:      videoID,
		Title:        meta.Title,
		Description:  meta.Description,
		ThumbnailURL: meta.ThumbnailURL,
		EmbedHTML:    iframe,
	}
	return info, nil
}
// twitterSyndicationResponse represents the Twitter syndication API response
// (cdn.syndication.twimg.com/tweet-result). Only the fields this package
// reads are declared; everything else in the payload is ignored.
type twitterSyndicationResponse struct {
	Text string `json:"text"`
	User struct {
		Name       string `json:"name"`
		ScreenName string `json:"screen_name"`
	} `json:"user"`
	// Photos is populated for image tweets (possibly multiple).
	Photos []struct {
		URL    string `json:"url"`
		Width  int    `json:"width"`
		Height int    `json:"height"`
	} `json:"photos"`
	// MediaDetails carries video / animated_gif entries, each with a set of
	// encoded variants at different bitrates.
	MediaDetails []struct {
		MediaURLHTTPS string `json:"media_url_https"`
		Type          string `json:"type"`
		VideoInfo     struct {
			Variants []struct {
				ContentType string `json:"content_type"`
				URL         string `json:"url"`
				Bitrate     int    `json:"bitrate,omitempty"`
			} `json:"variants"`
		} `json:"video_info"`
	} `json:"mediaDetails"`
	// Video.Poster is a fallback thumbnail when no photos/media are present.
	Video struct {
		Poster string `json:"poster"`
	} `json:"video"`
}
// fetchTwitter loads tweet metadata from Twitter's public syndication CDN
// (no API key required) and assembles a VideoInfo for the tweet.
// NOTE(review): token=0 is an undocumented parameter used by the embed
// widget; this endpoint may change without notice — confirm it still works.
func fetchTwitter(ctx context.Context, tweetID string, originalURL string) (*VideoInfo, error) {
	apiURL := fmt.Sprintf("https://cdn.syndication.twimg.com/tweet-result?id=%s&token=0", tweetID)

	req, err := http.NewRequestWithContext(ctx, "GET", apiURL, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)")

	client := &http.Client{Timeout: 10 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("twitter syndication: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("twitter syndication status: %d", resp.StatusCode)
	}

	var tweet twitterSyndicationResponse
	if err := json.NewDecoder(resp.Body).Decode(&tweet); err != nil {
		return nil, fmt.Errorf("twitter syndication decode: %w", err)
	}

	// Find thumbnail and video URL from media.
	// Photos take precedence over video media details; only one of the two
	// branches runs per tweet.
	var thumbnailURL, videoURL string
	var thumbnailURLs []string
	if len(tweet.Photos) > 0 {
		thumbnailURL = tweet.Photos[0].URL
		// Collect all photo URLs for multi-image tweets
		for _, photo := range tweet.Photos {
			thumbnailURLs = append(thumbnailURLs, photo.URL)
		}
	} else if len(tweet.MediaDetails) > 0 {
		media := tweet.MediaDetails[0]
		thumbnailURL = media.MediaURLHTTPS
		// Extract video URL - find highest bitrate MP4
		if media.Type == "video" || media.Type == "animated_gif" {
			var bestBitrate int
			for _, v := range media.VideoInfo.Variants {
				// >= so a variant with the zero/omitted bitrate still wins
				// when it is the only MP4.
				if v.ContentType == "video/mp4" && v.Bitrate >= bestBitrate {
					bestBitrate = v.Bitrate
					videoURL = v.URL
				}
			}
		}
	}
	// Last-resort thumbnail: the video poster frame.
	if thumbnailURL == "" && tweet.Video.Poster != "" {
		thumbnailURL = tweet.Video.Poster
	}

	// Build embed HTML using Twitter's embed widget
	embedHTML := fmt.Sprintf(
		`<blockquote class="twitter-tweet"><a href="%s"></a></blockquote><script async src="https://platform.twitter.com/widgets.js"></script>`,
		originalURL,
	)

	// Title: "Display Name (@handle)" when the display name is known,
	// otherwise just "@handle".
	title := fmt.Sprintf("@%s", tweet.User.ScreenName)
	if tweet.User.Name != "" {
		title = fmt.Sprintf("%s (@%s)", tweet.User.Name, tweet.User.ScreenName)
	}

	// Clean up tweet text - remove trailing t.co URLs
	description := tcoRegex.ReplaceAllString(tweet.Text, "")
	description = strings.TrimSpace(description)

	return &VideoInfo{
		Provider:      ProviderTwitter,
		VideoID:       tweetID,
		Title:         title,
		Description:   description,
		ThumbnailURL:  thumbnailURL,
		ThumbnailURLs: thumbnailURLs,
		VideoURL:      videoURL,
		EmbedHTML:     embedHTML,
	}, nil
}
// oembedResponse is the subset of a standard oEmbed JSON payload that this
// package consumes (see oembed.com for the full schema).
type oembedResponse struct {
	Title        string `json:"title"`
	Description  string `json:"description"`
	ThumbnailURL string `json:"thumbnail_url"`
}
// fetchOEmbed GETs an oEmbed endpoint and decodes its JSON payload.
// Any non-200 status is reported as an error.
func fetchOEmbed(ctx context.Context, oembedURL string) (*oembedResponse, error) {
	req, err := http.NewRequestWithContext(ctx, "GET", oembedURL, nil)
	if err != nil {
		return nil, err
	}

	client := &http.Client{Timeout: 10 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("oembed status: %d", resp.StatusCode)
	}

	meta := new(oembedResponse)
	if decodeErr := json.NewDecoder(resp.Body).Decode(meta); decodeErr != nil {
		return nil, decodeErr
	}
	return meta, nil
}
// DownloadThumbnail downloads the thumbnail image for a video.
// It returns the raw image bytes and the response's Content-Type header.
func DownloadThumbnail(ctx context.Context, thumbnailURL string) ([]byte, string, error) {
	req, err := http.NewRequestWithContext(ctx, "GET", thumbnailURL, nil)
	if err != nil {
		return nil, "", err
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)")

	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return nil, "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, "", fmt.Errorf("thumbnail status: %d", resp.StatusCode)
	}

	contentType := resp.Header.Get("Content-Type")
	// io.ReadAll surfaces mid-stream read failures; the previous manual read
	// loop broke on ANY error and silently returned truncated data with a
	// nil error.
	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, "", err
	}
	return data, contentType, nil
}

View file

@ -1,141 +0,0 @@
package handlers
import (
"context"
"crypto/rand"
"encoding/base64"
"encoding/json"
"net/http"
"time"
"golang.org/x/crypto/bcrypt"
"lookbook/internal/data/admin"
"lookbook/internal/data/session"
)
// sessionDuration is how long a newly created admin session stays valid.
const sessionDuration = 30 * 24 * time.Hour // 30 days

// loginRequest is the JSON body accepted by POST /api/auth/login.
type loginRequest struct {
	Password string `json:"password"`
}

// loginResponse is the JSON body returned by POST /api/auth/login.
// FirstTime is true when this login just set the initial password.
type loginResponse struct {
	FirstTime bool   `json:"firstTime,omitempty"`
	Error     string `json:"error,omitempty"`
}
// HandleLogin handles POST /api/auth/login
// If no password is set, it sets the password. Otherwise, it verifies the password.
// On success it creates a session row and sets the session_id cookie.
func HandleLogin(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	var req loginRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		return writeJSON(w, http.StatusBadRequest, loginResponse{Error: "invalid request"})
	}
	if req.Password == "" {
		return writeJSON(w, http.StatusBadRequest, loginResponse{Error: "password required"})
	}

	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	adm, err := admin.QGet(ctx, rc.DB)
	if err != nil {
		return err
	}

	// A nil hash means no admin password has ever been set; this request
	// bootstraps the account by storing the supplied password.
	firstTime := adm.PasswordHash == nil
	if firstTime {
		// First login: set the password
		hash, err := bcrypt.GenerateFromPassword([]byte(req.Password), bcrypt.DefaultCost)
		if err != nil {
			return err
		}
		if err := admin.QSetPassword(ctx, rc.DB, hash); err != nil {
			return err
		}
	} else {
		// Verify password
		if err := bcrypt.CompareHashAndPassword(adm.PasswordHash, []byte(req.Password)); err != nil {
			return writeJSON(w, http.StatusUnauthorized, loginResponse{Error: "invalid password"})
		}
	}

	// Create session
	sessionID, err := generateSessionID()
	if err != nil {
		return err
	}
	expiresAt := time.Now().Add(sessionDuration)
	if _, err := session.QCreate(ctx, rc.DB, sessionID, expiresAt); err != nil {
		return err
	}

	// Set cookie
	// NOTE(review): Secure is derived from r.TLS, which is nil behind a
	// TLS-terminating reverse proxy — confirm deployment topology.
	http.SetCookie(w, &http.Cookie{
		Name:     "session_id",
		Value:    sessionID,
		Path:     "/",
		Expires:  expiresAt,
		HttpOnly: true,
		SameSite: http.SameSiteLaxMode,
		Secure:   r.TLS != nil,
	})

	return writeJSON(w, http.StatusOK, loginResponse{FirstTime: firstTime})
}
// HandleLogout handles POST /api/auth/logout
// It deletes the server-side session (best-effort) and expires the cookie.
func HandleLogout(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if cookie, err := r.Cookie("session_id"); err == nil {
		ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
		defer cancel()
		// Best-effort: an unknown or already-expired session is fine to ignore.
		session.QDelete(ctx, rc.DB, cookie.Value)
	}

	// Expire the cookie on the client.
	http.SetCookie(w, &http.Cookie{
		Name:     "session_id",
		Value:    "",
		Path:     "/",
		MaxAge:   -1,
		HttpOnly: true,
		SameSite: http.SameSiteLaxMode,
	})

	return writeJSON(w, http.StatusOK, map[string]string{"status": "ok"})
}
// HandleAuthStatus handles GET /api/auth/status
// It reports whether the caller is an admin and whether a password exists.
func HandleAuthStatus(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	passwordSet, err := admin.QHasPassword(ctx, rc.DB)
	if err != nil {
		return err
	}

	status := map[string]any{
		"isAdmin":     rc.IsAdmin,
		"passwordSet": passwordSet,
	}
	return writeJSON(w, http.StatusOK, status)
}
// generateSessionID returns a cryptographically random, URL-safe token
// built from 32 bytes of crypto/rand output.
func generateSessionID() (string, error) {
	raw := make([]byte, 32)
	_, err := rand.Read(raw)
	if err != nil {
		return "", err
	}
	return base64.URLEncoding.EncodeToString(raw), nil
}
// writeJSON sends v as a JSON response body with the given HTTP status.
func writeJSON(w http.ResponseWriter, status int, v any) error {
	header := w.Header()
	header.Set("Content-Type", "application/json")
	w.WriteHeader(status)
	enc := json.NewEncoder(w)
	return enc.Encode(v)
}

View file

@ -1,276 +0,0 @@
package handlers
import (
"context"
"encoding/json"
"net/http"
"time"
"lookbook/internal/data/item"
"lookbook/internal/data/media"
"lookbook/internal/data/tag"
)
// itemResponse is the JSON shape returned for a single item by the
// /api/items endpoints. Media references are IDs into the media table;
// the first stored image acts as the thumbnail, the rest as gallery images.
type itemResponse struct {
	ID                 string   `json:"id"`
	Title              *string  `json:"title,omitempty"`
	Description        *string  `json:"description,omitempty"`
	LinkURL            *string  `json:"linkUrl,omitempty"`
	ItemType           string   `json:"itemType"`
	EmbedHTML          *string  `json:"embedHtml,omitempty"`
	Tags               []string `json:"tags"`
	CreatedAt          string   `json:"createdAt"` // RFC 3339
	MediaID            *int64   `json:"mediaId,omitempty"`
	ThumbnailID        *int64   `json:"thumbnailId,omitempty"`
	ThumbnailSourceURL *string  `json:"thumbnailSourceUrl,omitempty"`
	GalleryIDs         []int64  `json:"galleryIds,omitempty"` // Additional images (for multi-image tweets)
}

// createItemRequest is the JSON body accepted by POST /api/items.
type createItemRequest struct {
	Title         *string  `json:"title"`
	Description   *string  `json:"description"`
	LinkURL       *string  `json:"linkUrl"`
	ItemType      string   `json:"itemType"` // 'image', 'video', 'quote', 'embed'
	EmbedProvider *string  `json:"embedProvider"`
	EmbedVideoID  *string  `json:"embedVideoId"`
	EmbedHTML     *string  `json:"embedHtml"`
	Tags          []string `json:"tags"`
}

// updateItemRequest is the JSON body accepted by PUT /api/items/{id}.
// Tags fully replaces the item's tag set (an absent/empty list clears it).
type updateItemRequest struct {
	Title       *string  `json:"title"`
	Description *string  `json:"description"`
	LinkURL     *string  `json:"linkUrl"`
	Tags        []string `json:"tags"`
}
// HandleListItems handles GET /api/items
// An optional ?tag= query parameter filters the listing by tag name.
func HandleListItems(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	var (
		items []item.Row
		err   error
	)
	if filter := r.URL.Query().Get("tag"); filter == "" {
		items, err = item.QList(ctx, rc.DB)
	} else {
		items, err = item.QListByTag(ctx, rc.DB, filter)
	}
	if err != nil {
		return err
	}

	out := make([]itemResponse, 0, len(items))
	for _, it := range items {
		entry, buildErr := buildItemResponse(ctx, rc, it)
		if buildErr != nil {
			return buildErr
		}
		out = append(out, entry)
	}
	return writeJSON(w, http.StatusOK, out)
}
// HandleGetItem handles GET /api/items/{id}
// The path parameter is the item's public ID, not its database ID.
func HandleGetItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	pubID := r.PathValue("id")
	if pubID == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"})
	}

	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	found, err := item.QFindByPubID(ctx, rc.DB, pubID)
	if err != nil {
		return err
	}
	if found == nil {
		return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"})
	}

	out, err := buildItemResponse(ctx, rc, *found)
	if err != nil {
		return err
	}
	return writeJSON(w, http.StatusOK, out)
}
// HandleCreateItem handles POST /api/items
// Admin-only. Creates an item from the JSON body and attaches any tags.
// Responds 201 with the created item's full representation.
func HandleCreateItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}

	var req createItemRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"})
	}
	if req.ItemType == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "itemType required"})
	}

	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	it, err := item.QCreate(ctx, rc.DB, item.CreateParams{
		Title:         item.Nullable(req.Title),
		Description:   item.Nullable(req.Description),
		LinkURL:       item.Nullable(req.LinkURL),
		ItemType:      req.ItemType,
		EmbedProvider: item.Nullable(req.EmbedProvider),
		EmbedVideoID:  item.Nullable(req.EmbedVideoID),
		EmbedHTML:     item.Nullable(req.EmbedHTML),
	})
	if err != nil {
		return err
	}

	// Set tags
	// NOTE(review): item creation and tag attachment are separate statements,
	// not one transaction — a failure here leaves an untagged item behind.
	if len(req.Tags) > 0 {
		if err := tag.QSetTagsForItem(ctx, rc.DB, it.ID, req.Tags); err != nil {
			return err
		}
	}

	resp, err := buildItemResponse(ctx, rc, it)
	if err != nil {
		return err
	}
	return writeJSON(w, http.StatusCreated, resp)
}
// HandleUpdateItem handles PUT /api/items/{id}
// Admin-only. Replaces the item's editable fields and its full tag set;
// an absent/empty Tags list clears all tags (PUT replace semantics).
func HandleUpdateItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}

	pubID := r.PathValue("id")
	if pubID == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"})
	}

	var req updateItemRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"})
	}

	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	it, err := item.QFindByPubID(ctx, rc.DB, pubID)
	if err != nil {
		return err
	}
	if it == nil {
		return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"})
	}

	if err := item.QUpdate(ctx, rc.DB, it.ID, item.UpdateParams{
		Title:       item.Nullable(req.Title),
		Description: item.Nullable(req.Description),
		LinkURL:     item.Nullable(req.LinkURL),
	}); err != nil {
		return err
	}

	// Update tags
	// NOTE(review): field update and tag replacement are not one transaction.
	if err := tag.QSetTagsForItem(ctx, rc.DB, it.ID, req.Tags); err != nil {
		return err
	}

	// Refetch to return updated item
	it, err = item.QFindByID(ctx, rc.DB, it.ID)
	if err != nil {
		return err
	}

	resp, err := buildItemResponse(ctx, rc, *it)
	if err != nil {
		return err
	}
	return writeJSON(w, http.StatusOK, resp)
}
// HandleDeleteItem handles DELETE /api/items/{id}
// Admin-only. Soft-deletes the item (the row is flagged, not removed).
func HandleDeleteItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}

	pubID := r.PathValue("id")
	if pubID == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"})
	}

	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	target, err := item.QFindByPubID(ctx, rc.DB, pubID)
	if err != nil {
		return err
	}
	if target == nil {
		return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"})
	}

	if err := item.QSoftDelete(ctx, rc.DB, target.ID); err != nil {
		return err
	}
	return writeJSON(w, http.StatusOK, map[string]string{"status": "deleted"})
}
// buildItemResponse assembles the API representation of an item: its tag
// names plus references to its media rows.
// Media is ordered by ID, so the first "image" row is the thumbnail and any
// further "image" rows become gallery entries; an "original" row (user
// upload) is exposed as MediaID.
func buildItemResponse(ctx context.Context, rc *RequestContext, it item.Row) (itemResponse, error) {
	tags, err := tag.QTagsForItem(ctx, rc.DB, it.ID)
	if err != nil {
		return itemResponse{}, err
	}
	tagNames := make([]string, len(tags))
	for i, t := range tags {
		tagNames[i] = t.Name
	}

	resp := itemResponse{
		ID:          it.PubID,
		Title:       item.Ptr(it.Title),
		Description: item.Ptr(it.Description),
		LinkURL:     item.Ptr(it.LinkURL),
		ItemType:    it.ItemType,
		EmbedHTML:   item.Ptr(it.EmbedHTML),
		Tags:        tagNames,
		CreatedAt:   it.CreatedAt.Format(time.RFC3339),
	}

	// Get media IDs
	// Media is ordered by ID, so first "image" is the thumbnail, rest are gallery
	mediaList, err := media.QFindByItemID(ctx, rc.DB, it.ID)
	if err != nil {
		return itemResponse{}, err
	}
	firstImage := true
	for _, m := range mediaList {
		if m.MediaType == "original" {
			// NOTE(review): if multiple 'original' rows exist, the last one
			// wins here — confirm the schema guarantees at most one.
			resp.MediaID = &m.ID
		} else if m.MediaType == "image" {
			if firstImage {
				resp.ThumbnailID = &m.ID
				resp.ThumbnailSourceURL = m.SourceURL
				firstImage = false
			} else {
				resp.GalleryIDs = append(resp.GalleryIDs, m.ID)
			}
		}
	}

	return resp, nil
}

View file

@ -1,47 +0,0 @@
package handlers
import (
"context"
"net/http"
"time"
"lookbook/internal/data/tag"
)
// HandleListTags handles GET /api/tags
// Responds with a flat JSON array of all tag names, alphabetically ordered.
func HandleListTags(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	all, err := tag.QList(ctx, rc.DB)
	if err != nil {
		return err
	}

	names := make([]string, 0, len(all))
	for _, t := range all {
		names = append(names, t.Name)
	}
	return writeJSON(w, http.StatusOK, names)
}
// HandleSuggestTags handles GET /api/tags/suggest?q=...
// Responds with up to 10 tag names that start with the given prefix.
func HandleSuggestTags(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	prefix := r.URL.Query().Get("q")

	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	matches, err := tag.QSuggest(ctx, rc.DB, prefix, 10)
	if err != nil {
		return err
	}

	names := make([]string, 0, len(matches))
	for _, t := range matches {
		names = append(names, t.Name)
	}
	return writeJSON(w, http.StatusOK, names)
}

View file

@ -1,608 +0,0 @@
package handlers
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"io"
"log/slog"
"net/http"
"strings"
"time"
"lookbook/internal/data/item"
"lookbook/internal/data/media"
"lookbook/internal/data/tag"
"lookbook/internal/embed"
"lookbook/internal/opengraph"
"lookbook/internal/video"
)
// strOrNil maps the empty string to nil; any other value is returned as a
// pointer to a copy of s.
func strOrNil(s string) *string {
	if len(s) == 0 {
		return nil
	}
	return &s
}
// nullStr creates a sql.Null[string] from a string (empty string = invalid).
func nullStr(s string) sql.Null[string] {
if s == "" {
return sql.Null[string]{}
}
return sql.Null[string]{V: s, Valid: true}
}
// downloadAndStoreImages downloads images from URLs and stores them for an item.
// All images are stored with media_type "image"; the first stored image (lowest
// ID) serves as the thumbnail. When imageURLs is empty, a non-empty imageURL is
// treated as a one-element list, unifying the previously duplicated single- and
// multi-image code paths. Failures are logged and skipped so one bad URL does
// not abort the rest.
func downloadAndStoreImages(
	ctx context.Context,
	db *sql.DB,
	logger *slog.Logger,
	itemID int64,
	imageURL string,
	imageURLs []string,
) {
	urls := imageURLs
	if len(urls) == 0 {
		if imageURL == "" {
			return
		}
		urls = []string{imageURL}
	}

	for i, imgURL := range urls {
		imgData, contentType, err := opengraph.DownloadImage(ctx, imgURL)
		if err != nil {
			logger.Warn("failed to download image", "url", imgURL, "index", i, "error", err)
			continue
		}
		// Per-iteration loop variable (Go 1.22+ semantics), so &imgURL is
		// a distinct pointer each time.
		if _, err := media.QCreate(ctx, db, media.CreateParams{
			ItemID:      itemID,
			MediaType:   "image",
			ContentType: contentType,
			Data:        imgData,
			SourceURL:   &imgURL,
		}); err != nil {
			logger.Warn("failed to store image", "index", i, "error", err)
		}
	}
}
// urlMetadata is the normalized result of inspecting a URL: either metadata
// from a recognized embed provider (IsEmbed true) or generic OpenGraph data.
type urlMetadata struct {
	Title       string
	Description string
	ImageURL    string
	ImageURLs   []string // All image URLs (for multi-image tweets)
	SiteName    string   // OpenGraph site name; empty for embeds
	IsEmbed     bool     // true when a YouTube/Vimeo/Twitter embed was detected
	Provider    string
	VideoID     string
	EmbedHTML   string
	VideoURL    string // Direct video URL (for Twitter)
}
// fetchURLMetadata resolves metadata for a URL: embed providers first
// (YouTube/Vimeo/Twitter), then generic OpenGraph as a fallback.
// Note: a Detect error (e.g. a provider API failure) falls through to the
// OpenGraph path rather than aborting.
func fetchURLMetadata(ctx context.Context, url string) (*urlMetadata, error) {
	if info, err := embed.Detect(ctx, url); err == nil && info != nil {
		return &urlMetadata{
			Title:       info.Title,
			Description: info.Description,
			ImageURL:    info.ThumbnailURL,
			ImageURLs:   info.ThumbnailURLs,
			IsEmbed:     true,
			Provider:    string(info.Provider),
			VideoID:     info.VideoID,
			EmbedHTML:   info.EmbedHTML,
			VideoURL:    info.VideoURL,
		}, nil
	}

	og, err := opengraph.Fetch(ctx, url)
	if err != nil {
		return nil, err
	}
	return &urlMetadata{
		Title:       og.Title,
		Description: og.Description,
		ImageURL:    og.ImageURL,
		SiteName:    og.SiteName,
		IsEmbed:     false,
	}, nil
}
// previewResponse is the JSON payload returned by HandlePreviewLink,
// echoing fetched metadata so the client can confirm before creating an item.
type previewResponse struct {
	Title       string `json:"title,omitempty"`
	Description string `json:"description,omitempty"`
	ImageURL    string `json:"imageUrl,omitempty"`
	SiteName    string `json:"siteName,omitempty"`
	IsEmbed     bool   `json:"isEmbed"`
	Provider    string `json:"provider,omitempty"`
	VideoID     string `json:"videoId,omitempty"`
	EmbedHTML   string `json:"embedHtml,omitempty"`
}
// HandlePreviewLink handles POST /api/preview - fetches metadata for a URL
//
// Admin-only. Body: {"url": "..."}. Responds 200 with a previewResponse on
// success, or 400 when the body is malformed, the URL is missing, or the
// fetch fails. The metadata fetch is capped at 30 seconds.
func HandlePreviewLink(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}
	var req struct {
		URL string `json:"url"`
	}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"})
	}
	if req.URL == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "url required"})
	}
	ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
	defer cancel()
	meta, err := fetchURLMetadata(ctx, req.URL)
	if err != nil {
		// Upstream fetch failures surface as 400: the URL, not the server, is at fault.
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": fmt.Sprintf("failed to fetch: %v", err)})
	}
	return writeJSON(w, http.StatusOK, previewResponse{
		Title:       meta.Title,
		Description: meta.Description,
		ImageURL:    meta.ImageURL,
		SiteName:    meta.SiteName,
		IsEmbed:     meta.IsEmbed,
		Provider:    meta.Provider,
		VideoID:     meta.VideoID,
		EmbedHTML:   meta.EmbedHTML,
	})
}
// createFromLinkRequest is the JSON body for HandleCreateFromLink. The
// client typically forwards fields confirmed from a preview response.
type createFromLinkRequest struct {
	URL         string   `json:"url"`
	Title       *string  `json:"title"`
	Description *string  `json:"description"`
	Tags        []string `json:"tags"`
	// For embeds:
	Provider  *string `json:"provider"`
	VideoID   *string `json:"videoId"`
	EmbedHTML *string `json:"embedHtml"`
	// For downloading hero image:
	ImageURL *string `json:"imageUrl"`
}
// HandleCreateFromLink handles POST /api/items/from-link
//
// Admin-only. Creates an item from a URL plus client-supplied metadata.
// Item type is derived from the request: provider set -> "embed",
// image URL set -> "image", otherwise a plain "link" card. For embeds the
// detector is re-run server-side for authoritative thumbnails and a direct
// video URL. Images are downloaded best-effort after the row exists.
// Responds 201 with the created item.
func HandleCreateFromLink(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}
	var req createFromLinkRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"})
	}
	ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second)
	defer cancel()
	var itemType string
	var embedProvider, embedVideoID, embedHTML *string
	if req.Provider != nil && *req.Provider != "" {
		// It's an embed
		itemType = "embed"
		embedProvider = req.Provider
		embedVideoID = req.VideoID
		embedHTML = req.EmbedHTML
	} else if req.ImageURL != nil && *req.ImageURL != "" {
		// It's a link with an image
		itemType = "image"
	} else {
		// Just a link (will be shown as a card)
		itemType = "link"
	}
	// For embeds, fetch thumbnail(s) and video URL
	var imageURL, videoURL string
	var imageURLs []string // For multi-image tweets
	if req.ImageURL != nil {
		imageURL = *req.ImageURL
	}
	if itemType == "embed" && embedProvider != nil {
		// Detection errors are deliberately ignored: the item is still
		// created, just without server-resolved thumbnails/video URL.
		if videoInfo, err := embed.Detect(ctx, req.URL); err == nil && videoInfo != nil {
			imageURL = videoInfo.ThumbnailURL
			imageURLs = videoInfo.ThumbnailURLs
			videoURL = videoInfo.VideoURL
		}
	}
	// Create the item
	it, err := item.QCreate(ctx, rc.DB, item.CreateParams{
		Title:         item.Nullable(req.Title),
		Description:   item.Nullable(req.Description),
		LinkURL:       item.Nullable(&req.URL),
		ItemType:      itemType,
		EmbedProvider: item.Nullable(embedProvider),
		EmbedVideoID:  item.Nullable(embedVideoID),
		EmbedHTML:     item.Nullable(embedHTML),
		EmbedVideoURL: nullStr(videoURL),
	})
	if err != nil {
		return err
	}
	// Download and store images
	downloadAndStoreImages(ctx, rc.DB, rc.Logger, it.ID, imageURL, imageURLs)
	// Set tags
	if len(req.Tags) > 0 {
		if err := tag.QSetTagsForItem(ctx, rc.DB, it.ID, req.Tags); err != nil {
			return err
		}
	}
	resp, err := buildItemResponse(ctx, rc, it)
	if err != nil {
		return err
	}
	return writeJSON(w, http.StatusCreated, resp)
}
// HandleUpload handles POST /api/items/upload - multipart file upload
//
// Admin-only. Accepts a "file" form field (image or video) plus optional
// "title", "description", and comma-separated "tags". Videos are transcoded
// to MP4 and get an extracted JPEG thumbnail; images are stored as-is.
// Responds 201 with the created item.
func HandleUpload(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}
	// Parse multipart form (max 500MB)
	if err := r.ParseMultipartForm(500 << 20); err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "failed to parse form"})
	}
	file, header, err := r.FormFile("file")
	if err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "file required"})
	}
	defer file.Close()
	title := r.FormValue("title")
	description := r.FormValue("description")
	tagsStr := r.FormValue("tags")
	var tags []string
	if tagsStr != "" {
		// Split, trim, and drop empty entries so inputs like "a,,b" or a
		// trailing comma do not create blank tags (matches parseTags).
		for _, t := range strings.Split(tagsStr, ",") {
			if t = strings.TrimSpace(t); t != "" {
				tags = append(tags, t)
			}
		}
	}
	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Minute)
	defer cancel()
	// Read file data
	data, err := io.ReadAll(file)
	if err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "failed to read file"})
	}
	contentType := header.Header.Get("Content-Type")
	if contentType == "" {
		// Fall back to content sniffing when the client omitted the type.
		contentType = http.DetectContentType(data)
	}
	var itemType string
	var originalData, thumbnailData []byte
	var originalContentType string
	if video.IsVideo(contentType) {
		itemType = "video"
		// Process video: transcode and extract thumbnail
		transcoded, thumbnail, err := video.ProcessVideo(ctx, data, contentType)
		if err != nil {
			return writeJSON(w, http.StatusInternalServerError, map[string]string{"error": fmt.Sprintf("video processing failed: %v", err)})
		}
		originalData = transcoded
		originalContentType = "video/mp4"
		thumbnailData = thumbnail
	} else if video.IsImage(contentType) {
		itemType = "image"
		originalData = data
		originalContentType = contentType
	} else {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "unsupported file type"})
	}
	// Create item
	it, err := item.QCreate(ctx, rc.DB, item.CreateParams{
		Title:       item.Nullable(strOrNil(title)),
		Description: item.Nullable(strOrNil(description)),
		ItemType:    itemType,
	})
	if err != nil {
		return err
	}
	// Store original media
	_, err = media.QCreate(ctx, rc.DB, media.CreateParams{
		ItemID:      it.ID,
		MediaType:   "original",
		ContentType: originalContentType,
		Data:        originalData,
	})
	if err != nil {
		return err
	}
	// Store thumbnail for videos (best effort; the item works without one)
	if len(thumbnailData) > 0 {
		_, err = media.QCreate(ctx, rc.DB, media.CreateParams{
			ItemID:      it.ID,
			MediaType:   "thumbnail",
			ContentType: "image/jpeg",
			Data:        thumbnailData,
		})
		if err != nil {
			rc.Logger.Warn("failed to store thumbnail", "error", err)
		}
	}
	// Set tags
	if len(tags) > 0 {
		if err := tag.QSetTagsForItem(ctx, rc.DB, it.ID, tags); err != nil {
			return err
		}
	}
	resp, err := buildItemResponse(ctx, rc, it)
	if err != nil {
		return err
	}
	return writeJSON(w, http.StatusCreated, resp)
}
// createQuoteRequest is the JSON body for HandleCreateQuote.
type createQuoteRequest struct {
	Text      string   `json:"text"`
	Source    *string  `json:"source"` // Optional attribution
	SourceURL *string  `json:"sourceUrl"`
	Tags      []string `json:"tags"`
}
// HandleCreateQuote handles POST /api/items/quote
//
// Admin-only. Creates a "quote" item: the quote text is stored as the
// description, the optional attribution as the title, and the optional
// source URL as the link URL. Responds 201 with the created item.
func HandleCreateQuote(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}
	var req createQuoteRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid request"})
	}
	if req.Text == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "text required"})
	}
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()
	created, err := item.QCreate(ctx, rc.DB, item.CreateParams{
		Title:       item.Nullable(req.Source),
		Description: nullStr(req.Text),
		LinkURL:     item.Nullable(req.SourceURL),
		ItemType:    "quote",
	})
	if err != nil {
		return err
	}
	// Attach tags when any were supplied.
	if len(req.Tags) > 0 {
		if err := tag.QSetTagsForItem(ctx, rc.DB, created.ID, req.Tags); err != nil {
			return err
		}
	}
	resp, err := buildItemResponse(ctx, rc, created)
	if err != nil {
		return err
	}
	return writeJSON(w, http.StatusCreated, resp)
}
// HandleReplaceMedia handles POST /api/items/{id}/media - replaces media for an item
//
// Admin-only. Accepts a multipart "file" field (image or video). Videos are
// transcoded to MP4 with an extracted JPEG thumbnail. All existing media
// rows for the item are removed before the new media is stored; when the
// item was previously an embed or link, its type is updated to match the
// new media. Responds 200 with the refreshed item.
func HandleReplaceMedia(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}
	pubID := r.PathValue("id")
	if pubID == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"})
	}
	// Parse multipart form (max 500MB)
	if err := r.ParseMultipartForm(500 << 20); err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "failed to parse form"})
	}
	file, header, err := r.FormFile("file")
	if err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "file required"})
	}
	defer file.Close()
	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Minute)
	defer cancel()
	it, err := item.QFindByPubID(ctx, rc.DB, pubID)
	if err != nil {
		return err
	}
	if it == nil {
		return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"})
	}
	// Read file data
	data, err := io.ReadAll(file)
	if err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "failed to read file"})
	}
	contentType := header.Header.Get("Content-Type")
	if contentType == "" {
		contentType = http.DetectContentType(data)
	}
	var originalData, thumbnailData []byte
	var originalContentType string
	var newItemType string
	if video.IsVideo(contentType) {
		newItemType = "video"
		transcoded, thumbnail, err := video.ProcessVideo(ctx, data, contentType)
		if err != nil {
			return writeJSON(w, http.StatusInternalServerError, map[string]string{"error": fmt.Sprintf("video processing failed: %v", err)})
		}
		originalData = transcoded
		originalContentType = "video/mp4"
		thumbnailData = thumbnail
	} else if video.IsImage(contentType) {
		newItemType = "image"
		originalData = data
		originalContentType = contentType
	} else {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "unsupported file type"})
	}
	// Delete existing media before storing the replacement. This error was
	// previously discarded, which could leave stale media alongside the new
	// upload; abort instead so the item never ends up in a mixed state.
	if err := media.QDeleteByItemID(ctx, rc.DB, it.ID); err != nil {
		return err
	}
	// Store new original media
	_, err = media.QCreate(ctx, rc.DB, media.CreateParams{
		ItemID:      it.ID,
		MediaType:   "original",
		ContentType: originalContentType,
		Data:        originalData,
	})
	if err != nil {
		return err
	}
	// Store thumbnail for videos (best effort)
	if len(thumbnailData) > 0 {
		_, err = media.QCreate(ctx, rc.DB, media.CreateParams{
			ItemID:      it.ID,
			MediaType:   "thumbnail",
			ContentType: "image/jpeg",
			Data:        thumbnailData,
		})
		if err != nil {
			rc.Logger.Warn("failed to store thumbnail", "error", err)
		}
	}
	// Update item type if it changed (e.g., embed -> image). Log rather than
	// fail: the media replacement itself already succeeded.
	if it.ItemType != newItemType && (it.ItemType == "embed" || it.ItemType == "link") {
		if err := item.QUpdateType(ctx, rc.DB, it.ID, newItemType); err != nil {
			rc.Logger.Warn("failed to update item type", "error", err)
		}
	}
	// Refetch and return updated item
	it, err = item.QFindByID(ctx, rc.DB, it.ID)
	if err != nil {
		return err
	}
	resp, err := buildItemResponse(ctx, rc, *it)
	if err != nil {
		return err
	}
	return writeJSON(w, http.StatusOK, resp)
}
// HandleRefreshMetadata handles POST /api/items/{id}/refresh
// Re-fetches metadata and thumbnail for an item with a link URL.
//
// Admin-only. Updates title/description (and the direct video URL when the
// detector reports one), then — only when new images were found — replaces
// all stored media with freshly downloaded copies. Responds 200 with the
// refreshed item.
func HandleRefreshMetadata(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !rc.RequireAdmin(w) {
		return nil
	}
	pubID := r.PathValue("id")
	if pubID == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "missing id"})
	}
	ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second)
	defer cancel()
	it, err := item.QFindByPubID(ctx, rc.DB, pubID)
	if err != nil {
		return err
	}
	if it == nil {
		return writeJSON(w, http.StatusNotFound, map[string]string{"error": "not found"})
	}
	if !it.LinkURL.Valid || it.LinkURL.V == "" {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": "item has no link URL"})
	}
	meta, err := fetchURLMetadata(ctx, it.LinkURL.V)
	if err != nil {
		return writeJSON(w, http.StatusBadRequest, map[string]string{"error": fmt.Sprintf("failed to fetch: %v", err)})
	}
	// Update title, description, and video URL. These errors were previously
	// discarded, silently returning stale data; surface them instead.
	if err := item.QUpdate(ctx, rc.DB, it.ID, item.UpdateParams{
		Title:       nullStr(meta.Title),
		Description: nullStr(meta.Description),
		LinkURL:     it.LinkURL,
	}); err != nil {
		return err
	}
	if meta.VideoURL != "" {
		if err := item.QUpdateVideoURL(ctx, rc.DB, it.ID, meta.VideoURL); err != nil {
			return err
		}
	}
	// Download and replace images
	if len(meta.ImageURLs) > 0 || meta.ImageURL != "" {
		// Delete existing media (thumbnails and gallery)
		if err := media.QDeleteByItemID(ctx, rc.DB, it.ID); err != nil {
			return err
		}
		downloadAndStoreImages(ctx, rc.DB, rc.Logger, it.ID, meta.ImageURL, meta.ImageURLs)
	}
	// Refetch and return updated item
	it, err = item.QFindByID(ctx, rc.DB, it.ID)
	if err != nil {
		return err
	}
	resp, err := buildItemResponse(ctx, rc, *it)
	if err != nil {
		return err
	}
	return writeJSON(w, http.StatusOK, resp)
}

167
internal/handlers/auth.go Normal file
View file

@ -0,0 +1,167 @@
package handlers
import (
"crypto/rand"
"encoding/base64"
"encoding/json"
"fmt"
"net/http"
"time"
"golang.org/x/crypto/bcrypt"
"git.soup.land/soup/lookbook/internal/data/auth"
"git.soup.land/soup/lookbook/internal/data/session"
)
// sessionCookieName is the cookie that carries the opaque session token.
const sessionCookieName = "lookbook_session"

// sessionDuration is how long a freshly created session stays valid.
const sessionDuration = 30 * 24 * time.Hour

// authRequest is the JSON body for the login endpoint.
type authRequest struct {
	Password string `json:"password"`
}

// authStatus is the JSON response for the auth-status endpoint.
type authStatus struct {
	Authenticated bool `json:"authenticated"`
	HasPassword   bool `json:"has_password"`
}
// HandlePostAuthLogin handles the login POST.
//
// First-run bootstrap: when no password hash exists yet, the submitted
// password becomes the password (bcrypt, default cost). Otherwise the
// password is checked against the stored hash. On success a session row
// valid for sessionDuration is created and its random token is set as an
// HttpOnly cookie; responds 204.
func HandlePostAuthLogin(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	var req authRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		http.Error(w, "Invalid request", http.StatusBadRequest)
		return nil
	}
	if req.Password == "" {
		http.Error(w, "Password required", http.StatusBadRequest)
		return nil
	}
	existing, err := auth.QGet(r.Context(), rc.DB)
	if err != nil {
		return fmt.Errorf("get auth: %w", err)
	}
	if existing == nil {
		// No password configured yet: this login sets it.
		hash, err := bcrypt.GenerateFromPassword([]byte(req.Password), bcrypt.DefaultCost)
		if err != nil {
			return fmt.Errorf("hash password: %w", err)
		}
		if _, err := auth.QCreate(r.Context(), rc.DB, hash); err != nil {
			return fmt.Errorf("create password: %w", err)
		}
	} else {
		if err := bcrypt.CompareHashAndPassword(existing.PasswordHash, []byte(req.Password)); err != nil {
			http.Error(w, "Unauthorized", http.StatusUnauthorized)
			return nil
		}
	}
	token, err := newToken(32)
	if err != nil {
		return fmt.Errorf("token: %w", err)
	}
	expiresAt := time.Now().Add(sessionDuration)
	if _, err := session.QCreate(r.Context(), rc.DB, token, expiresAt); err != nil {
		return fmt.Errorf("create session: %w", err)
	}
	setSessionCookie(w, r, token, expiresAt)
	w.WriteHeader(http.StatusNoContent)
	return nil
}
// HandlePostAuthLogout deletes the server-side session (best effort) and
// expires the session cookie. Always responds 204, even without a cookie.
func HandlePostAuthLogout(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if cookie, err := r.Cookie(sessionCookieName); err == nil {
		// Deletion failures are ignored: logout must always succeed client-side.
		_ = session.QDelete(r.Context(), rc.DB, cookie.Value)
	}
	clearSessionCookie(w, r)
	w.WriteHeader(http.StatusNoContent)
	return nil
}
// HandleGetAuthStatus reports whether a password has been configured and
// whether the current request carries a valid session, as JSON.
func HandleGetAuthStatus(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	existing, err := auth.QGet(r.Context(), rc.DB)
	if err != nil {
		return fmt.Errorf("get auth: %w", err)
	}
	authed, err := isAuthenticated(r, rc)
	if err != nil {
		return fmt.Errorf("auth status: %w", err)
	}
	status := authStatus{
		HasPassword:   existing != nil,
		Authenticated: authed,
	}
	w.Header().Set("Content-Type", "application/json")
	return json.NewEncoder(w).Encode(status)
}
// isAuthenticated reports whether the request carries a valid, unexpired
// session cookie. Expired sessions are cleaned up as a side effect. A nil
// error with false means "not logged in"; a non-nil error means the
// session lookup itself failed.
func isAuthenticated(r *http.Request, rc *RequestContext) (bool, error) {
	cookie, err := r.Cookie(sessionCookieName)
	if err != nil {
		// No cookie at all: anonymous visitor, not an error.
		return false, nil
	}
	sess, err := session.QFindByToken(r.Context(), rc.DB, cookie.Value)
	if err != nil {
		return false, err
	}
	switch {
	case sess == nil:
		return false, nil
	case time.Now().After(sess.ExpiresAt):
		// Lazily delete the stale session; best effort.
		_ = session.QDelete(r.Context(), rc.DB, cookie.Value)
		return false, nil
	default:
		return true, nil
	}
}
// requireAuth gates a handler behind a valid session. It reports true when
// the request is authenticated; otherwise it writes the error response
// itself and reports false (the caller should return nil immediately).
func requireAuth(w http.ResponseWriter, r *http.Request, rc *RequestContext) bool {
	authed, err := isAuthenticated(r, rc)
	if err != nil {
		// A session-lookup failure is a server problem, not a missing login;
		// report 500 so it isn't masked as an auth failure.
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return false
	}
	if !authed {
		http.Error(w, "Unauthorized", http.StatusUnauthorized)
		return false
	}
	return true
}
// setSessionCookie attaches the session token to the response as an
// HttpOnly, Lax cookie scoped to the whole site. The Secure flag mirrors
// whether the current request arrived over TLS.
func setSessionCookie(w http.ResponseWriter, r *http.Request, token string, expiresAt time.Time) {
	cookie := http.Cookie{
		Name:     sessionCookieName,
		Value:    token,
		Path:     "/",
		Expires:  expiresAt,
		HttpOnly: true,
		Secure:   r.TLS != nil,
		SameSite: http.SameSiteLaxMode,
	}
	http.SetCookie(w, &cookie)
}
// clearSessionCookie expires the session cookie on the client. Both
// MaxAge < 0 (the canonical RFC 6265 deletion signal) and an epoch
// Expires are set so old and new clients alike drop the cookie.
func clearSessionCookie(w http.ResponseWriter, r *http.Request) {
	http.SetCookie(w, &http.Cookie{
		Name:     sessionCookieName,
		Value:    "",
		Path:     "/",
		HttpOnly: true,
		Secure:   r.TLS != nil,
		SameSite: http.SameSiteLaxMode,
		Expires:  time.Unix(0, 0),
		MaxAge:   -1,
	})
}
// newToken returns length cryptographically random bytes encoded as an
// unpadded URL-safe base64 string, suitable for use as a session token.
func newToken(length int) (string, error) {
	raw := make([]byte, length)
	_, err := rand.Read(raw)
	if err != nil {
		return "", err
	}
	return base64.RawURLEncoding.EncodeToString(raw), nil
}

View file

@ -3,7 +3,6 @@ package handlers
import ( import (
"database/sql" "database/sql"
"log/slog" "log/slog"
"net/http"
"git.soup.land/soup/sxgo/ssr" "git.soup.land/soup/sxgo/ssr"
) )
@ -13,15 +12,4 @@ type RequestContext struct {
DB *sql.DB DB *sql.DB
Logger *slog.Logger Logger *slog.Logger
TmplCache *ssr.TmplCache TmplCache *ssr.TmplCache
IsAdmin bool // true if authenticated as admin
}
// RequireAdmin checks authentication and returns 401 if not admin.
// Returns true if authenticated, false if 401 was sent.
func (rc *RequestContext) RequireAdmin(w http.ResponseWriter) bool {
if !rc.IsAdmin {
http.Error(w, "Unauthorized", http.StatusUnauthorized)
return false
}
return true
} }

View file

@ -0,0 +1,145 @@
package handlers
import (
"fmt"
"net/http"
"strconv"
"strings"
"git.soup.land/soup/lookbook/internal/components"
"git.soup.land/soup/lookbook/internal/data/image"
"git.soup.land/soup/lookbook/internal/data/item"
"git.soup.land/soup/lookbook/internal/data/tag"
"git.soup.land/soup/sxgo/ssr"
)
// galleryPageData is the view model for the gallery index page.
type galleryPageData struct {
	Items    []galleryItem
	Tags     []tag.Row
	HasAuth  bool     // whether the viewer holds a valid session
	TagNames []string // all tag names, serialized as JSON for suggestions
}

// galleryItem pairs an item row with its thumbnail and tag data for one grid card.
type galleryItem struct {
	Item        item.Row
	Thumb       *image.Ref // nil when no cached preview image exists
	TagNames    []string
	TagListText string // comma-joined tags, used for client-side filtering
}
// Render writes the gallery page: an add-link form (shown to admins only),
// tag filter pills, and the card grid. Auth-gated elements are rendered
// with the hidden attribute and toggled client-side.
func (g galleryPageData) Render(sw *ssr.Writer) error {
	return sw.Tmpl(g, `
<section class="hero">
<form method="POST" action="/items" data-auth-required {{if not .HasAuth}}hidden{{end}}>
<input type="url" name="url" placeholder="Paste a link" required>
<button type="submit">Add</button>
</form>
<div class="notice" data-auth-status>Read only</div>
</section>
<section class="filters">
{{range .Tags}}
<button type="button" class="filter-pill" data-tag-filter="{{.Name}}">{{.Name}}</button>
{{end}}
<div class="tag-suggestions" data-tag-suggestions data-tags='{{json .TagNames}}'></div>
</section>
<section class="gallery">
{{range .Items}}
<div class="card" data-item-tags="{{.TagListText}}">
<a href="/items/{{.Item.ID}}">
{{if .Thumb}}
<img src="/images/{{.Thumb.ID}}" alt="{{.Item.Title}}">
{{else}}
<div class="placeholder">{{if .Item.Title}}{{.Item.Title}}{{else}}{{.Item.SourceURL}}{{end}}</div>
{{end}}
<div class="overlay">
<div class="title">{{if .Item.Title}}{{.Item.Title}}{{else}}{{.Item.SourceURL}}{{end}}</div>
<div class="tags">{{range $i, $tag := .TagNames}}{{if $i}} · {{end}}{{$tag}}{{end}}</div>
</div>
</a>
</div>
{{end}}
</section>
`)
}
// HandleGetGallery renders the gallery index: every item with its primary
// thumbnail and tags, plus the global tag list for filter pills and
// autocomplete suggestions.
func HandleGetGallery(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	items, err := item.QList(r.Context(), rc.DB)
	if err != nil {
		return fmt.Errorf("list items: %w", err)
	}
	itemIDs := make([]int64, 0, len(items))
	for _, row := range items {
		itemIDs = append(itemIDs, row.ID)
	}
	thumbs, err := image.QListPrimaryRefsByItems(r.Context(), rc.DB, itemIDs)
	if err != nil {
		return fmt.Errorf("list thumbs: %w", err)
	}
	entries, err := tag.QListItemTags(r.Context(), rc.DB)
	if err != nil {
		// Distinct wrap message: this is the per-item tag join, not the
		// global tag list fetched below.
		return fmt.Errorf("list item tags: %w", err)
	}
	// Group tag names by item ID for O(1) lookup while building cards.
	itemTagNames := make(map[int64][]string)
	for _, entry := range entries {
		itemTagNames[entry.ItemID] = append(itemTagNames[entry.ItemID], entry.Name)
	}
	cards := make([]galleryItem, 0, len(items))
	for _, row := range items {
		tagNames := itemTagNames[row.ID]
		cards = append(cards, galleryItem{
			Item:        row,
			Thumb:       refOrNil(thumbs[row.ID]),
			TagNames:    tagNames,
			TagListText: strings.Join(tagNames, ","),
		})
	}
	tags, err := tag.QList(r.Context(), rc.DB)
	if err != nil {
		return fmt.Errorf("list tags: %w", err)
	}
	authed, err := isAuthenticated(r, rc)
	if err != nil {
		return fmt.Errorf("auth status: %w", err)
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	sw := ssr.NewWriter(w, rc.TmplCache)
	tagNames := make([]string, 0, len(tags))
	for _, row := range tags {
		tagNames = append(tagNames, row.Name)
	}
	data := galleryPageData{
		Items:    cards,
		Tags:     tags,
		HasAuth:  authed,
		TagNames: tagNames,
	}
	return components.Page{
		Title:   "Gallery",
		Content: data,
		ShowNav: true,
		HasAuth: authed,
	}.Render(sw)
}
// refOrNil converts a value-typed image ref to a pointer, mapping the zero
// ref (ID 0, i.e. no row found in the lookup map) to nil so templates can
// test for presence. NOTE(review): assumes a real image never has ID 0 —
// confirm against the schema's ID sequence.
func refOrNil(ref image.Ref) *image.Ref {
	if ref.ID == 0 {
		return nil
	}
	return &ref
}
// parseID converts a path/query value into an int64 identifier.
func parseID(value string) (int64, error) {
	id, err := strconv.ParseInt(value, 10, 64)
	if err != nil {
		return 0, err
	}
	return id, nil
}

View file

@ -1,251 +0,0 @@
package handlers
import (
"context"
"net/http"
"strings"
"time"
"git.soup.land/soup/sxgo/ssr"
"lookbook/internal/components"
"lookbook/internal/data/item"
"lookbook/internal/data/media"
"lookbook/internal/data/tag"
)
// homeContent is the view model for the home grid page.
type homeContent struct {
	Items     []homeItem
	Tags      []string
	ActiveTag string // currently selected tag filter ("" means all)
	IsAdmin   bool
}

// homeItem is a single grid entry with pre-resolved media references.
type homeItem struct {
	ID          string // public item ID used in URLs
	Title       *string
	Description *string
	LinkURL     *string
	ItemType    string
	EmbedHTML   *string
	Tags        []string
	ThumbnailID *int64 // first "image" media row, if any
	MediaID     *int64 // "original" media row, if any
	HasVideo    bool
	GalleryIDs  []int64 // Additional images for multi-image embeds
	ImageCount  int     // Total image count (1 + len(GalleryIDs))
}
// Render writes the home page: tag filter bar, the item grid (with
// type-specific cards for quotes, multi-image galleries, thumbnails,
// embeds, and plain links), and — for admins — the add-item modal with
// link/upload/quote tabs driven by client-side JS.
func (h homeContent) Render(sw *ssr.Writer) error {
	return sw.Tmpl(h, `
<div class="home">
{{if .IsAdmin}}
<div class="admin-bar">
<button class="btn" onclick="showAddModal()">+ ADD ITEM</button>
</div>
{{end}}
<div class="tags-bar">
<a href="/" class="tag {{if not .ActiveTag}}active{{end}}">ALL</a>
{{range .Tags}}
<a href="/?tag={{.}}" class="tag {{if eq $.ActiveTag .}}active{{end}}">{{.}}</a>
{{end}}
</div>
<div class="grid">
{{range .Items}}
<a href="/item/{{.ID}}" class="grid-item" data-type="{{.ItemType}}">
{{if eq .ItemType "quote"}}
<div class="quote-card">
<blockquote>{{.Description}}</blockquote>
{{if .Title}}<cite> {{.Title}}</cite>{{end}}
</div>
{{else if .GalleryIDs}}
<div class="grid-item-images" data-gallery="true" data-count="{{.ImageCount}}">
<img src="/media/{{.ThumbnailID}}" alt="{{if .Title}}{{.Title}}{{else}}Image{{end}}" loading="lazy" class="active">
{{range .GalleryIDs}}<img src="/media/{{.}}" alt="Image" loading="lazy">{{end}}
</div>
<div class="gallery-indicator">{{.ImageCount}}</div>
{{else if .ThumbnailID}}
<img src="/media/{{.ThumbnailID}}" alt="{{if .Title}}{{.Title}}{{else}}Image{{end}}" loading="lazy">
{{if or .HasVideo (eq .ItemType "video")}}<div class="play-indicator"></div>{{end}}
{{else if .MediaID}}
<img src="/media/{{.MediaID}}" alt="{{if .Title}}{{.Title}}{{else}}Image{{end}}" loading="lazy">
{{if or .HasVideo (eq .ItemType "video")}}<div class="play-indicator"></div>{{end}}
{{else if eq .ItemType "embed"}}
<div class="embed-placeholder">
<span></span>
</div>
{{else}}
<div class="link-card">
{{if .Title}}<div class="link-title">{{.Title}}</div>{{end}}
{{if .LinkURL}}<div class="link-url">{{.LinkURL}}</div>{{end}}
</div>
{{end}}
{{if or .Title .Tags}}
<div class="item-overlay">
{{if and .Title (ne .ItemType "link") (ne .ItemType "quote")}}
<div class="item-title">{{.Title}}</div>
{{end}}
{{if .Tags}}
<div class="item-tags">
{{range .Tags}}<span class="item-tag">{{.}}</span>{{end}}
</div>
{{end}}
</div>
{{end}}
</a>
{{end}}
</div>
</div>
{{if .IsAdmin}}
<div id="add-modal" class="modal" onclick="if(event.target===this)hideAddModal()">
<div class="modal-content">
<div class="modal-header">
<h2>ADD ITEM</h2>
<button class="btn-close" onclick="hideAddModal()">&times;</button>
</div>
<div class="modal-tabs">
<button class="tab active" data-tab="link">LINK</button>
<button class="tab" data-tab="upload">UPLOAD</button>
<button class="tab" data-tab="quote">QUOTE</button>
</div>
<div class="modal-body">
<div id="tab-link" class="tab-content active">
<form id="link-form" onsubmit="return submitLink(event)">
<input type="url" name="url" placeholder="Paste URL..." required>
<div id="link-preview" class="preview"></div>
<input type="text" name="title" placeholder="Title (optional)">
<textarea name="description" placeholder="Description (optional)"></textarea>
<input type="text" name="tags" placeholder="Tags (comma-separated)">
<button type="submit" class="btn">ADD</button>
</form>
</div>
<div id="tab-upload" class="tab-content">
<form id="upload-form" onsubmit="return submitUpload(event)">
<input type="file" name="file" accept="image/*,video/*" required>
<input type="text" name="title" placeholder="Title (optional)">
<textarea name="description" placeholder="Description (optional)"></textarea>
<input type="text" name="tags" placeholder="Tags (comma-separated)">
<button type="submit" class="btn">UPLOAD</button>
</form>
</div>
<div id="tab-quote" class="tab-content">
<form id="quote-form" onsubmit="return submitQuote(event)">
<textarea name="text" placeholder="Quote text..." required rows="4"></textarea>
<input type="text" name="source" placeholder="Source / Attribution (optional)">
<input type="url" name="sourceUrl" placeholder="Source URL (optional)">
<input type="text" name="tags" placeholder="Tags (comma-separated)">
<button type="submit" class="btn">ADD QUOTE</button>
</form>
</div>
</div>
</div>
</div>
{{end}}
`)
}
// HandleHome handles GET /
//
// Renders the public home grid, optionally filtered to a single tag via
// the ?tag= query parameter. For each item the media list is split into a
// thumbnail (first "image" row by ID order) and gallery images (the rest).
//
// NOTE(review): tags and media are fetched with one query per item (N+1);
// consider batched queries if the item count grows.
func HandleHome(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()
	tagFilter := r.URL.Query().Get("tag")
	var items []item.Row
	var err error
	if tagFilter != "" {
		items, err = item.QListByTag(ctx, rc.DB, tagFilter)
	} else {
		items, err = item.QList(ctx, rc.DB)
	}
	if err != nil {
		return err
	}
	// Get all tags
	allTags, err := tag.QList(ctx, rc.DB)
	if err != nil {
		return err
	}
	tagNames := make([]string, len(allTags))
	for i, t := range allTags {
		tagNames[i] = t.Name
	}
	// Build home items
	homeItems := make([]homeItem, 0, len(items))
	for _, it := range items {
		hi := homeItem{
			ID:          it.PubID,
			Title:       item.Ptr(it.Title),
			Description: item.Ptr(it.Description),
			LinkURL:     item.Ptr(it.LinkURL),
			ItemType:    it.ItemType,
			EmbedHTML:   item.Ptr(it.EmbedHTML),
		}
		// Get tags
		itemTags, err := tag.QTagsForItem(ctx, rc.DB, it.ID)
		if err != nil {
			return err
		}
		hi.Tags = make([]string, len(itemTags))
		for i, t := range itemTags {
			hi.Tags[i] = t.Name
		}
		// Get media
		// Media is ordered by ID, so first "image" is the thumbnail, rest are gallery
		mediaList, err := media.QFindByItemID(ctx, rc.DB, it.ID)
		if err != nil {
			return err
		}
		firstImage := true
		for _, m := range mediaList {
			if m.MediaType == "original" {
				hi.MediaID = &m.ID
				if strings.HasPrefix(m.ContentType, "video/") {
					hi.HasVideo = true
				}
			} else if m.MediaType == "image" {
				if firstImage {
					hi.ThumbnailID = &m.ID
					firstImage = false
				} else {
					hi.GalleryIDs = append(hi.GalleryIDs, m.ID)
				}
			}
		}
		// Calculate total image count (thumbnail + gallery images)
		if len(hi.GalleryIDs) > 0 {
			hi.ImageCount = 1 + len(hi.GalleryIDs)
		}
		// Also check for embed video URL
		if it.EmbedVideoURL.Valid && it.EmbedVideoURL.V != "" {
			hi.HasVideo = true
		}
		homeItems = append(homeItems, hi)
	}
	content := homeContent{
		Items:     homeItems,
		Tags:      tagNames,
		ActiveTag: tagFilter,
		IsAdmin:   rc.IsAdmin,
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	sw := ssr.NewWriter(w, rc.TmplCache)
	page := components.Page{
		Title:   "",
		IsAdmin: rc.IsAdmin,
		Content: content,
	}
	return page.Render(sw)
}

View file

@ -0,0 +1,32 @@
package handlers
import (
"fmt"
"net/http"
"git.soup.land/soup/lookbook/internal/data/image"
)
// HandleGetImage serves a cached image blob by ID.
//
// Responds 400 for a malformed ID and 404 when no row exists. Successful
// responses carry the stored content type (when known), an explicit
// Content-Length, and a one-week public cache header.
func HandleGetImage(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	id, err := parseID(r.PathValue("id"))
	if err != nil {
		http.Error(w, "Invalid ID", http.StatusBadRequest)
		return nil
	}
	row, err := image.QFindByID(r.Context(), rc.DB, id)
	if err != nil {
		return fmt.Errorf("find image: %w", err)
	}
	if row == nil {
		http.NotFound(w, r)
		return nil
	}
	if row.ContentType != "" {
		w.Header().Set("Content-Type", row.ContentType)
	}
	// Declare the body size up front so clients can show progress and the
	// server need not chunk the response.
	w.Header().Set("Content-Length", fmt.Sprint(len(row.Bytes)))
	w.Header().Set("Cache-Control", "public, max-age=604800")
	// Write errors (client disconnects) are not actionable here.
	_, _ = w.Write(row.Bytes)
	return nil
}

245
internal/handlers/item.go Normal file
View file

@ -0,0 +1,245 @@
package handlers
import (
"fmt"
"net/http"
"strings"
"git.soup.land/soup/lookbook/internal/components"
"git.soup.land/soup/lookbook/internal/data/image"
"git.soup.land/soup/lookbook/internal/data/item"
"git.soup.land/soup/lookbook/internal/data/tag"
"git.soup.land/soup/lookbook/internal/services"
"git.soup.land/soup/sxgo/ssr"
)
// itemPageData is the view model for the single-item detail page.
type itemPageData struct {
	Item        item.Row
	Images      []image.Row
	TagNames    []string // tags currently on this item
	AllTagNames []string // every known tag, for autocomplete suggestions
	HasAuth     bool
}
// Render writes the item detail page: the first cached image (or a notice),
// metadata, a read-only tag list with an auth-gated inline tag editor, and
// admin actions (refresh metadata, delete). The editor carries a static
// hidden attribute and is toggled client-side via data-tag-toggle.
func (d itemPageData) Render(sw *ssr.Writer) error {
	return sw.Tmpl(d, `
<section class="detail">
<div>
{{if .Images}}
<img src="/images/{{(index .Images 0).ID}}" alt="{{.Item.Title}}">
{{else}}
<div class="notice">No preview image cached.</div>
{{end}}
</div>
<div class="meta">
<h1>{{if .Item.Title}}{{.Item.Title}}{{else}}{{.Item.SourceURL}}{{end}}</h1>
{{if .Item.SiteName}}<div>{{.Item.SiteName}}</div>{{end}}
{{if .Item.Description}}<p>{{.Item.Description}}</p>{{end}}
<a href="{{.Item.SourceURL}}" target="_blank" rel="noreferrer">{{.Item.SourceURL}}</a>
<div>
<strong>Tags</strong>
<div class="tag-list" data-tag-list>
{{range .TagNames}}
<span class="tag-chip">{{.}}</span>
{{else}}
<span class="tag-chip">No tags</span>
{{end}}
</div>
<button type="button" class="ghost" data-tag-toggle data-auth-required {{if not .HasAuth}}hidden{{end}}>Edit tags</button>
</div>
<div class="tag-editor" data-tag-editor data-auth-required hidden {{if not .HasAuth}}hidden{{end}}>
<form method="POST" action="/items/{{.Item.ID}}/tags" data-tag-form>
<input type="text" name="tags" value="{{range $i, $tag := .TagNames}}{{if $i}}, {{end}}{{$tag}}{{end}}" data-tag-input>
<button type="submit">Save tags</button>
</form>
<div class="tag-suggestions" data-tag-suggestions data-tags='{{json .AllTagNames}}'></div>
</div>
<div class="actions" data-auth-required {{if not .HasAuth}}hidden{{end}}>
<form method="POST" action="/items/{{.Item.ID}}/refresh">
<button type="submit">Refresh metadata</button>
</form>
<form method="POST" action="/items/{{.Item.ID}}/delete">
<button type="submit">Delete</button>
</form>
</div>
</div>
</section>
`)
}
// HandleGetItem renders the detail page for a single item: its cached
// images, current tags, and (for autocomplete) every known tag name.
// Responds 400 for a malformed ID and 404 when the item does not exist.
func HandleGetItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	id, err := parseID(r.PathValue("id"))
	if err != nil {
		http.Error(w, "Invalid ID", http.StatusBadRequest)
		return nil
	}
	row, err := item.QFindByID(r.Context(), rc.DB, id)
	if err != nil {
		return fmt.Errorf("find item: %w", err)
	}
	if row == nil {
		http.NotFound(w, r)
		return nil
	}
	images, err := image.QListByItem(r.Context(), rc.DB, id)
	if err != nil {
		return fmt.Errorf("list images: %w", err)
	}
	tags, err := tag.QListByItem(r.Context(), rc.DB, id)
	if err != nil {
		return fmt.Errorf("list item tags: %w", err)
	}
	allTags, err := tag.QList(r.Context(), rc.DB)
	if err != nil {
		return fmt.Errorf("list tags: %w", err)
	}
	var tagNames []string
	for _, row := range tags {
		tagNames = append(tagNames, row.Name)
	}
	var allTagNames []string
	for _, row := range allTags {
		allTagNames = append(allTagNames, row.Name)
	}
	authed, err := isAuthenticated(r, rc)
	if err != nil {
		return fmt.Errorf("auth status: %w", err)
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	sw := ssr.NewWriter(w, rc.TmplCache)
	data := itemPageData{
		Item:        *row,
		Images:      images,
		TagNames:    tagNames,
		AllTagNames: allTagNames,
		HasAuth:     authed,
	}
	return components.Page{
		Title:   "Item",
		Content: data,
		ShowNav: true,
		HasAuth: authed,
	}.Render(sw)
}
// HandlePostItem handles the add-item form: creates a new item from the
// submitted "url" form field and redirects back to the gallery.
// Requires an authenticated session.
func HandlePostItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !requireAuth(w, r, rc) {
		return nil
	}
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Bad Request", http.StatusBadRequest)
		return nil
	}
	sourceURL := strings.TrimSpace(r.FormValue("url"))
	if sourceURL == "" {
		http.Error(w, "URL required", http.StatusBadRequest)
		return nil
	}
	if _, err := services.CreateItemFromURL(r.Context(), rc.DB, sourceURL); err != nil {
		return fmt.Errorf("create item: %w", err)
	}
	http.Redirect(w, r, "/", http.StatusSeeOther)
	return nil
}
// HandlePostItemTags handles POST /items/{id}/tags: it replaces the item's
// tag set with the comma-separated list in the "tags" form field (auth
// required). Tags absent from the form are removed (replace-all semantics).
func HandlePostItemTags(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !requireAuth(w, r, rc) {
		return nil
	}
	id, err := parseID(r.PathValue("id"))
	if err != nil {
		http.Error(w, "Invalid ID", http.StatusBadRequest)
		return nil
	}
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Bad Request", http.StatusBadRequest)
		return nil
	}
	tags := parseTags(r.FormValue("tags"))
	if err := tag.QReplaceItemTags(r.Context(), rc.DB, id, tags); err != nil {
		return fmt.Errorf("update tags: %w", err)
	}
	// 204 lets the client update its UI without a navigation.
	w.WriteHeader(http.StatusNoContent)
	return nil
}
// HandleDeleteItem handles POST /items/{id}/delete: it soft-deletes the item
// (auth required) and redirects to the index.
func HandleDeleteItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !requireAuth(w, r, rc) {
		return nil
	}
	id, err := parseID(r.PathValue("id"))
	if err != nil {
		http.Error(w, "Invalid ID", http.StatusBadRequest)
		return nil
	}
	// Soft delete only: the row (and its media) stays in the database.
	if err := item.QSoftDelete(r.Context(), rc.DB, id); err != nil {
		return fmt.Errorf("delete item: %w", err)
	}
	http.Redirect(w, r, "/", http.StatusSeeOther)
	return nil
}
// HandleRefreshItem handles POST /items/{id}/refresh: it re-fetches metadata
// and images from the item's source URL (auth required) and redirects back to
// the item page.
func HandleRefreshItem(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	if !requireAuth(w, r, rc) {
		return nil
	}
	id, err := parseID(r.PathValue("id"))
	if err != nil {
		http.Error(w, "Invalid ID", http.StatusBadRequest)
		return nil
	}
	row, err := item.QFindByID(r.Context(), rc.DB, id)
	if err != nil {
		return fmt.Errorf("find item: %w", err)
	}
	if row == nil {
		http.NotFound(w, r)
		return nil
	}
	if err := services.RefreshItemFromURL(r.Context(), rc.DB, *row); err != nil {
		return fmt.Errorf("refresh item: %w", err)
	}
	http.Redirect(w, r, fmt.Sprintf("/items/%d", id), http.StatusSeeOther)
	return nil
}
// parseTags splits a comma-separated tag string into trimmed, non-empty
// names. It returns nil when the input contains no usable tags.
func parseTags(value string) []string {
	var out []string
	for _, raw := range strings.Split(value, ",") {
		if name := strings.TrimSpace(raw); name != "" {
			out = append(out, name)
		}
	}
	return out
}

View file

@ -1,236 +0,0 @@
package handlers
import (
"context"
"net/http"
"strings"
"time"
"git.soup.land/soup/sxgo/ssr"
"lookbook/internal/components"
"lookbook/internal/data/item"
"lookbook/internal/data/media"
"lookbook/internal/data/tag"
)
// itemPageContent is the template payload for the item detail page.
type itemPageContent struct {
	Item    itemPageData
	IsAdmin bool // enables edit/refresh/delete controls and the edit modal
}

// itemPageData is the view model for a single item. Pointer fields are nil
// when the value is absent.
type itemPageData struct {
	ID            string  // public ID (pub_id), used in URLs
	Title         *string
	Description   *string
	LinkURL       *string
	ItemType      string  // one of "image", "video", "quote", "embed"
	EmbedHTML     *string
	EmbedVideoURL *string
	Tags          []string
	CreatedAt     string // pre-formatted display date
	ThumbnailID   *int64 // first "image" media row, served via /media/{id}
	MediaID       *int64 // "original" media row, served via /media/{id}
	MediaIsVideo  bool
	GalleryIDs    []int64 // Additional images for multi-image embeds
}
// Render writes the item detail page. The template selects a media
// presentation by item type — quote, uploaded video, embed (preferring
// proxied video, then gallery, then a single thumbnail), or plain image —
// followed by the metadata block and, for admins, action buttons plus an
// edit/replace-media modal. The raw template string is emitted as-is; do not
// reflow it.
func (c itemPageContent) Render(sw *ssr.Writer) error {
	return sw.Tmpl(c, `
<div class="item-page">
<a href="/" class="back-link">&larr; BACK</a>
<article class="item-detail">
{{if eq .Item.ItemType "quote"}}
<div class="quote-detail">
<blockquote>{{.Item.Description}}</blockquote>
{{if .Item.Title}}<cite> {{.Item.Title}}</cite>{{end}}
</div>
{{else if eq .Item.ItemType "video"}}
<div class="video-container">
<video controls>
<source src="/media/{{.Item.MediaID}}" type="video/mp4">
</video>
</div>
{{else if eq .Item.ItemType "embed"}}
{{if .Item.EmbedVideoURL}}
<div class="video-container">
<video loop muted playsinline poster="{{if .Item.ThumbnailID}}/media/{{.Item.ThumbnailID}}{{end}}">
<source src="/proxy/video/{{.Item.ID}}" type="video/mp4">
</video>
<div class="video-overlay" onclick="playVideo(this)">
<span class="play-button">&#9654;</span>
</div>
</div>
{{else if .Item.GalleryIDs}}
<div class="image-gallery">
{{if .Item.ThumbnailID}}<a href="/media/{{.Item.ThumbnailID}}" target="_blank"><img src="/media/{{.Item.ThumbnailID}}" alt="{{if .Item.Title}}{{.Item.Title}}{{else}}Image{{end}}"></a>{{end}}
{{range .Item.GalleryIDs}}<a href="/media/{{.}}" target="_blank"><img src="/media/{{.}}" alt="Image"></a>{{end}}
</div>
{{else if .Item.ThumbnailID}}
<div class="image-container">
<a href="/media/{{.Item.ThumbnailID}}" target="_blank"><img src="/media/{{.Item.ThumbnailID}}" alt="{{if .Item.Title}}{{.Item.Title}}{{else}}Embed{{end}}"></a>
</div>
{{else if .Item.MediaID}}
<div class="image-container">
<a href="/media/{{.Item.MediaID}}" target="_blank"><img src="/media/{{.Item.MediaID}}" alt="{{if .Item.Title}}{{.Item.Title}}{{else}}Embed{{end}}"></a>
</div>
{{end}}
{{else if .Item.MediaID}}
<div class="image-container">
<a href="/media/{{.Item.MediaID}}" target="_blank"><img src="/media/{{.Item.MediaID}}" alt="{{if .Item.Title}}{{.Item.Title}}{{else}}Image{{end}}"></a>
</div>
{{else if .Item.ThumbnailID}}
<div class="image-container">
<a href="/media/{{.Item.ThumbnailID}}" target="_blank"><img src="/media/{{.Item.ThumbnailID}}" alt="{{if .Item.Title}}{{.Item.Title}}{{else}}Image{{end}}"></a>
</div>
{{end}}
<div class="item-meta">
{{if .Item.Title}}
<h1>{{.Item.Title}}</h1>
{{end}}
{{if and .Item.Description (ne .Item.ItemType "quote")}}
<p class="description">{{.Item.Description}}</p>
{{end}}
{{if .Item.LinkURL}}
<a href="{{.Item.LinkURL}}" target="_blank" rel="noopener" class="source-link">{{.Item.LinkURL}}</a>
{{end}}
{{if .Item.Tags}}
<div class="item-tags">
{{range .Item.Tags}}
<a href="/?tag={{.}}" class="item-tag">{{.}}</a>
{{end}}
</div>
{{end}}
<time class="timestamp">{{.Item.CreatedAt}}</time>
</div>
{{if .IsAdmin}}
<div class="item-actions">
<button class="btn" onclick="editItem('{{.Item.ID}}')">EDIT</button>
{{if .Item.LinkURL}}<button class="btn" onclick="refreshMetadata('{{.Item.ID}}')">REFRESH</button>{{end}}
<button class="btn btn-danger" onclick="deleteItem('{{.Item.ID}}')">DELETE</button>
</div>
{{end}}
</article>
</div>
{{if .IsAdmin}}
<div id="edit-modal" class="modal" onclick="if(event.target===this)hideEditModal()">
<div class="modal-content">
<div class="modal-header">
<h2>EDIT ITEM</h2>
<button class="btn-close" onclick="hideEditModal()">&times;</button>
</div>
<div class="modal-body">
<form id="edit-form" onsubmit="return submitEdit(event)">
<input type="hidden" name="id" value="{{.Item.ID}}">
<input type="text" name="title" placeholder="Title" value="{{if .Item.Title}}{{.Item.Title}}{{end}}">
<textarea name="description" placeholder="Description">{{if .Item.Description}}{{.Item.Description}}{{end}}</textarea>
<input type="url" name="linkUrl" placeholder="Link URL" value="{{if .Item.LinkURL}}{{.Item.LinkURL}}{{end}}">
<input type="text" name="tags" placeholder="Tags (comma-separated)" value="{{range $i, $t := .Item.Tags}}{{if $i}}, {{end}}{{$t}}{{end}}">
<button type="submit" class="btn">SAVE</button>
</form>
<hr style="margin: 1.5rem 0; border: none; border-top: 1px solid var(--border);">
<form id="replace-media-form" onsubmit="return submitReplaceMedia(event, '{{.Item.ID}}')">
<label style="display: block; margin-bottom: 0.5rem; font-size: 0.875rem;">REPLACE IMAGE/VIDEO</label>
<input type="file" name="file" accept="image/*,video/*" required>
<button type="submit" class="btn" style="margin-top: 0.5rem;">REPLACE</button>
</form>
</div>
</div>
</div>
{{end}}
`)
}
// HandleItemPage handles GET /item/{id}.
//
// It resolves an item by public ID, gathers its tags and media (splitting
// media into original, thumbnail, and gallery roles), and renders the item
// detail page.
func HandleItemPage(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	pubID := r.PathValue("id")
	if pubID == "" {
		http.NotFound(w, r)
		return nil
	}
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()
	it, err := item.QFindByPubID(ctx, rc.DB, pubID)
	if err != nil {
		return err
	}
	if it == nil {
		http.NotFound(w, r)
		return nil
	}
	// Get tags
	itemTags, err := tag.QTagsForItem(ctx, rc.DB, it.ID)
	if err != nil {
		return err
	}
	tagNames := make([]string, len(itemTags))
	for i, t := range itemTags {
		tagNames[i] = t.Name
	}
	// Get media
	// Media is ordered by ID, so first "image" is the thumbnail, rest are gallery
	var thumbnailID, mediaID *int64
	var mediaIsVideo bool
	var galleryIDs []int64
	mediaList, err := media.QFindByItemID(ctx, rc.DB, it.ID)
	if err != nil {
		return err
	}
	firstImage := true
	for _, m := range mediaList {
		if m.MediaType == "original" {
			mediaID = &m.ID
			mediaIsVideo = strings.HasPrefix(m.ContentType, "video/")
		} else if m.MediaType == "image" {
			if firstImage {
				thumbnailID = &m.ID
				firstImage = false
			} else {
				galleryIDs = append(galleryIDs, m.ID)
			}
		}
	}
	// Build the view model; Ptr converts nullable columns to *string (nil
	// when NULL).
	data := itemPageData{
		ID:            it.PubID,
		Title:         item.Ptr(it.Title),
		Description:   item.Ptr(it.Description),
		LinkURL:       item.Ptr(it.LinkURL),
		ItemType:      it.ItemType,
		EmbedHTML:     item.Ptr(it.EmbedHTML),
		EmbedVideoURL: item.Ptr(it.EmbedVideoURL),
		Tags:          tagNames,
		CreatedAt:     it.CreatedAt.Format("Jan 2, 2006"),
		ThumbnailID:   thumbnailID,
		MediaID:       mediaID,
		MediaIsVideo:  mediaIsVideo,
		GalleryIDs:    galleryIDs,
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	sw := ssr.NewWriter(w, rc.TmplCache)
	// Page title falls back to empty when the item has no title.
	var title string
	if it.Title.Valid {
		title = it.Title.V
	}
	page := components.Page{
		Title:   title,
		IsAdmin: rc.IsAdmin,
		Content: itemPageContent{Item: data, IsAdmin: rc.IsAdmin},
	}
	return page.Render(sw)
}

View file

@ -1,50 +0,0 @@
package handlers
import (
"context"
"fmt"
"net/http"
"strconv"
"time"
"lookbook/internal/data/media"
)
// HandleGetMedia handles GET /media/{id}.
//
// It serves a stored media blob by numeric ID. Media rows are immutable, so
// the response carries aggressive caching headers and an ID-based ETag, and
// conditional requests are answered with 304 Not Modified.
func HandleGetMedia(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	idStr := r.PathValue("id")
	id, err := strconv.ParseInt(idStr, 10, 64)
	if err != nil {
		http.Error(w, "invalid id", http.StatusBadRequest)
		return nil
	}
	ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
	defer cancel()
	m, err := media.QFindByID(ctx, rc.DB, id)
	if err != nil {
		return err
	}
	if m == nil {
		http.NotFound(w, r)
		return nil
	}
	// Set caching headers (media is immutable).
	etag := fmt.Sprintf(`"%d"`, m.ID)
	w.Header().Set("Cache-Control", "public, max-age=31536000, immutable")
	w.Header().Set("ETag", etag)
	// Answer conditional requests BEFORE setting entity headers: a 304
	// response must not carry the Content-Length of the full body
	// (previously Content-Type/Content-Length were set unconditionally).
	if r.Header.Get("If-None-Match") == etag {
		w.WriteHeader(http.StatusNotModified)
		return nil
	}
	w.Header().Set("Content-Type", m.ContentType)
	w.Header().Set("Content-Length", strconv.Itoa(len(m.Data)))
	if _, err := w.Write(m.Data); err != nil {
		// Headers are already sent; surface the error for logging only.
		return fmt.Errorf("write media %d: %w", id, err)
	}
	return nil
}

View file

@ -1,77 +0,0 @@
package handlers
import (
"context"
"io"
"net/http"
"time"
"lookbook/internal/data/item"
)
// HandleProxyVideo proxies video requests for embed items to avoid
// cross-origin issues with external video CDNs (e.g., Twitter's video.twimg.com).
// GET /proxy/video/{id}
//
// The upstream status code (including 206 Partial Content) and the
// range-related headers are passed through so browser seeking keeps working.
func HandleProxyVideo(rc *RequestContext, w http.ResponseWriter, r *http.Request) error {
	pubID := r.PathValue("id")
	if pubID == "" {
		http.NotFound(w, r)
		return nil
	}
	ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
	defer cancel()
	it, err := item.QFindByPubID(ctx, rc.DB, pubID)
	if err != nil {
		return err
	}
	// Only items that actually have an embed video URL can be proxied.
	if it == nil || !it.EmbedVideoURL.Valid || it.EmbedVideoURL.V == "" {
		http.NotFound(w, r)
		return nil
	}
	videoURL := it.EmbedVideoURL.V
	req, err := http.NewRequestWithContext(ctx, "GET", videoURL, nil)
	if err != nil {
		return err
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)")
	// Pass through Range header for video seeking
	if rangeHeader := r.Header.Get("Range"); rangeHeader != "" {
		req.Header.Set("Range", rangeHeader)
	}
	// Long client timeout: whole-file streaming can legitimately take a while.
	client := &http.Client{Timeout: 2 * time.Minute}
	resp, err := client.Do(req)
	if err != nil {
		http.Error(w, "Failed to fetch video", http.StatusBadGateway)
		return nil
	}
	defer resp.Body.Close()
	// Pass through relevant headers
	if ct := resp.Header.Get("Content-Type"); ct != "" {
		w.Header().Set("Content-Type", ct)
	}
	if cl := resp.Header.Get("Content-Length"); cl != "" {
		w.Header().Set("Content-Length", cl)
	}
	if cr := resp.Header.Get("Content-Range"); cr != "" {
		w.Header().Set("Content-Range", cr)
	}
	if ar := resp.Header.Get("Accept-Ranges"); ar != "" {
		w.Header().Set("Accept-Ranges", ar)
	}
	// Cache for 1 day in the browser
	w.Header().Set("Cache-Control", "public, max-age=86400")
	w.WriteHeader(resp.StatusCode)
	// Copy errors are ignored deliberately: the status line is already sent,
	// and client disconnects during streaming are routine.
	io.Copy(w, resp.Body)
	return nil
}

View file

@ -1,12 +1,7 @@
package handlers package handlers
import ( import (
"context"
"log/slog"
"net/http" "net/http"
"time"
"lookbook/internal/data/session"
) )
// Router wraps http.ServeMux and automatically injects RequestContext into handlers. // Router wraps http.ServeMux and automatically injects RequestContext into handlers.
@ -30,7 +25,6 @@ func (rt *Router) Handle(pattern string, h Handler) {
DB: rt.rc.DB, DB: rt.rc.DB,
Logger: rt.rc.Logger, Logger: rt.rc.Logger,
TmplCache: rt.rc.TmplCache, TmplCache: rt.rc.TmplCache,
IsAdmin: rt.loadAuth(r),
} }
handler := WithErrorHandling(rc, h) handler := WithErrorHandling(rc, h)
@ -38,33 +32,6 @@ func (rt *Router) Handle(pattern string, h Handler) {
}) })
} }
// loadAuth checks if the request has a valid session cookie.
func (rt *Router) loadAuth(r *http.Request) bool {
cookie, err := r.Cookie("session_id")
if err != nil {
return false
}
ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
defer cancel()
sess, err := session.QFindBySessionID(ctx, rt.rc.DB, cookie.Value)
if err != nil {
rt.rc.Logger.Error("failed to find session", slog.Any("err", err))
return false
}
if sess == nil {
return false
}
if time.Now().After(sess.ExpiresAt) {
rt.rc.Logger.Info("session expired", slog.String("session_id", cookie.Value))
return false
}
return true
}
// HandleStd registers a standard http.Handler (for static files, etc.) // HandleStd registers a standard http.Handler (for static files, etc.)
func (rt *Router) HandleStd(pattern string, h http.Handler) { func (rt *Router) HandleStd(pattern string, h http.Handler) {
rt.mux.Handle(pattern, h) rt.mux.Handle(pattern, h)

View file

@ -1,17 +1,23 @@
package handlers package handlers
import ( import (
"encoding/json"
"html/template" "html/template"
"strings"
"lookbook/internal/static" "git.soup.land/soup/lookbook/internal/static"
) )
var TemplateFuncs = template.FuncMap{ var TemplateFuncs = template.FuncMap{
"staticURL": static.VersionedPath, "staticURL": static.VersionedPath,
"safeHTML": func(s *string) template.HTML { "json": jsonTemplate,
if s == nil { }
return ""
} func jsonTemplate(v any) template.JS {
return template.HTML(*s) payload, err := json.Marshal(v)
}, if err != nil {
return template.JS("null")
}
safe := strings.ReplaceAll(string(payload), "</", "<\\/")
return template.JS(safe)
} }

View file

@ -15,7 +15,7 @@ import (
//go:embed sql/*.sql //go:embed sql/*.sql
var FS embed.FS var FS embed.FS
const DefaultURL = "postgres:///lookbook?sslmode=disable" const DefaultURL = "postgres://postgres:postgres@localhost:5432/lookbook?sslmode=disable"
// Up applies all available migrations using the provided database URL. // Up applies all available migrations using the provided database URL.
func Up(ctx context.Context, dbURL string, logger *slog.Logger) error { func Up(ctx context.Context, dbURL string, logger *slog.Logger) error {
@ -66,7 +66,6 @@ func Down(ctx context.Context, dbURL string, targetVersion int64, logger *slog.L
return nil return nil
} }
// CheckPending returns the number of pending migrations without applying them.
func CheckPending(ctx context.Context, dbURL string, logger *slog.Logger) (int, error) { func CheckPending(ctx context.Context, dbURL string, logger *slog.Logger) (int, error) {
url := dbURL url := dbURL
if url == "" { if url == "" {

View file

@ -1,80 +1,73 @@
-- +goose Up -- +goose Up
-- gen_random_uuid() is built-in since PostgreSQL 13, no extension needed CREATE EXTENSION IF NOT EXISTS pgcrypto;
-- Admin authentication (single row)
CREATE TABLE admin (
id SERIAL PRIMARY KEY,
password_hash BYTEA, -- NULL until first login() sets password
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Insert the single admin row
INSERT INTO admin (id) VALUES (1);
-- Sessions for admin authentication
CREATE TABLE session (
id BIGSERIAL PRIMARY KEY,
session_id TEXT NOT NULL UNIQUE,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
expires_at TIMESTAMPTZ NOT NULL
);
CREATE INDEX idx_session_expires_at ON session(expires_at);
-- Items (the main content)
CREATE TABLE item ( CREATE TABLE item (
id BIGSERIAL PRIMARY KEY, id BIGSERIAL PRIMARY KEY,
pub_id UUID NOT NULL DEFAULT gen_random_uuid() UNIQUE, pub_id UUID NOT NULL DEFAULT gen_random_uuid(),
title TEXT, source_url TEXT NOT NULL,
description TEXT, title TEXT NOT NULL DEFAULT '',
link_url TEXT, -- Source URL (optional) description TEXT NOT NULL DEFAULT '',
item_type TEXT NOT NULL, -- 'image', 'video', 'quote', 'embed' site_name TEXT NOT NULL DEFAULT '',
embed_provider TEXT, -- 'youtube', 'vimeo', NULL created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
embed_video_id TEXT, -- Video ID for embeds updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
embed_html TEXT, -- Cached embed iframe HTML deleted_at TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), UNIQUE (pub_id)
deleted_at TIMESTAMPTZ -- Soft delete
); );
CREATE INDEX idx_item_deleted ON item(deleted_at); CREATE INDEX idx_item_created_at ON item(created_at DESC);
CREATE INDEX idx_item_created ON item(created_at DESC); CREATE INDEX idx_item_deleted_at ON item(deleted_at);
CREATE INDEX idx_item_pub_id ON item(pub_id);
-- Media blobs (stored in DB) CREATE TABLE image (
CREATE TABLE media ( id BIGSERIAL PRIMARY KEY,
id BIGSERIAL PRIMARY KEY, item_id BIGINT NOT NULL REFERENCES item(id) ON DELETE CASCADE,
item_id BIGINT NOT NULL REFERENCES item(id) ON DELETE CASCADE, original_url TEXT NOT NULL DEFAULT '',
media_type TEXT NOT NULL, -- 'original', 'thumbnail' content_type TEXT NOT NULL DEFAULT '',
content_type TEXT NOT NULL, -- MIME type bytes BYTEA NOT NULL,
data BYTEA NOT NULL, width INT NOT NULL DEFAULT 0,
width INT, height INT NOT NULL DEFAULT 0,
height INT, is_thumb BOOLEAN NOT NULL DEFAULT FALSE,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
); );
CREATE INDEX idx_media_item_id ON media(item_id); CREATE INDEX idx_image_item ON image(item_id);
CREATE INDEX idx_image_thumb ON image(item_id, is_thumb);
-- Tags
CREATE TABLE tag ( CREATE TABLE tag (
id BIGSERIAL PRIMARY KEY, id BIGSERIAL PRIMARY KEY,
name TEXT NOT NULL UNIQUE name TEXT NOT NULL UNIQUE,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
); );
CREATE INDEX idx_tag_name ON tag(name);
-- Item-Tag junction
CREATE TABLE item_tag ( CREATE TABLE item_tag (
item_id BIGINT NOT NULL REFERENCES item(id) ON DELETE CASCADE, item_id BIGINT NOT NULL REFERENCES item(id) ON DELETE CASCADE,
tag_id BIGINT NOT NULL REFERENCES tag(id) ON DELETE CASCADE, tag_id BIGINT NOT NULL REFERENCES tag(id) ON DELETE CASCADE,
PRIMARY KEY (item_id, tag_id) PRIMARY KEY (item_id, tag_id)
); );
CREATE INDEX idx_item_tag_tag_id ON item_tag(tag_id); CREATE INDEX idx_item_tag_item ON item_tag(item_id);
CREATE INDEX idx_item_tag_tag ON item_tag(tag_id);
CREATE TABLE auth (
id BIGSERIAL PRIMARY KEY,
password_hash BYTEA NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE TABLE session (
id BIGSERIAL PRIMARY KEY,
token TEXT NOT NULL UNIQUE,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
expires_at TIMESTAMPTZ NOT NULL
);
CREATE INDEX idx_session_token ON session(token);
CREATE INDEX idx_session_expires_at ON session(expires_at);
-- +goose Down -- +goose Down
DROP TABLE IF EXISTS session;
DROP TABLE IF EXISTS auth;
DROP TABLE IF EXISTS item_tag; DROP TABLE IF EXISTS item_tag;
DROP TABLE IF EXISTS tag; DROP TABLE IF EXISTS tag;
DROP TABLE IF EXISTS media; DROP TABLE IF EXISTS image;
DROP TABLE IF EXISTS item; DROP TABLE IF EXISTS item;
DROP TABLE IF EXISTS session; DROP EXTENSION IF EXISTS pgcrypto;
DROP TABLE IF EXISTS admin;

View file

@ -1,5 +0,0 @@
-- +goose Up
ALTER TABLE media ADD COLUMN source_url TEXT;
-- +goose Down
ALTER TABLE media DROP COLUMN source_url;

View file

@ -1,5 +0,0 @@
-- +goose Up
ALTER TABLE item ADD COLUMN embed_video_url TEXT;
-- +goose Down
ALTER TABLE item DROP COLUMN embed_video_url;

View file

@ -1,8 +0,0 @@
-- +goose Up
-- Consolidate 'thumbnail' and 'gallery' media types into unified 'image' type
UPDATE media SET media_type = 'image' WHERE media_type IN ('thumbnail', 'gallery');
-- +goose Down
-- Note: Cannot perfectly reverse since we lose the distinction between thumbnail and gallery
-- This sets all 'image' back to 'thumbnail' as a fallback
UPDATE media SET media_type = 'thumbnail' WHERE media_type = 'image';

View file

@ -1,185 +0,0 @@
package opengraph
import (
"context"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"time"
"golang.org/x/net/html"
)
// Metadata contains extracted OpenGraph and meta data from a URL.
type Metadata struct {
	Title       string // og:title, twitter:title, or <title> fallback
	Description string // og:description, twitter:description, or meta name=description
	ImageURL    string // og:image / twitter:image, resolved to absolute by Fetch
	VideoURL    string // og:video / og:video:url, resolved to absolute by Fetch
	SiteName    string // og:site_name
	Type        string // og:type
}
// Fetch fetches and parses OpenGraph metadata from a URL.
//
// It downloads at most 1MB of HTML (following up to 5 redirects), extracts
// OpenGraph/Twitter-card/meta values, and resolves relative image/video URLs
// against the requested URL. Returns an error for network failures, non-200
// responses, or unparseable HTML.
func Fetch(ctx context.Context, targetURL string) (*Metadata, error) {
	req, err := http.NewRequestWithContext(ctx, "GET", targetURL, nil)
	if err != nil {
		return nil, fmt.Errorf("create request: %w", err)
	}
	// Some sites serve different (or no) OG tags to unknown user agents.
	req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)")
	client := &http.Client{
		Timeout: 10 * time.Second,
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			if len(via) >= 5 {
				return fmt.Errorf("too many redirects")
			}
			return nil
		},
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("fetch url: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status: %d", resp.StatusCode)
	}
	// Limit response body to 1MB
	body := io.LimitReader(resp.Body, 1<<20)
	doc, err := html.Parse(body)
	if err != nil {
		return nil, fmt.Errorf("parse html: %w", err)
	}
	meta := &Metadata{}
	parseNode(doc, meta)
	// Resolve relative URLs
	baseURL, _ := url.Parse(targetURL)
	if meta.ImageURL != "" && !strings.HasPrefix(meta.ImageURL, "http") {
		if imgURL, err := baseURL.Parse(meta.ImageURL); err == nil {
			meta.ImageURL = imgURL.String()
		}
	}
	if meta.VideoURL != "" && !strings.HasPrefix(meta.VideoURL, "http") {
		if vidURL, err := baseURL.Parse(meta.VideoURL); err == nil {
			meta.VideoURL = vidURL.String()
		}
	}
	return meta, nil
}
// parseNode walks the HTML tree depth-first, collecting metadata from <meta>
// elements and using the first non-empty <title> text as a title fallback.
func parseNode(n *html.Node, meta *Metadata) {
	if n.Type == html.ElementNode {
		if n.Data == "meta" {
			parseMeta(n, meta)
		} else if n.Data == "title" && meta.Title == "" && n.FirstChild != nil {
			meta.Title = strings.TrimSpace(n.FirstChild.Data)
		}
	}
	for child := n.FirstChild; child != nil; child = child.NextSibling {
		parseNode(child, meta)
	}
}
// parseMeta extracts metadata from a single <meta> element.
//
// OpenGraph properties win over Twitter-card names: og:title overwrites any
// earlier value, while most other fields use first-wins (only filled when
// still empty). Twitter/description values are used only as fallbacks.
func parseMeta(n *html.Node, meta *Metadata) {
	var property, name, content string
	for _, attr := range n.Attr {
		switch attr.Key {
		case "property":
			property = attr.Val
		case "name":
			name = attr.Val
		case "content":
			content = attr.Val
		}
	}
	// OpenGraph properties
	switch property {
	case "og:title":
		meta.Title = content
	case "og:description":
		if meta.Description == "" {
			meta.Description = content
		}
	case "og:image":
		if meta.ImageURL == "" {
			meta.ImageURL = content
		}
	case "og:video", "og:video:url":
		if meta.VideoURL == "" {
			meta.VideoURL = content
		}
	case "og:site_name":
		meta.SiteName = content
	case "og:type":
		meta.Type = content
	}
	// Twitter cards
	switch name {
	case "twitter:title":
		if meta.Title == "" {
			meta.Title = content
		}
	case "twitter:description":
		if meta.Description == "" {
			meta.Description = content
		}
	case "twitter:image":
		if meta.ImageURL == "" {
			meta.ImageURL = content
		}
	case "description":
		if meta.Description == "" {
			meta.Description = content
		}
	}
}
// DownloadImage downloads an image from a URL and returns the data and content type.
//
// It rejects responses whose Content-Type is not image/* and caps the
// download at 50MB. Returns an error on network failure or non-200 status.
func DownloadImage(ctx context.Context, imageURL string) ([]byte, string, error) {
	req, err := http.NewRequestWithContext(ctx, "GET", imageURL, nil)
	if err != nil {
		return nil, "", fmt.Errorf("create request: %w", err)
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Lookbook/1.0)")
	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return nil, "", fmt.Errorf("fetch image: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, "", fmt.Errorf("unexpected status: %d", resp.StatusCode)
	}
	contentType := resp.Header.Get("Content-Type")
	if !strings.HasPrefix(contentType, "image/") {
		return nil, "", fmt.Errorf("not an image: %s", contentType)
	}
	// Limit to 50MB
	data, err := io.ReadAll(io.LimitReader(resp.Body, 50<<20))
	if err != nil {
		return nil, "", fmt.Errorf("read image: %w", err)
	}
	return data, contentType, nil
}

View file

@ -0,0 +1,410 @@
package services
import (
	"bytes"
	"context"
	"database/sql"
	"encoding/json"
	"fmt"
	"io"
	"mime"
	"net/http"
	"net/url"
	"path"
	"regexp"
	"strings"
	"time"
	"unicode/utf8"

	"github.com/disintegration/imaging"
	"golang.org/x/net/html"

	"git.soup.land/soup/lookbook/internal/data/image"
	"git.soup.land/soup/lookbook/internal/data/item"
)
// tweetURLPattern matches twitter.com / x.com status URLs, capturing the
// username (group 1) and numeric tweet ID (group 2).
var tweetURLPattern = regexp.MustCompile(`^https?://(?:www\.)?(?:twitter\.com|x\.com)/([^/]+)/status/(\d+)`)

// thumbWidth is the target pixel width for generated thumbnails.
const thumbWidth = 480
// CreateItemFromURL fetches metadata for sourceURL, creates an item row from
// it, and downloads/stores its primary image plus a thumbnail. When image
// storage fails, the already-created row is returned along with the error.
func CreateItemFromURL(ctx context.Context, db *sql.DB, sourceURL string) (item.Row, error) {
	meta, err := FetchMetadata(ctx, sourceURL)
	if err != nil {
		return item.Row{}, err
	}
	row, err := item.QCreate(ctx, db, sourceURL, meta.Title, meta.Description, meta.SiteName)
	if err != nil {
		return item.Row{}, err
	}
	if err := storeImages(ctx, db, row.ID, meta); err != nil {
		return row, err
	}
	return row, nil
}
// RefreshItemFromURL re-fetches metadata from the item's source URL, updates
// the item's title/description/site name, and replaces all stored images
// (existing images are deleted before re-downloading).
func RefreshItemFromURL(ctx context.Context, db *sql.DB, row item.Row) error {
	meta, err := FetchMetadata(ctx, row.SourceURL)
	if err != nil {
		return err
	}
	if err := item.QUpdateMeta(ctx, db, row.ID, meta.Title, meta.Description, meta.SiteName); err != nil {
		return err
	}
	// Delete-then-restore is not transactional: a failed re-download leaves
	// the item without images until the next refresh.
	if err := image.QDeleteByItem(ctx, db, row.ID); err != nil {
		return err
	}
	return storeImages(ctx, db, row.ID, meta)
}
// Metadata holds the page metadata extracted for an item: OpenGraph/Twitter
// values, with oEmbed and Twitter-syndication fallbacks.
type Metadata struct {
	Title       string
	Description string
	SiteName    string
	ImageURL    string // absolute URL of the primary image, or ""
}
// FetchMetadata extracts page metadata for sourceURL.
//
// Resolution order: Twitter syndication API for tweet URLs; a direct fetch
// whose Content-Type is image/* short-circuits to filename/host metadata;
// otherwise HTML meta tags, with a noembed.com oEmbed fallback when no image
// was found. Parse failures are tolerated — whatever was gathered is returned.
func FetchMetadata(ctx context.Context, sourceURL string) (Metadata, error) {
	// Check if this is a Twitter/X URL and use syndication API
	if meta, ok := fetchTwitterMetadata(ctx, sourceURL); ok {
		return meta, nil
	}
	resp, err := fetchURL(ctx, sourceURL)
	if err != nil {
		return Metadata{}, err
	}
	defer resp.Body.Close()
	// Cap the HTML read at 8MB.
	body, err := io.ReadAll(io.LimitReader(resp.Body, 8<<20))
	if err != nil {
		return Metadata{}, err
	}
	meta := Metadata{}
	contentType := resp.Header.Get("Content-Type")
	// resp.Request.URL is the final URL after redirects.
	meta.ImageURL = extractImageURL(resp.Request.URL, contentType)
	if strings.HasPrefix(strings.ToLower(contentType), "image/") {
		// NOTE(review): meta.Title/SiteName are always empty here, so both
		// guards are always true; harmless but redundant.
		if meta.Title == "" {
			meta.Title = path.Base(resp.Request.URL.Path)
		}
		if meta.SiteName == "" {
			meta.SiteName = resp.Request.URL.Hostname()
		}
		return meta, nil
	}
	doc, err := html.Parse(bytes.NewReader(body))
	if err != nil {
		// Unparseable HTML: return what we have rather than failing.
		return meta, nil
	}
	extractMeta(doc, &meta)
	if meta.Title == "" {
		meta.Title = titleFromDoc(doc)
	}
	// oEmbed fallback only when no image was found in the page itself.
	if meta.ImageURL == "" {
		if oembed, err := fetchOEmbed(ctx, sourceURL); err == nil {
			if meta.Title == "" {
				meta.Title = oembed.Title
			}
			if meta.Description == "" {
				meta.Description = oembed.Description
			}
			if meta.ImageURL == "" {
				meta.ImageURL = oembed.ThumbnailURL
			}
			if meta.SiteName == "" {
				meta.SiteName = oembed.ProviderName
			}
		}
	}
	return meta, nil
}
// oEmbedResponse is the subset of the noembed.com oEmbed payload we consume.
type oEmbedResponse struct {
	Title        string `json:"title"`
	Description  string `json:"description"`
	ThumbnailURL string `json:"thumbnail_url"`
	ProviderName string `json:"provider_name"`
}
// Twitter/X syndication API response structures
// (cdn.syndication.twimg.com/tweet-result). Only the fields used for
// metadata extraction are declared.
type twitterSyndicationResponse struct {
	Text   string              `json:"text"`
	User   twitterUser         `json:"user"`
	Photos []twitterPhoto      `json:"photos"`
	Video  *twitterVideo       `json:"video"`
	Card   *twitterCard        `json:"card"`
	Media  []twitterMediaEntry `json:"mediaDetails"`
}

// twitterUser identifies the tweet author.
type twitterUser struct {
	Name       string `json:"name"`
	ScreenName string `json:"screen_name"`
}

// twitterPhoto is one attached photo.
type twitterPhoto struct {
	URL string `json:"url"`
}

// twitterVideo carries the poster frame of an attached video.
type twitterVideo struct {
	Poster string `json:"poster"`
}

// twitterCard is a link-preview card attached to the tweet.
type twitterCard struct {
	ThumbnailImageOriginal string `json:"thumbnail_image_original"`
}

// twitterMediaEntry is a generic media attachment from mediaDetails.
type twitterMediaEntry struct {
	MediaURLHTTPS string `json:"media_url_https"`
	Type          string `json:"type"`
}
// fetchTwitterMetadata attempts to fetch metadata from Twitter's syndication API
// Returns the metadata and true if successful, or empty metadata and false if not a Twitter URL or fetch failed
func fetchTwitterMetadata(ctx context.Context, sourceURL string) (Metadata, bool) {
	matches := tweetURLPattern.FindStringSubmatch(sourceURL)
	if matches == nil {
		return Metadata{}, false
	}
	username := matches[1]
	tweetID := matches[2]
	syndicationURL := fmt.Sprintf("https://cdn.syndication.twimg.com/tweet-result?id=%s&token=0", tweetID)
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, syndicationURL, nil)
	if err != nil {
		return Metadata{}, false
	}
	// The syndication API requires specific headers
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
	req.Header.Set("Referer", "https://platform.twitter.com/")
	client := &http.Client{Timeout: 12 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return Metadata{}, false
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		return Metadata{}, false
	}
	// Check content type - if it's HTML, it's an error page
	contentType := resp.Header.Get("Content-Type")
	if !strings.Contains(contentType, "application/json") {
		return Metadata{}, false
	}
	var tweet twitterSyndicationResponse
	if err := json.NewDecoder(resp.Body).Decode(&tweet); err != nil {
		return Metadata{}, false
	}
	// Tweet text becomes the (truncated) title; author handle the description.
	meta := Metadata{
		Title:       truncateText(tweet.Text, 200),
		Description: fmt.Sprintf("@%s", username),
		SiteName:    "X",
	}
	// Try to get image URL from various sources
	// Priority: photos > video poster > card thumbnail > mediaDetails
	if len(tweet.Photos) > 0 {
		meta.ImageURL = tweet.Photos[0].URL
	} else if tweet.Video != nil && tweet.Video.Poster != "" {
		meta.ImageURL = tweet.Video.Poster
	} else if tweet.Card != nil && tweet.Card.ThumbnailImageOriginal != "" {
		meta.ImageURL = tweet.Card.ThumbnailImageOriginal
	} else if len(tweet.Media) > 0 {
		meta.ImageURL = tweet.Media[0].MediaURLHTTPS
	}
	// If we got user info, use it for a better description
	if tweet.User.Name != "" {
		meta.Description = fmt.Sprintf("%s (@%s)", tweet.User.Name, tweet.User.ScreenName)
	}
	return meta, true
}
// truncateText shortens s to at most maxLen bytes, preferring to break at a
// word boundary past the halfway point, and appends an ellipsis when the
// string was cut. The cut position is backed off to a UTF-8 rune boundary so
// the result is always valid UTF-8 — the previous byte-slice cut could split
// a multi-byte rune (common in tweet text) and emit a mangled character.
func truncateText(s string, maxLen int) string {
	if len(s) <= maxLen {
		return s
	}
	// Back off until the cut lands on the start of a rune.
	cut := maxLen
	for cut > 0 && !utf8.RuneStart(s[cut]) {
		cut--
	}
	truncated := s[:cut]
	// Try to break at a word boundary, keeping at least half the budget.
	if lastSpace := strings.LastIndex(truncated, " "); lastSpace > maxLen/2 {
		truncated = truncated[:lastSpace]
	}
	return truncated + "…"
}
// fetchOEmbed queries the noembed.com aggregator for oEmbed metadata about
// sourceURL. Used as a fallback when a page exposes no usable meta tags.
func fetchOEmbed(ctx context.Context, sourceURL string) (oEmbedResponse, error) {
	oembedURL := fmt.Sprintf("https://noembed.com/embed?url=%s", url.QueryEscape(sourceURL))
	resp, err := fetchURL(ctx, oembedURL)
	if err != nil {
		return oEmbedResponse{}, err
	}
	defer resp.Body.Close()
	var payload oEmbedResponse
	if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
		return oEmbedResponse{}, err
	}
	return payload, nil
}
// fetchURL performs a GET with a 12-second timeout and the lookbook user
// agent. Statuses in [200, 400) are treated as success; otherwise the body is
// closed and an error returned. The caller must close resp.Body on success.
func fetchURL(ctx context.Context, rawURL string) (*http.Response, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, rawURL, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", "lookbook/1.0")
	client := &http.Client{Timeout: 12 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		resp.Body.Close()
		return nil, fmt.Errorf("fetch %s: status %d", rawURL, resp.StatusCode)
	}
	return resp, nil
}
// extractMeta walks the HTML tree depth-first, filling meta from <meta> tags.
// All fields use first-wins semantics (a value is only stored while the field
// is still empty), so document order determines precedence; og:* and
// twitter:* properties are treated equivalently, and a plain name=description
// tag is the lowest-priority description source.
func extractMeta(n *html.Node, meta *Metadata) {
	if n.Type == html.ElementNode && n.Data == "meta" {
		var property, content, name string
		for _, attr := range n.Attr {
			switch strings.ToLower(attr.Key) {
			case "property":
				property = strings.ToLower(attr.Val)
			case "content":
				content = strings.TrimSpace(attr.Val)
			case "name":
				name = strings.ToLower(attr.Val)
			}
		}
		if content != "" {
			switch property {
			case "og:title", "twitter:title":
				if meta.Title == "" {
					meta.Title = content
				}
			case "og:description", "twitter:description":
				if meta.Description == "" {
					meta.Description = content
				}
			case "og:site_name":
				if meta.SiteName == "" {
					meta.SiteName = content
				}
			case "og:image", "twitter:image":
				if meta.ImageURL == "" {
					meta.ImageURL = content
				}
			}
			if meta.Description == "" && name == "description" {
				meta.Description = content
			}
		}
	}
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		extractMeta(c, meta)
	}
}
// titleFromDoc returns the trimmed text of the first <title> element found in
// a depth-first walk of the document, or "" when none has text content.
func titleFromDoc(n *html.Node) string {
	if n.Type == html.ElementNode && n.Data == "title" && n.FirstChild != nil {
		return strings.TrimSpace(n.FirstChild.Data)
	}
	for child := n.FirstChild; child != nil; child = child.NextSibling {
		if found := titleFromDoc(child); found != "" {
			return found
		}
	}
	return ""
}
func extractImageURL(baseURL *url.URL, contentType string) string {
if strings.HasPrefix(strings.ToLower(contentType), "image/") {
return baseURL.String()
}
return ""
}
// storeImages downloads the preview image referenced by meta (if any),
// persists the original bytes, and, when createThumb produced thumbnail
// bytes, persists those as a second row flagged as a thumbnail.
func storeImages(ctx context.Context, db *sql.DB, itemID int64, meta Metadata) error {
	if meta.ImageURL == "" {
		return nil // nothing to store for this item
	}
	resp, err := fetchURL(ctx, meta.ImageURL)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	// Cap the download at 16 MiB to bound memory use on hostile inputs.
	payload, err := io.ReadAll(io.LimitReader(resp.Body, 16<<20))
	if err != nil {
		return err
	}
	contentType := resp.Header.Get("Content-Type")
	if contentType == "" {
		// Fall back to guessing from the final (post-redirect) URL extension.
		contentType = mime.TypeByExtension(strings.ToLower(path.Ext(resp.Request.URL.Path)))
	}
	width, height, thumbBytes, thumbHeight, err := createThumb(payload)
	if err != nil {
		return err
	}
	if _, err := image.QCreate(ctx, db, itemID, meta.ImageURL, contentType, payload, width, height, false); err != nil {
		return err
	}
	if thumbBytes != nil {
		// createThumb returns the original bytes untouched when the source is
		// already narrow enough, so sniff the stored bytes' real type instead
		// of assuming a JPEG re-encode, and record the true width in that case.
		thumbType := http.DetectContentType(thumbBytes)
		thumbW := thumbWidth
		if width <= thumbWidth {
			thumbW = width
		}
		if _, err := image.QCreate(ctx, db, itemID, meta.ImageURL, thumbType, thumbBytes, thumbW, thumbHeight, true); err != nil {
			return err
		}
	}
	return nil
}
// createThumb decodes payload and returns (width, height, thumbBytes,
// thumbHeight, err) for the source image.
//
//   - If decoding fails, the error is deliberately swallowed and all zero
//     values are returned with a nil error, so callers still store the raw
//     bytes best-effort.
//   - If the source is already at most thumbWidth pixels wide, the original
//     payload slice itself is returned as the "thumbnail" (no re-encode),
//     with thumbHeight equal to the source height.
//   - Otherwise the image is resized to thumbWidth wide (aspect ratio
//     preserved via height 0) and re-encoded as JPEG.
func createThumb(payload []byte) (int, int, []byte, int, error) {
	img, err := imaging.Decode(bytes.NewReader(payload))
	if err != nil {
		// Undecodable media is not an error for the caller (see doc above).
		return 0, 0, nil, 0, nil
	}
	bounds := img.Bounds()
	width := bounds.Dx()
	height := bounds.Dy()
	if width <= thumbWidth {
		// Already small enough: reuse the original bytes as the thumbnail.
		return width, height, payload, height, nil
	}
	thumb := imaging.Resize(img, thumbWidth, 0, imaging.Lanczos)
	buf := new(bytes.Buffer)
	if err := imaging.Encode(buf, thumb, imaging.JPEG); err != nil {
		return width, height, nil, 0, err
	}
	return width, height, buf.Bytes(), thumb.Bounds().Dy(), nil
}
// thumbContentType reports the MIME type recorded for resized thumbnails.
// Thumbnails are always encoded as JPEG, so the source type is ignored.
func thumbContentType(string) string {
	return "image/jpeg"
}

View file

@ -1,695 +0,0 @@
/*
* Commit Mono - Neutral programming typeface
* Licensed under SIL Open Font License 1.1
* https://commitmono.com/
*/
@font-face {
font-family: 'CommitMono';
font-style: normal;
font-weight: 400;
font-display: swap;
src: url("../fonts/CommitMono-450-Regular.woff2") format('woff2');
}
@font-face {
font-family: 'CommitMono';
font-style: italic;
font-weight: 400;
font-display: swap;
src: url("../fonts/CommitMono-450-Italic.woff2") format('woff2');
}
@font-face {
font-family: 'CommitMono';
font-style: normal;
font-weight: 700;
font-display: swap;
src: url("../fonts/CommitMono-700-Regular.woff2") format('woff2');
}
@font-face {
font-family: 'CommitMono';
font-style: italic;
font-weight: 700;
font-display: swap;
src: url("../fonts/CommitMono-700-Italic.woff2") format('woff2');
}
/* Reset */
*, *::before, *::after {
box-sizing: border-box;
margin: 0;
padding: 0;
}
/* Base */
:root {
--bg: #fff;
--fg: #000;
--gray-1: #f5f5f5;
--gray-2: #e5e5e5;
--gray-3: #888;
--font: 'CommitMono', ui-monospace, 'SF Mono', Menlo, monospace;
}
html {
font-size: 14px;
}
body {
font-family: var(--font);
background: var(--bg);
color: var(--fg);
line-height: 1.5;
min-height: 100vh;
}
a {
color: inherit;
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
img, video {
max-width: 100%;
height: auto;
display: block;
}
button, input, textarea, select {
font: inherit;
color: inherit;
}
/* Header */
.header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 1rem 2rem;
border-bottom: 1px solid var(--gray-2);
}
.logo {
font-weight: 700;
font-size: 1rem;
letter-spacing: 0.1em;
}
.nav {
display: flex;
gap: 1.5rem;
}
.nav a {
font-size: 0.85rem;
text-transform: uppercase;
letter-spacing: 0.05em;
}
/* Main */
.main {
max-width: 1600px;
margin: 0 auto;
padding: 2rem;
}
/* Admin Bar */
.admin-bar {
margin-bottom: 1.5rem;
display: flex;
gap: 1rem;
}
/* Tags Bar */
.tags-bar {
display: flex;
flex-wrap: wrap;
gap: 0.5rem;
margin-bottom: 2rem;
padding-bottom: 1rem;
border-bottom: 1px solid var(--gray-2);
}
.tag {
font-size: 0.75rem;
text-transform: uppercase;
letter-spacing: 0.05em;
padding: 0.25rem 0.5rem;
border: 1px solid var(--gray-2);
background: var(--bg);
transition: all 0.15s ease;
}
.tag:hover, .tag.active {
background: var(--fg);
color: var(--bg);
border-color: var(--fg);
text-decoration: none;
}
/* Grid */
.grid {
column-count: 4;
column-gap: 1rem;
}
.grid-item {
display: block;
break-inside: avoid;
margin-bottom: 1rem;
background: var(--gray-1);
border: 1px solid var(--gray-2);
overflow: hidden;
position: relative;
transition: transform 0.15s ease, box-shadow 0.15s ease;
}
.grid-item:hover {
text-decoration: none;
transform: translateY(-2px);
box-shadow: 0 4px 12px rgba(0,0,0,0.1);
}
.grid-item img {
width: 100%;
display: block;
}
/* Quote Card */
.quote-card {
padding: 1.5rem;
min-height: 150px;
display: flex;
flex-direction: column;
justify-content: center;
}
.quote-card blockquote {
font-size: 1.1rem;
font-style: italic;
line-height: 1.6;
margin-bottom: 0.75rem;
}
.quote-card blockquote::before {
content: '"';
}
.quote-card blockquote::after {
content: '"';
}
.quote-card cite {
font-size: 0.85rem;
color: var(--gray-3);
font-style: normal;
}
/* Link Card */
.link-card {
padding: 1.5rem;
min-height: 100px;
}
.link-title {
font-weight: 700;
margin-bottom: 0.5rem;
}
.link-url {
font-size: 0.75rem;
color: var(--gray-3);
word-break: break-all;
}
/* Embed Placeholder */
.embed-placeholder {
aspect-ratio: 16/9;
display: flex;
align-items: center;
justify-content: center;
background: var(--fg);
color: var(--bg);
font-size: 2rem;
}
/* Play Indicator */
.play-indicator {
position: absolute;
bottom: 0.5rem;
right: 0.5rem;
width: 2rem;
height: 2rem;
background: rgba(0,0,0,0.7);
color: #fff;
display: flex;
align-items: center;
justify-content: center;
font-size: 0.75rem;
pointer-events: none;
}
/* Item Overlay */
.item-overlay {
position: absolute;
inset: auto 0 0 0;
padding: 0.75rem;
background: rgba(255, 255, 255, 0.9);
border-top: 1px solid var(--gray-2);
display: flex;
flex-direction: column;
gap: 0.5rem;
opacity: 0;
transform: translateY(8px);
transition: opacity 0.15s ease, transform 0.15s ease;
}
.grid-item:hover .item-overlay {
opacity: 1;
transform: translateY(0);
}
.item-title {
font-weight: 700;
font-size: 0.9rem;
}
/* Item Tags */
.item-tags {
display: flex;
flex-wrap: wrap;
gap: 0.25rem;
}
.item-tag {
font-size: 0.65rem;
text-transform: uppercase;
letter-spacing: 0.05em;
padding: 0.15rem 0.35rem;
background: var(--gray-1);
border: 1px solid var(--gray-2);
}
/* Item Page */
.item-page {
max-width: 900px;
margin: 0 auto;
}
.back-link {
display: inline-block;
font-size: 0.85rem;
margin-bottom: 2rem;
text-transform: uppercase;
letter-spacing: 0.05em;
}
.item-detail {
border: 1px solid var(--gray-2);
}
.image-container img,
.video-container video {
width: 100%;
}
.video-container {
position: relative;
}
.video-overlay {
position: absolute;
inset: 0;
display: flex;
align-items: center;
justify-content: center;
background: rgba(0, 0, 0, 0.3);
cursor: pointer;
transition: background 0.15s ease;
}
.video-overlay:hover {
background: rgba(0, 0, 0, 0.4);
}
.video-overlay .play-button {
width: 4rem;
height: 4rem;
background: rgba(0, 0, 0, 0.7);
color: #fff;
border-radius: 50%;
display: flex;
align-items: center;
justify-content: center;
font-size: 1.5rem;
padding-left: 0.25rem;
}
.embed-container {
position: relative;
}
/* YouTube/Vimeo iframe embeds */
.embed-container iframe {
max-width: 100%;
}
/* Twitter embeds */
.embed-container .twitter-tweet {
margin: 1rem auto !important;
}
.quote-detail {
padding: 3rem;
text-align: center;
}
.quote-detail blockquote {
font-size: 1.5rem;
font-style: italic;
line-height: 1.6;
margin-bottom: 1rem;
}
.quote-detail blockquote::before {
content: '"';
}
.quote-detail blockquote::after {
content: '"';
}
.quote-detail cite {
color: var(--gray-3);
font-style: normal;
}
.item-meta {
padding: 1.5rem;
border-top: 1px solid var(--gray-2);
}
.item-meta h1 {
font-size: 1.25rem;
font-weight: 700;
margin-bottom: 0.75rem;
}
.item-meta .description {
margin-bottom: 1rem;
color: var(--gray-3);
}
.item-meta .source-link {
display: block;
font-size: 0.85rem;
color: var(--gray-3);
word-break: break-all;
margin-bottom: 1rem;
}
.item-meta .item-tags {
background: none;
border: none;
padding: 0;
margin-bottom: 1rem;
gap: 0.5rem;
}
.item-meta .timestamp {
display: block;
font-size: 0.75rem;
color: var(--gray-3);
text-transform: uppercase;
letter-spacing: 0.05em;
}
.item-actions {
padding: 1rem 1.5rem;
border-top: 1px solid var(--gray-2);
display: flex;
gap: 0.5rem;
}
/* Buttons */
.btn {
font-size: 0.75rem;
text-transform: uppercase;
letter-spacing: 0.1em;
padding: 0.5rem 1rem;
border: 1px solid var(--fg);
background: var(--bg);
cursor: pointer;
transition: all 0.15s ease;
}
.btn:hover {
background: var(--fg);
color: var(--bg);
}
.btn-danger {
border-color: #c00;
color: #c00;
}
.btn-danger:hover {
background: #c00;
color: #fff;
}
.btn-close {
background: none;
border: none;
font-size: 1.5rem;
cursor: pointer;
line-height: 1;
}
/* Modal */
.modal {
display: none;
position: fixed;
inset: 0;
background: rgba(0,0,0,0.5);
z-index: 1000;
align-items: center;
justify-content: center;
}
.modal.active {
display: flex;
}
.modal-content {
background: var(--bg);
width: 90%;
max-width: 500px;
max-height: 90vh;
overflow-y: auto;
border: 1px solid var(--fg);
}
.modal-header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 1rem 1.5rem;
border-bottom: 1px solid var(--gray-2);
}
.modal-header h2 {
font-size: 1rem;
font-weight: 700;
letter-spacing: 0.1em;
}
.modal-tabs {
display: flex;
border-bottom: 1px solid var(--gray-2);
}
.modal-tabs .tab {
flex: 1;
padding: 0.75rem;
background: none;
border: none;
font-size: 0.75rem;
text-transform: uppercase;
letter-spacing: 0.1em;
cursor: pointer;
border-bottom: 2px solid transparent;
}
.modal-tabs .tab.active {
border-bottom-color: var(--fg);
}
.modal-body {
padding: 1.5rem;
}
.tab-content {
display: none;
}
.tab-content.active {
display: block;
}
/* Forms */
form {
display: flex;
flex-direction: column;
gap: 1rem;
}
input, textarea {
width: 100%;
padding: 0.75rem;
border: 1px solid var(--gray-2);
background: var(--bg);
}
input:focus, textarea:focus {
outline: none;
border-color: var(--fg);
}
textarea {
resize: vertical;
min-height: 80px;
}
input[type="file"] {
padding: 0.5rem;
}
/* Preview */
.preview {
padding: 1rem;
background: var(--gray-1);
border: 1px solid var(--gray-2);
display: none;
}
.preview.active {
display: block;
}
.preview img {
max-width: 200px;
margin-bottom: 0.5rem;
}
.preview-title {
font-weight: 700;
margin-bottom: 0.25rem;
}
.preview-description {
font-size: 0.85rem;
color: var(--gray-3);
}
/* Image Gallery (detail page) */
.image-gallery {
display: grid;
grid-template-columns: repeat(2, 1fr);
gap: 2px;
}
.image-gallery img {
width: 100%;
height: auto;
display: block;
cursor: pointer;
transition: opacity 0.15s ease;
}
.image-gallery img:hover {
opacity: 0.9;
}
/* Single image when odd count - make last one full width */
.image-gallery img:last-child:nth-child(odd) {
grid-column: span 2;
}
/* Gallery Cycling (home grid) */
.grid-item-images {
position: relative;
width: 100%;
}
.grid-item-images img {
width: 100%;
display: block;
position: absolute;
top: 0;
left: 0;
opacity: 0;
transition: opacity 0.4s ease;
}
.grid-item-images img:first-child {
position: relative;
}
.grid-item-images img.active {
opacity: 1;
}
/* Gallery Indicator */
.gallery-indicator {
position: absolute;
top: 0.5rem;
right: 0.5rem;
background: rgba(0, 0, 0, 0.7);
color: #fff;
font-size: 0.7rem;
padding: 0.2rem 0.4rem;
pointer-events: none;
}
/* Responsive */
@media (max-width: 1200px) {
.grid {
column-count: 3;
}
}
@media (max-width: 900px) {
.grid {
column-count: 2;
}
.main {
padding: 1rem;
}
}
@media (max-width: 600px) {
.grid {
column-count: 1;
}
.header {
padding: 1rem;
}
.quote-detail {
padding: 2rem 1rem;
}
.quote-detail blockquote {
font-size: 1.25rem;
}
}

4
internal/static/css/pico.min.css vendored Normal file

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,331 @@
/* Commit Mono */
@font-face {
font-family: "CommitMono";
font-style: normal;
font-weight: 400;
font-display: swap;
src: url("../fonts/CommitMono-400-Regular.woff2") format("woff2");
}
@font-face {
font-family: "CommitMono";
font-style: italic;
font-weight: 400;
font-display: swap;
src: url("../fonts/CommitMono-400-Italic.woff2") format("woff2");
}
@font-face {
font-family: "CommitMono-Light";
font-style: normal;
font-weight: 400;
font-display: swap;
src: url("../fonts/CommitMono-450-Regular.woff2") format("woff2");
}
@font-face {
font-family: "CommitMono-Light";
font-style: italic;
font-weight: 400;
font-display: swap;
src: url("../fonts/CommitMono-450-Italic.woff2") format("woff2");
}
@font-face {
font-family: "CommitMono";
font-style: normal;
font-weight: 700;
font-display: swap;
src: url("../fonts/CommitMono-700-Regular.woff2") format("woff2");
}
@font-face {
font-family: "CommitMono";
font-style: italic;
font-weight: 700;
font-display: swap;
src: url("../fonts/CommitMono-700-Italic.woff2") format("woff2");
}
@font-face {
font-family: "CommitMono-Light";
font-style: normal;
font-weight: 700;
font-display: swap;
src: url("../fonts/CommitMono-700-Regular.woff2") format("woff2");
}
@font-face {
font-family: "CommitMono-Light";
font-style: italic;
font-weight: 700;
font-display: swap;
src: url("../fonts/CommitMono-700-Italic.woff2") format("woff2");
}
:root {
color-scheme: light dark;
font-family:
"CommitMono-Light",
"CommitMono",
monospace;
font-size: 13px;
}
@media (prefers-color-scheme: dark) {
:root {
font-family:
"CommitMono",
monospace;
}
}
body {
margin: 0;
background: var(--pico-background-color);
color: var(--pico-color);
}
.container {
max-width: 1200px;
margin: 0 auto;
padding: 2rem 1.5rem 4rem;
}
.site-header {
display: flex;
align-items: center;
justify-content: space-between;
padding: 1.5rem 2rem;
border-bottom: 2px solid currentColor;
}
.site-header .title {
font-weight: 700;
text-transform: uppercase;
letter-spacing: 0.08em;
}
.site-nav {
display: flex;
gap: 0.75rem;
align-items: center;
}
.site-nav a,
.site-nav button {
font-size: 0.85rem;
text-transform: uppercase;
letter-spacing: 0.08em;
border: 1px solid currentColor;
padding: 0.35rem 0.75rem;
background: transparent;
color: inherit;
}
.site-nav button.ghost {
background: transparent;
}
button.ghost {
border: 1px solid currentColor;
padding: 0.35rem 0.75rem;
background: transparent;
color: inherit;
}
.hero {
display: grid;
grid-template-columns: minmax(0, 1fr);
gap: 1rem;
margin-bottom: 2rem;
}
.hero form {
display: grid;
grid-template-columns: minmax(0, 1fr) auto;
gap: 0.5rem;
}
.hero input[type="url"] {
border-radius: 0;
border: 2px solid currentColor;
background: transparent;
padding: 0.6rem 0.75rem;
}
.hero button {
border-radius: 0;
border: 2px solid currentColor;
background: currentColor;
color: var(--pico-background-color);
padding: 0.6rem 1rem;
}
.filters {
display: flex;
flex-wrap: wrap;
gap: 0.5rem;
margin-bottom: 2rem;
}
.filter-pill {
border: 1px solid currentColor;
padding: 0.35rem 0.6rem;
font-size: 0.8rem;
cursor: pointer;
text-transform: uppercase;
letter-spacing: 0.06em;
}
.filter-pill.active {
background: currentColor;
color: var(--pico-background-color);
}
.gallery {
display: grid;
gap: 1.5rem;
grid-template-columns: repeat(auto-fill, minmax(220px, 1fr));
}
.card {
position: relative;
border: 2px solid currentColor;
background: transparent;
overflow: hidden;
min-height: 160px;
}
.card img,
.card video {
display: block;
width: 100%;
height: auto;
}
.card a {
color: inherit;
text-decoration: none;
}
.card .placeholder {
padding: 1rem;
font-size: 0.9rem;
}
.card .overlay {
position: absolute;
inset: 0;
background: rgba(0, 0, 0, 0.75);
color: #fff;
display: flex;
flex-direction: column;
justify-content: flex-end;
gap: 0.5rem;
padding: 0.8rem;
opacity: 0;
transition: opacity 0.15s ease;
}
.card:hover .overlay {
opacity: 1;
}
.card .overlay .title {
font-weight: 600;
font-size: 0.85rem;
}
.card .overlay .tags {
font-size: 0.7rem;
text-transform: uppercase;
letter-spacing: 0.08em;
}
.detail {
display: grid;
grid-template-columns: minmax(0, 1.1fr) minmax(0, 0.9fr);
gap: 2rem;
}
.detail img {
width: 100%;
border: 2px solid currentColor;
}
.detail .meta {
display: grid;
gap: 0.75rem;
}
.detail .meta h1 {
margin: 0;
font-size: 1.5rem;
}
.tag-list {
display: flex;
flex-wrap: wrap;
gap: 0.5rem;
}
.tag-chip {
border: 1px solid currentColor;
padding: 0.25rem 0.5rem;
font-size: 0.75rem;
text-transform: uppercase;
letter-spacing: 0.06em;
}
.tag-editor {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.tag-editor input {
border-radius: 0;
border: 2px solid currentColor;
padding: 0.45rem 0.6rem;
background: transparent;
}
.tag-suggestions {
display: flex;
flex-wrap: wrap;
gap: 0.5rem;
}
.tag-suggestions button {
border-radius: 0;
border: 1px solid currentColor;
padding: 0.25rem 0.5rem;
background: transparent;
}
.actions {
display: flex;
flex-wrap: wrap;
gap: 0.5rem;
}
.actions button,
.actions form button {
border-radius: 0;
border: 1px solid currentColor;
padding: 0.45rem 0.8rem;
background: transparent;
}
.notice {
border: 2px dashed currentColor;
padding: 1rem;
font-size: 0.85rem;
}
@media (max-width: 900px) {
.detail {
grid-template-columns: 1fr;
}
}

View file

@ -0,0 +1,39 @@
Copyright (c) 2023 Eigil Nikolajsen (eigi0088@gmail.com)
This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at:
http://scripts.sil.org/OFL
-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide
development of collaborative font projects, to support the font creation
efforts of academic and linguistic communities, and to provide a free and
open framework in which fonts may be shared and improved in partnership
with others.
The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves. The
fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply
to any document created using the fonts or their derivatives.
DEFINITIONS
"Font Software" refers to the set of files released by the Copyright
Holder(s) under this license and clearly marked as such. This may
include source files, build scripts and documentation.
"Reserved Font Name" refers to any names specified as such after the
copyright statement(s).
"Original Version" refers to the collection of Font Software components as
distributed by the Copyright Holder(s).
"Modified Version" refers to any derivative made by adding to, deleting,
or substituting -- in part or in whole -- any of the components of the
Original Version, by changing formats or by porting the Font Software to a
new environment.

View file

@ -1,479 +1,181 @@
// Console-based authentication (function() {
window.login = async (password) => { const logoutButton = document.querySelector('.site-nav button[onclick="logout()"]');
if (!password) { const authStatusEl = document.querySelector('[data-auth-status]');
console.error('Usage: login("your-password")');
return;
}
try {
const res = await fetch("/api/auth/login", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ password }),
});
const data = await res.json();
if (res.ok) {
console.log(
data.firstTime
? "Password set! Reloading..."
: "Logged in! Reloading...",
);
setTimeout(() => location.reload(), 500);
} else {
console.error(data.error || "Login failed");
}
} catch (err) {
console.error("Login error:", err);
}
};
window.logout = async () => { window.auth = async function() {
try { const password = window.prompt('Enter password');
await fetch("/api/auth/logout", { method: "POST" }); if (!password) return;
console.log("Logged out! Reloading..."); const resp = await fetch('/auth/login', {
setTimeout(() => location.reload(), 500); method: 'POST',
} catch (err) { headers: { 'Content-Type': 'application/json' },
console.error("Logout error:", err); body: JSON.stringify({ password })
} });
}; if (resp.ok) {
await refreshAuth();
} else {
alert('Auth failed');
}
};
// Modal functions window.logout = async function() {
function showAddModal() { await fetch('/auth/logout', { method: 'POST' });
document.getElementById("add-modal").classList.add("active"); await refreshAuth();
} };
function hideAddModal() { async function refreshAuth() {
document.getElementById("add-modal").classList.remove("active"); const resp = await fetch('/auth/status');
} if (!resp.ok) return;
const data = await resp.json();
window.LOOKBOOK_AUTH = data.authenticated;
if (logoutButton) {
logoutButton.hidden = !data.authenticated;
}
if (authStatusEl) {
authStatusEl.textContent = data.authenticated ? 'Authed' : 'Read only';
}
const gated = document.querySelectorAll('[data-auth-required]');
gated.forEach((el) => {
el.toggleAttribute('hidden', !data.authenticated);
});
}
function showEditModal() { const tagInput = document.querySelector('[data-tag-input]');
document.getElementById("edit-modal").classList.add("active"); const tagSuggestions = document.querySelector('[data-tag-suggestions]');
} const tagForm = document.querySelector('[data-tag-form]');
const tagEditor = document.querySelector('[data-tag-editor]');
const tagToggle = document.querySelector('[data-tag-toggle]');
const tagList = document.querySelector('[data-tag-list]');
const tagData = tagSuggestions ? JSON.parse(tagSuggestions.getAttribute('data-tags') || '[]') : [];
function hideEditModal() { if (tagToggle && tagEditor) {
document.getElementById("edit-modal").classList.remove("active"); tagToggle.addEventListener('click', () => {
} const isHidden = tagEditor.hasAttribute('hidden');
tagEditor.toggleAttribute('hidden', !isHidden);
});
}
// Tab switching if (tagList && tagEditor) {
document.addEventListener("DOMContentLoaded", () => { tagList.addEventListener('click', () => {
const tabs = document.querySelectorAll(".modal-tabs .tab"); if (!window.LOOKBOOK_AUTH) return;
tabs.forEach((tab) => { const isHidden = tagEditor.hasAttribute('hidden');
tab.addEventListener("click", () => { tagEditor.toggleAttribute('hidden', !isHidden);
const tabId = tab.dataset.tab; });
}
// Update tab buttons if (tagSuggestions) {
tabs.forEach((t) => t.classList.remove("active")); if (tagInput) {
tab.classList.add("active"); tagInput.addEventListener('input', () => renderSuggestions(tagInput.value));
renderSuggestions(tagInput.value);
} else {
renderSuggestions('');
}
}
// Update tab content function renderSuggestions(value) {
document if (!tagSuggestions) return;
.querySelectorAll(".tab-content") const query = (value || '').trim().toLowerCase();
.forEach((c) => c.classList.remove("active")); tagSuggestions.innerHTML = '';
document.getElementById("tab-" + tabId).classList.add("active"); const matches = fuzzyMatchTags(query, tagData).slice(0, 10);
}); matches.forEach((tag) => {
}); const button = document.createElement('button');
button.type = 'button';
button.textContent = tag;
button.addEventListener('click', () => {
if (tagInput) {
addTag(tag);
return;
}
const target = Array.from(filterButtons).find((btn) => btn.dataset.tagFilter === tag);
if (target) target.click();
});
tagSuggestions.appendChild(button);
});
}
// URL preview on input function addTag(tag) {
const urlInput = document.querySelector('#link-form input[name="url"]'); if (!tagInput) return;
if (urlInput) { const tags = parseTags(tagInput.value);
let debounceTimer; if (!tags.includes(tag)) tags.push(tag);
urlInput.addEventListener("input", (e) => { tagInput.value = tags.join(', ');
clearTimeout(debounceTimer); renderSuggestions(tagInput.value);
debounceTimer = setTimeout(() => fetchPreview(e.target.value), 500); }
});
}
// Gallery hover cycling function parseTags(value) {
const galleries = document.querySelectorAll('.grid-item-images[data-gallery="true"]'); return value
galleries.forEach((gallery) => { .split(',')
const images = gallery.querySelectorAll('img'); .map((tag) => tag.trim())
const indicator = gallery.parentElement.querySelector('.gallery-indicator'); .filter((tag) => tag.length > 0);
if (images.length < 2) return; }
let currentIndex = 0; function fuzzyMatchTags(query, tags) {
let cycleInterval = null; if (!query) return tags;
const scores = tags.map((tag) => ({ tag, score: fuzzyScore(tag, query) }))
.filter((entry) => entry.score > 0)
.sort((a, b) => b.score - a.score || a.tag.localeCompare(b.tag));
return scores.map((entry) => entry.tag);
}
const showImage = (index) => { function fuzzyScore(tag, query) {
images.forEach((img, i) => { let score = 0;
img.classList.toggle('active', i === index); let ti = 0;
}); for (const qc of query) {
if (indicator) { const idx = tag.indexOf(qc, ti);
indicator.textContent = `${index + 1}/${images.length}`; if (idx === -1) return 0;
} score += idx === ti ? 3 : 1;
}; ti = idx + 1;
}
return score;
}
const startCycling = () => { const filterButtons = document.querySelectorAll('[data-tag-filter]');
if (cycleInterval) return; const gridItems = document.querySelectorAll('[data-item-tags]');
cycleInterval = setInterval(() => { const selectedTags = new Set();
currentIndex = (currentIndex + 1) % images.length;
showImage(currentIndex);
}, 2000);
};
const stopCycling = () => { filterButtons.forEach((button) => {
if (cycleInterval) { button.addEventListener('click', (event) => {
clearInterval(cycleInterval); const tag = button.dataset.tagFilter;
cycleInterval = null; const multi = event.ctrlKey || event.metaKey;
} if (!multi) {
// Reset to first image selectedTags.clear();
currentIndex = 0; filterButtons.forEach((b) => b.classList.remove('active'));
showImage(0); }
}; if (selectedTags.has(tag)) {
selectedTags.delete(tag);
button.classList.remove('active');
} else {
selectedTags.add(tag);
button.classList.add('active');
}
applyFilters();
});
});
gallery.parentElement.addEventListener('mouseenter', startCycling); function applyFilters() {
gallery.parentElement.addEventListener('mouseleave', stopCycling); const selected = Array.from(selectedTags);
}); gridItems.forEach((item) => {
}); const tags = (item.getAttribute('data-item-tags') || '').split(',').map((t) => t.trim()).filter(Boolean);
if (selected.length === 0) {
item.removeAttribute('hidden');
return;
}
const matches = selected.some((tag) => tags.includes(tag));
item.toggleAttribute('hidden', !matches);
});
}
// Fetch URL preview if (tagForm) {
async function fetchPreview(url) { tagForm.addEventListener('submit', async (event) => {
const preview = document.getElementById("link-preview"); event.preventDefault();
if (!url) { const formData = new FormData(tagForm);
preview.classList.remove("active"); const resp = await fetch(tagForm.action, {
preview.innerHTML = ""; method: 'POST',
return; body: formData
} });
if (resp.ok) {
window.location.reload();
}
});
}
try { refreshAuth();
const res = await fetch("/api/preview", { })();
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ url }),
});
if (!res.ok) {
const data = await res.json();
preview.innerHTML = `<div class="preview-error">${data.error || "Failed to fetch preview"}</div>`;
preview.classList.add("active");
return;
}
const data = await res.json();
preview.dataset.preview = JSON.stringify(data);
let html = "";
if (data.imageUrl) {
html += `<img src="${escapeHtml(data.imageUrl)}" alt="Preview">`;
}
if (data.title) {
html += `<div class="preview-title">${escapeHtml(data.title)}</div>`;
}
if (data.description) {
html += `<div class="preview-description">${escapeHtml(data.description)}</div>`;
}
if (data.isEmbed) {
html += `<div class="preview-badge">${escapeHtml(data.provider.toUpperCase())} VIDEO</div>`;
}
preview.innerHTML = html || "<div>No preview available</div>";
preview.classList.add("active");
// Auto-fill title if empty
const titleInput = document.querySelector('#link-form input[name="title"]');
if (titleInput && !titleInput.value && data.title) {
titleInput.value = data.title;
}
// Auto-fill description if empty
const descriptionInput = document.querySelector(
'#link-form textarea[name="description"]',
);
if (descriptionInput && !descriptionInput.value && data.description) {
descriptionInput.value = data.description;
}
} catch (err) {
console.error("Preview error:", err);
preview.innerHTML =
'<div class="preview-error">Failed to fetch preview</div>';
preview.classList.add("active");
}
}
// Submit link form
async function submitLink(event) {
event.preventDefault();
const form = event.target;
const url = form.url.value;
const tags = form.tags.value
? form.tags.value
.split(",")
.map((t) => t.trim())
.filter(Boolean)
: [];
// Get preview data
const preview = document.getElementById("link-preview");
const previewData = preview.dataset.preview
? JSON.parse(preview.dataset.preview)
: {};
// Use form values, falling back to preview data
const title = form.title.value || previewData.title || null;
const description = form.description.value || previewData.description || null;
try {
const body = {
url,
title,
description,
tags,
imageUrl: previewData.imageUrl || null,
};
if (previewData.isEmbed) {
body.provider = previewData.provider;
body.videoId = previewData.videoId;
body.embedHtml = previewData.embedHtml;
}
const res = await fetch("/api/items/from-link", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(body),
});
if (res.ok) {
location.reload();
} else {
const data = await res.json();
alert(data.error || "Failed to add item");
}
} catch (err) {
console.error("Submit error:", err);
alert("Failed to add item");
}
return false;
}
// Submit upload form
async function submitUpload(event) {
event.preventDefault();
const form = event.target;
const formData = new FormData(form);
try {
const res = await fetch("/api/items/upload", {
method: "POST",
body: formData,
});
if (res.ok) {
location.reload();
} else {
const data = await res.json();
alert(data.error || "Failed to upload");
}
} catch (err) {
console.error("Upload error:", err);
alert("Failed to upload");
}
return false;
}
// Submit quote form
async function submitQuote(event) {
event.preventDefault();
const form = event.target;
const text = form.text.value;
const source = form.source.value || null;
const sourceUrl = form.sourceUrl.value || null;
const tags = form.tags.value
? form.tags.value
.split(",")
.map((t) => t.trim())
.filter(Boolean)
: [];
try {
const res = await fetch("/api/items/quote", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ text, source, sourceUrl, tags }),
});
if (res.ok) {
location.reload();
} else {
const data = await res.json();
alert(data.error || "Failed to add quote");
}
} catch (err) {
console.error("Submit error:", err);
alert("Failed to add quote");
}
return false;
}
// Edit item
function editItem(id) {
showEditModal();
}
// Submit edit form
async function submitEdit(event) {
event.preventDefault();
const form = event.target;
const id = form.id.value;
const title = form.title.value || null;
const description = form.description.value || null;
const linkUrl = form.linkUrl.value || null;
const tags = form.tags.value
? form.tags.value
.split(",")
.map((t) => t.trim())
.filter(Boolean)
: [];
try {
const res = await fetch(`/api/items/${id}`, {
method: "PUT",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ title, description, linkUrl, tags }),
});
if (res.ok) {
location.reload();
} else {
const data = await res.json();
alert(data.error || "Failed to update");
}
} catch (err) {
console.error("Update error:", err);
alert("Failed to update");
}
return false;
}
// Refresh metadata
// Re-fetches link-preview metadata for an item and applies it server-side.
// Flow: load the item -> fetch a fresh preview of its linkUrl -> warn if the
// refresh would overwrite user-edited fields -> POST /refresh and reload.
async function refreshMetadata(id) {
  try {
    // Fetch current item data
    const itemRes = await fetch(`/api/items/${id}`);
    if (!itemRes.ok) {
      alert("Failed to fetch item");
      return;
    }
    const item = await itemRes.json();
    // Only link-backed items have remote metadata to refresh.
    if (!item.linkUrl) {
      alert("Item has no link URL");
      return;
    }
    // Fetch fresh metadata
    const previewRes = await fetch("/api/preview", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ url: item.linkUrl }),
    });
    if (!previewRes.ok) {
      // NOTE(review): assumes error responses always carry a JSON body;
      // a non-JSON error page would make res.json() throw into the catch.
      const data = await previewRes.json();
      alert(data.error || "Failed to fetch metadata");
      return;
    }
    const preview = await previewRes.json();
    // Check if user has made manual edits
    // A field counts as "changed" only when both sides are non-empty AND
    // differ, so newly-available metadata never triggers the warning.
    const titleChanged =
      item.title && preview.title && item.title !== preview.title;
    const descChanged =
      item.description &&
      preview.description &&
      item.description !== preview.description;
    const imageChanged =
      item.thumbnailSourceUrl &&
      preview.imageUrl &&
      item.thumbnailSourceUrl !== preview.imageUrl;
    if (titleChanged || descChanged || imageChanged) {
      let msg = "This will overwrite your changes:\n";
      if (titleChanged) msg += `\nTitle: "${item.title}" → "${preview.title}"`;
      if (descChanged) msg += `\nDescription will be replaced`;
      if (imageChanged) msg += `\nImage will be replaced`;
      msg += "\n\nContinue?";
      // Abort silently if the user declines the overwrite.
      if (!confirm(msg)) return;
    }
    // Proceed with refresh
    const res = await fetch(`/api/items/${id}/refresh`, { method: "POST" });
    if (res.ok) {
      location.reload();
    } else {
      const data = await res.json();
      alert(data.error || "Failed to refresh");
    }
  } catch (err) {
    console.error("Refresh error:", err);
    alert("Failed to refresh");
  }
}
// Upload replacement media for an item as a multipart POST built from the
// form's own fields; reloads the page when the server accepts it.
async function submitReplaceMedia(event, id) {
  event.preventDefault();
  const body = new FormData(event.target);
  try {
    const res = await fetch(`/api/items/${id}/media`, { method: "POST", body });
    if (!res.ok) {
      const data = await res.json();
      alert(data.error || "Failed to replace media");
    } else {
      location.reload();
    }
  } catch (err) {
    console.error("Replace media error:", err);
    alert("Failed to replace media");
  }
  return false;
}
// Delete an item after user confirmation, then navigate back to the index.
async function deleteItem(id) {
  if (!confirm("Delete this item?")) return;
  try {
    const res = await fetch(`/api/items/${id}`, { method: "DELETE" });
    if (!res.ok) {
      const data = await res.json();
      alert(data.error || "Failed to delete");
      return;
    }
    location.href = "/";
  } catch (err) {
    console.error("Delete error:", err);
    alert("Failed to delete");
  }
}
// Play video (initial click to start): start playback when the user clicks
// the poster overlay, remove the overlay, and show native controls only
// while the pointer is over the video.
function playVideo(overlay) {
  const video = overlay.parentElement.querySelector("video");
  if (!video) return;
  video.play();
  overlay.remove();
  video.addEventListener("mouseenter", () => {
    video.controls = true;
  });
  video.addEventListener("mouseleave", () => {
    video.controls = false;
  });
}
// Utility: escape a string for safe interpolation into HTML markup by
// round-tripping it through a detached element's text-node serialization.
function escapeHtml(str) {
  const probe = document.createElement("div");
  probe.textContent = str;
  return probe.innerHTML;
}

View file

@ -29,13 +29,12 @@ func VersionedPath(path string) string {
func Handler() http.Handler { func Handler() http.Handler {
fileServer := http.FileServer(http.FS(staticFS)) fileServer := http.FileServer(http.FS(staticFS))
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// Strip version prefix: /static/{version}/file.css -> /file.css
path := r.URL.Path path := r.URL.Path
path = strings.TrimPrefix(path, "/static/") path = strings.TrimPrefix(path, "/static/")
if idx := strings.Index(path, "/"); idx != -1 { if idx := strings.Index(path, "/"); idx != -1 {
path = path[idx:] path = path[idx+1:]
} }
r.URL.Path = path r.URL.Path = "/" + path
w.Header().Set("Cache-Control", "public, max-age=31536000, immutable") w.Header().Set("Cache-Control", "public, max-age=31536000, immutable")
fileServer.ServeHTTP(w, r) fileServer.ServeHTTP(w, r)

View file

@ -1,151 +0,0 @@
package video
import (
"bytes"
"context"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
)
// ExtractThumbnail extracts a thumbnail from a video file.
// Returns the thumbnail image data as JPEG.
//
// The frame is grabbed at the 1-second mark; for clips shorter than that
// (where ffmpeg yields no frame at 1s) it falls back to the first frame
// instead of failing the whole upload.
func ExtractThumbnail(ctx context.Context, videoData []byte) ([]byte, error) {
	// ffmpeg needs a seekable input path, so stage the bytes in a temp dir.
	tmpDir, err := os.MkdirTemp("", "lookbook-video-*")
	if err != nil {
		return nil, fmt.Errorf("create temp dir: %w", err)
	}
	defer os.RemoveAll(tmpDir)

	inputPath := filepath.Join(tmpDir, "input")
	outputPath := filepath.Join(tmpDir, "thumbnail.jpg")
	if err := os.WriteFile(inputPath, videoData, 0600); err != nil {
		return nil, fmt.Errorf("write temp video: %w", err)
	}

	// grab runs ffmpeg to extract a single frame at the given timestamp,
	// scaled down to fit within 1280x720, and returns the JPEG bytes.
	grab := func(seek string) ([]byte, error) {
		cmd := exec.CommandContext(ctx, "ffmpeg",
			"-i", inputPath,
			"-ss", seek,
			"-vframes", "1",
			"-vf", "scale='min(1280,iw)':'min(720,ih)':force_original_aspect_ratio=decrease",
			"-q:v", "2",
			"-y",
			outputPath,
		)
		var stderr bytes.Buffer
		cmd.Stderr = &stderr
		if err := cmd.Run(); err != nil {
			return nil, fmt.Errorf("ffmpeg thumbnail: %w: %s", err, stderr.String())
		}
		thumb, err := os.ReadFile(outputPath)
		if err != nil {
			return nil, fmt.Errorf("read thumbnail: %w", err)
		}
		return thumb, nil
	}

	// Extract thumbnail at 1 second mark; retry at 0s so sub-second clips
	// (which produce no frame at 1s) still get a thumbnail.
	thumb, err := grab("00:00:01")
	if err != nil {
		if thumb0, err0 := grab("00:00:00"); err0 == nil {
			return thumb0, nil
		}
		return nil, err
	}
	return thumb, nil
}
// TranscodeToMP4 transcodes a video to H.264 MP4 format.
// Returns the transcoded video data.
//
// The input is always re-encoded (libx264 + AAC, faststart) regardless of
// contentType, which keeps every stored video uniformly browser-playable.
func TranscodeToMP4(ctx context.Context, videoData []byte, contentType string) ([]byte, error) {
	workDir, err := os.MkdirTemp("", "lookbook-video-*")
	if err != nil {
		return nil, fmt.Errorf("create temp dir: %w", err)
	}
	defer os.RemoveAll(workDir)

	src := filepath.Join(workDir, "input")
	dst := filepath.Join(workDir, "output.mp4")
	if err := os.WriteFile(src, videoData, 0600); err != nil {
		return nil, fmt.Errorf("write temp video: %w", err)
	}

	// +faststart moves the moov atom to the front so playback can begin
	// before the file finishes downloading.
	var stderr bytes.Buffer
	cmd := exec.CommandContext(ctx, "ffmpeg",
		"-i", src,
		"-c:v", "libx264",
		"-preset", "medium",
		"-crf", "23",
		"-c:a", "aac",
		"-b:a", "128k",
		"-movflags", "+faststart",
		"-y",
		dst,
	)
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		return nil, fmt.Errorf("ffmpeg transcode: %w: %s", err, stderr.String())
	}

	out, err := os.ReadFile(dst)
	if err != nil {
		return nil, fmt.Errorf("read transcoded: %w", err)
	}
	return out, nil
}
// ProcessVideo processes an uploaded video: transcodes to MP4 and extracts thumbnail.
// Returns (transcodedData, thumbnailData, error).
func ProcessVideo(ctx context.Context, videoData []byte, contentType string) ([]byte, []byte, error) {
	// Grab the thumbnail from the original bytes first — they often retain
	// more detail than the re-encoded output.
	thumb, err := ExtractThumbnail(ctx, videoData)
	if err != nil {
		return nil, nil, fmt.Errorf("extract thumbnail: %w", err)
	}
	mp4, err := TranscodeToMP4(ctx, videoData, contentType)
	if err != nil {
		return nil, nil, fmt.Errorf("transcode: %w", err)
	}
	return mp4, thumb, nil
}
// IsVideo checks if a content type is a video type.
func IsVideo(contentType string) bool {
	supported := []string{
		"video/mp4", "video/webm", "video/quicktime", "video/x-msvideo",
		"video/x-matroska", "video/mpeg", "video/ogg", "video/3gpp",
	}
	for _, t := range supported {
		if contentType == t {
			return true
		}
	}
	return false
}
// IsImage checks if a content type is an image type.
func IsImage(contentType string) bool {
	supported := []string{
		"image/jpeg", "image/png", "image/gif", "image/webp", "image/avif",
	}
	for _, t := range supported {
		if contentType == t {
			return true
		}
	}
	return false
}
// ReadUpload reads an uploaded file up to maxSize bytes.
func ReadUpload(r io.Reader, maxSize int64) ([]byte, error) {
limited := io.LimitReader(r, maxSize+1)
data, err := io.ReadAll(limited)
if err != nil {
return nil, err
}
if int64(len(data)) > maxSize {
return nil, fmt.Errorf("file too large (max %d bytes)", maxSize)
}
return data, nil
}

View file

@ -1,4 +1,5 @@
let let
pins = import ./npins;
sx-nix = import ../sx-nix {}; sx-nix = import ../sx-nix {};
pkgs = sx-nix.pkgs; pkgs = sx-nix.pkgs;
in in

View file

@ -0,0 +1,71 @@
package ssr
import (
"errors"
"html/template"
"io"
"sync"
"github.com/cespare/xxhash/v2"
)
// Writer streams rendered output to w while accumulating every write and
// template error (via errors.Join), so callers can emit a sequence of
// fragments and check a single combined error at the end.
type Writer struct {
	err   error      // combined error from all Raw/Tmpl calls so far
	w     io.Writer  // destination stream
	cache *TmplCache // shared compiled-template cache used by Tmpl
}
// NewWriter returns a Writer that emits to w, compiling templates through cache.
func NewWriter(w io.Writer, cache *TmplCache) *Writer {
	return &Writer{
		w:     w,
		cache: cache,
	}
}
// Raw writes s verbatim to the underlying writer.
// Any write error is joined into the Writer's running error, and the
// accumulated error (not just this call's) is returned — callers may
// therefore chain writes and inspect the error once.
func (w *Writer) Raw(s string) error {
	_, err := w.w.Write([]byte(s))
	w.err = errors.Join(w.err, err)
	return w.err
}
// Tmpl compiles tmpl via the shared cache and executes it against data,
// writing to the underlying stream. Both compile and execution errors are
// joined into the Writer's running error, which is returned.
func (w *Writer) Tmpl(data any, tmpl string) error {
	t, err := w.cache.GetOrCompile(tmpl)
	if err == nil {
		err = t.Execute(w.w, data)
	}
	w.err = errors.Join(w.err, err)
	return w.err
}
// Error returns the combined error from every Raw/Tmpl call made so far,
// or nil if all writes succeeded.
func (w *Writer) Error() error {
	return w.err
}
// TmplCache memoizes parsed templates keyed by the xxhash64 of their source
// text, so identical template strings are compiled only once.
//
// NOTE(review): keying by hash alone means a (vanishingly unlikely) xxhash64
// collision between two distinct template strings would return the wrong
// template; no content comparison is done on cache hits.
type TmplCache struct {
	funcs template.FuncMap // FuncMap applied to every template compiled here
	cache sync.Map // uint64 -> *template.Template
}
// NewTmplCache returns an empty cache whose templates will be compiled
// with the given function map.
func NewTmplCache(funcs template.FuncMap) *TmplCache {
	c := &TmplCache{
		funcs: funcs,
	}
	return c
}
// GetOrCompile returns the compiled template for content, compiling and
// caching it on first use. The cache key is xxhash64(content).
//
// Concurrent first callers may each compile the same content; LoadOrStore
// guarantees they all end up sharing one canonical *template.Template
// (the plain Store used previously let the last writer win, so two
// goroutines could hold different instances for the same key).
func (t *TmplCache) GetOrCompile(content string) (*template.Template, error) {
	key := xxhash.Sum64String(content)
	if cached, ok := t.cache.Load(key); ok {
		return cached.(*template.Template), nil
	}
	tmpl, err := template.New("").Funcs(t.funcs).Parse(content)
	if err != nil {
		return nil, err
	}
	// Keep whichever instance landed first so every caller shares it.
	actual, _ := t.cache.LoadOrStore(key, tmpl)
	return actual.(*template.Template), nil
}
// Renderable is implemented by values that can stream themselves as
// rendered output through a *Writer.
type Renderable interface {
	Render(sw *Writer) error
}

22
vendor/github.com/cespare/xxhash/v2/LICENSE.txt generated vendored Normal file
View file

@ -0,0 +1,22 @@
Copyright (c) 2016 Caleb Spare
MIT License
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

74
vendor/github.com/cespare/xxhash/v2/README.md generated vendored Normal file
View file

@ -0,0 +1,74 @@
# xxhash
[![Go Reference](https://pkg.go.dev/badge/github.com/cespare/xxhash/v2.svg)](https://pkg.go.dev/github.com/cespare/xxhash/v2)
[![Test](https://github.com/cespare/xxhash/actions/workflows/test.yml/badge.svg)](https://github.com/cespare/xxhash/actions/workflows/test.yml)
xxhash is a Go implementation of the 64-bit [xxHash] algorithm, XXH64. This is a
high-quality hashing algorithm that is much faster than anything in the Go
standard library.
This package provides a straightforward API:
```
func Sum64(b []byte) uint64
func Sum64String(s string) uint64
type Digest struct{ ... }
func New() *Digest
```
The `Digest` type implements hash.Hash64. Its key methods are:
```
func (*Digest) Write([]byte) (int, error)
func (*Digest) WriteString(string) (int, error)
func (*Digest) Sum64() uint64
```
The package is written with optimized pure Go and also contains even faster
assembly implementations for amd64 and arm64. If desired, the `purego` build tag
opts into using the Go code even on those architectures.
[xxHash]: http://cyan4973.github.io/xxHash/
## Compatibility
This package is in a module and the latest code is in version 2 of the module.
You need a version of Go with at least "minimal module compatibility" to use
github.com/cespare/xxhash/v2:
* 1.9.7+ for Go 1.9
* 1.10.3+ for Go 1.10
* Go 1.11 or later
I recommend using the latest release of Go.
## Benchmarks
Here are some quick benchmarks comparing the pure-Go and assembly
implementations of Sum64.
| input size | purego | asm |
| ---------- | --------- | --------- |
| 4 B | 1.3 GB/s | 1.2 GB/s |
| 16 B | 2.9 GB/s | 3.5 GB/s |
| 100 B | 6.9 GB/s | 8.1 GB/s |
| 4 KB | 11.7 GB/s | 16.7 GB/s |
| 10 MB | 12.0 GB/s | 17.3 GB/s |
These numbers were generated on Ubuntu 20.04 with an Intel Xeon Platinum 8252C
CPU using the following commands under Go 1.19.2:
```
benchstat <(go test -tags purego -benchtime 500ms -count 15 -bench 'Sum64$')
benchstat <(go test -benchtime 500ms -count 15 -bench 'Sum64$')
```
## Projects using this package
- [InfluxDB](https://github.com/influxdata/influxdb)
- [Prometheus](https://github.com/prometheus/prometheus)
- [VictoriaMetrics](https://github.com/VictoriaMetrics/VictoriaMetrics)
- [FreeCache](https://github.com/coocood/freecache)
- [FastCache](https://github.com/VictoriaMetrics/fastcache)
- [Ristretto](https://github.com/dgraph-io/ristretto)
- [Badger](https://github.com/dgraph-io/badger)

10
vendor/github.com/cespare/xxhash/v2/testall.sh generated vendored Normal file
View file

@ -0,0 +1,10 @@
#!/bin/bash
set -eu -o pipefail
# Small convenience script for running the tests with various combinations of
# arch/tags. This assumes we're running on amd64 and have qemu available.
go test ./...
go test -tags purego ./...
GOARCH=arm64 go test
GOARCH=arm64 go test -tags purego

243
vendor/github.com/cespare/xxhash/v2/xxhash.go generated vendored Normal file
View file

@ -0,0 +1,243 @@
// Package xxhash implements the 64-bit variant of xxHash (XXH64) as described
// at http://cyan4973.github.io/xxHash/.
package xxhash
import (
"encoding/binary"
"errors"
"math/bits"
)
const (
prime1 uint64 = 11400714785074694791
prime2 uint64 = 14029467366897019727
prime3 uint64 = 1609587929392839161
prime4 uint64 = 9650029242287828579
prime5 uint64 = 2870177450012600261
)
// Store the primes in an array as well.
//
// The consts are used when possible in Go code to avoid MOVs but we need a
// contiguous array for the assembly code.
var primes = [...]uint64{prime1, prime2, prime3, prime4, prime5}
// Digest implements hash.Hash64.
//
// Note that a zero-valued Digest is not ready to receive writes.
// Call Reset or create a Digest using New before calling other methods.
type Digest struct {
v1 uint64
v2 uint64
v3 uint64
v4 uint64
total uint64
mem [32]byte
n int // how much of mem is used
}
// New creates a new Digest with a zero seed.
func New() *Digest {
return NewWithSeed(0)
}
// NewWithSeed creates a new Digest with the given seed.
func NewWithSeed(seed uint64) *Digest {
var d Digest
d.ResetWithSeed(seed)
return &d
}
// Reset clears the Digest's state so that it can be reused.
// It uses a seed value of zero.
func (d *Digest) Reset() {
d.ResetWithSeed(0)
}
// ResetWithSeed clears the Digest's state so that it can be reused.
// It uses the given seed to initialize the state.
func (d *Digest) ResetWithSeed(seed uint64) {
d.v1 = seed + prime1 + prime2
d.v2 = seed + prime2
d.v3 = seed
d.v4 = seed - prime1
d.total = 0
d.n = 0
}
// Size always returns 8 bytes.
func (d *Digest) Size() int { return 8 }
// BlockSize always returns 32 bytes.
func (d *Digest) BlockSize() int { return 32 }
// Write adds more data to d. It always returns len(b), nil.
func (d *Digest) Write(b []byte) (n int, err error) {
n = len(b)
d.total += uint64(n)
memleft := d.mem[d.n&(len(d.mem)-1):]
if d.n+n < 32 {
// This new data doesn't even fill the current block.
copy(memleft, b)
d.n += n
return
}
if d.n > 0 {
// Finish off the partial block.
c := copy(memleft, b)
d.v1 = round(d.v1, u64(d.mem[0:8]))
d.v2 = round(d.v2, u64(d.mem[8:16]))
d.v3 = round(d.v3, u64(d.mem[16:24]))
d.v4 = round(d.v4, u64(d.mem[24:32]))
b = b[c:]
d.n = 0
}
if len(b) >= 32 {
// One or more full blocks left.
nw := writeBlocks(d, b)
b = b[nw:]
}
// Store any remaining partial block.
copy(d.mem[:], b)
d.n = len(b)
return
}
// Sum appends the current hash to b and returns the resulting slice.
func (d *Digest) Sum(b []byte) []byte {
s := d.Sum64()
return append(
b,
byte(s>>56),
byte(s>>48),
byte(s>>40),
byte(s>>32),
byte(s>>24),
byte(s>>16),
byte(s>>8),
byte(s),
)
}
// Sum64 returns the current hash.
func (d *Digest) Sum64() uint64 {
var h uint64
if d.total >= 32 {
v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4
h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4)
h = mergeRound(h, v1)
h = mergeRound(h, v2)
h = mergeRound(h, v3)
h = mergeRound(h, v4)
} else {
h = d.v3 + prime5
}
h += d.total
b := d.mem[:d.n&(len(d.mem)-1)]
for ; len(b) >= 8; b = b[8:] {
k1 := round(0, u64(b[:8]))
h ^= k1
h = rol27(h)*prime1 + prime4
}
if len(b) >= 4 {
h ^= uint64(u32(b[:4])) * prime1
h = rol23(h)*prime2 + prime3
b = b[4:]
}
for ; len(b) > 0; b = b[1:] {
h ^= uint64(b[0]) * prime5
h = rol11(h) * prime1
}
h ^= h >> 33
h *= prime2
h ^= h >> 29
h *= prime3
h ^= h >> 32
return h
}
const (
magic = "xxh\x06"
marshaledSize = len(magic) + 8*5 + 32
)
// MarshalBinary implements the encoding.BinaryMarshaler interface.
func (d *Digest) MarshalBinary() ([]byte, error) {
b := make([]byte, 0, marshaledSize)
b = append(b, magic...)
b = appendUint64(b, d.v1)
b = appendUint64(b, d.v2)
b = appendUint64(b, d.v3)
b = appendUint64(b, d.v4)
b = appendUint64(b, d.total)
b = append(b, d.mem[:d.n]...)
b = b[:len(b)+len(d.mem)-d.n]
return b, nil
}
// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
func (d *Digest) UnmarshalBinary(b []byte) error {
if len(b) < len(magic) || string(b[:len(magic)]) != magic {
return errors.New("xxhash: invalid hash state identifier")
}
if len(b) != marshaledSize {
return errors.New("xxhash: invalid hash state size")
}
b = b[len(magic):]
b, d.v1 = consumeUint64(b)
b, d.v2 = consumeUint64(b)
b, d.v3 = consumeUint64(b)
b, d.v4 = consumeUint64(b)
b, d.total = consumeUint64(b)
copy(d.mem[:], b)
d.n = int(d.total % uint64(len(d.mem)))
return nil
}
func appendUint64(b []byte, x uint64) []byte {
var a [8]byte
binary.LittleEndian.PutUint64(a[:], x)
return append(b, a[:]...)
}
func consumeUint64(b []byte) ([]byte, uint64) {
x := u64(b)
return b[8:], x
}
func u64(b []byte) uint64 { return binary.LittleEndian.Uint64(b) }
func u32(b []byte) uint32 { return binary.LittleEndian.Uint32(b) }
func round(acc, input uint64) uint64 {
acc += input * prime2
acc = rol31(acc)
acc *= prime1
return acc
}
func mergeRound(acc, val uint64) uint64 {
val = round(0, val)
acc ^= val
acc = acc*prime1 + prime4
return acc
}
func rol1(x uint64) uint64 { return bits.RotateLeft64(x, 1) }
func rol7(x uint64) uint64 { return bits.RotateLeft64(x, 7) }
func rol11(x uint64) uint64 { return bits.RotateLeft64(x, 11) }
func rol12(x uint64) uint64 { return bits.RotateLeft64(x, 12) }
func rol18(x uint64) uint64 { return bits.RotateLeft64(x, 18) }
func rol23(x uint64) uint64 { return bits.RotateLeft64(x, 23) }
func rol27(x uint64) uint64 { return bits.RotateLeft64(x, 27) }
func rol31(x uint64) uint64 { return bits.RotateLeft64(x, 31) }

209
vendor/github.com/cespare/xxhash/v2/xxhash_amd64.s generated vendored Normal file
View file

@ -0,0 +1,209 @@
//go:build !appengine && gc && !purego
// +build !appengine
// +build gc
// +build !purego
#include "textflag.h"
// Registers:
#define h AX
#define d AX
#define p SI // pointer to advance through b
#define n DX
#define end BX // loop end
#define v1 R8
#define v2 R9
#define v3 R10
#define v4 R11
#define x R12
#define prime1 R13
#define prime2 R14
#define prime4 DI
#define round(acc, x) \
IMULQ prime2, x \
ADDQ x, acc \
ROLQ $31, acc \
IMULQ prime1, acc
// round0 performs the operation x = round(0, x).
#define round0(x) \
IMULQ prime2, x \
ROLQ $31, x \
IMULQ prime1, x
// mergeRound applies a merge round on the two registers acc and x.
// It assumes that prime1, prime2, and prime4 have been loaded.
#define mergeRound(acc, x) \
round0(x) \
XORQ x, acc \
IMULQ prime1, acc \
ADDQ prime4, acc
// blockLoop processes as many 32-byte blocks as possible,
// updating v1, v2, v3, and v4. It assumes that there is at least one block
// to process.
#define blockLoop() \
loop: \
MOVQ +0(p), x \
round(v1, x) \
MOVQ +8(p), x \
round(v2, x) \
MOVQ +16(p), x \
round(v3, x) \
MOVQ +24(p), x \
round(v4, x) \
ADDQ $32, p \
CMPQ p, end \
JLE loop
// func Sum64(b []byte) uint64
TEXT ·Sum64(SB), NOSPLIT|NOFRAME, $0-32
// Load fixed primes.
MOVQ ·primes+0(SB), prime1
MOVQ ·primes+8(SB), prime2
MOVQ ·primes+24(SB), prime4
// Load slice.
MOVQ b_base+0(FP), p
MOVQ b_len+8(FP), n
LEAQ (p)(n*1), end
// The first loop limit will be len(b)-32.
SUBQ $32, end
// Check whether we have at least one block.
CMPQ n, $32
JLT noBlocks
// Set up initial state (v1, v2, v3, v4).
MOVQ prime1, v1
ADDQ prime2, v1
MOVQ prime2, v2
XORQ v3, v3
XORQ v4, v4
SUBQ prime1, v4
blockLoop()
MOVQ v1, h
ROLQ $1, h
MOVQ v2, x
ROLQ $7, x
ADDQ x, h
MOVQ v3, x
ROLQ $12, x
ADDQ x, h
MOVQ v4, x
ROLQ $18, x
ADDQ x, h
mergeRound(h, v1)
mergeRound(h, v2)
mergeRound(h, v3)
mergeRound(h, v4)
JMP afterBlocks
noBlocks:
MOVQ ·primes+32(SB), h
afterBlocks:
ADDQ n, h
ADDQ $24, end
CMPQ p, end
JG try4
loop8:
MOVQ (p), x
ADDQ $8, p
round0(x)
XORQ x, h
ROLQ $27, h
IMULQ prime1, h
ADDQ prime4, h
CMPQ p, end
JLE loop8
try4:
ADDQ $4, end
CMPQ p, end
JG try1
MOVL (p), x
ADDQ $4, p
IMULQ prime1, x
XORQ x, h
ROLQ $23, h
IMULQ prime2, h
ADDQ ·primes+16(SB), h
try1:
ADDQ $4, end
CMPQ p, end
JGE finalize
loop1:
MOVBQZX (p), x
ADDQ $1, p
IMULQ ·primes+32(SB), x
XORQ x, h
ROLQ $11, h
IMULQ prime1, h
CMPQ p, end
JL loop1
finalize:
MOVQ h, x
SHRQ $33, x
XORQ x, h
IMULQ prime2, h
MOVQ h, x
SHRQ $29, x
XORQ x, h
IMULQ ·primes+16(SB), h
MOVQ h, x
SHRQ $32, x
XORQ x, h
MOVQ h, ret+24(FP)
RET
// func writeBlocks(d *Digest, b []byte) int
TEXT ·writeBlocks(SB), NOSPLIT|NOFRAME, $0-40
// Load fixed primes needed for round.
MOVQ ·primes+0(SB), prime1
MOVQ ·primes+8(SB), prime2
// Load slice.
MOVQ b_base+8(FP), p
MOVQ b_len+16(FP), n
LEAQ (p)(n*1), end
SUBQ $32, end
// Load vN from d.
MOVQ s+0(FP), d
MOVQ 0(d), v1
MOVQ 8(d), v2
MOVQ 16(d), v3
MOVQ 24(d), v4
// We don't need to check the loop condition here; this function is
// always called with at least one block of data to process.
blockLoop()
// Copy vN back to d.
MOVQ v1, 0(d)
MOVQ v2, 8(d)
MOVQ v3, 16(d)
MOVQ v4, 24(d)
// The number of bytes written is p minus the old base pointer.
SUBQ b_base+8(FP), p
MOVQ p, ret+32(FP)
RET

183
vendor/github.com/cespare/xxhash/v2/xxhash_arm64.s generated vendored Normal file
View file

@ -0,0 +1,183 @@
//go:build !appengine && gc && !purego
// +build !appengine
// +build gc
// +build !purego
#include "textflag.h"
// Registers:
#define digest R1
#define h R2 // return value
#define p R3 // input pointer
#define n R4 // input length
#define nblocks R5 // n / 32
#define prime1 R7
#define prime2 R8
#define prime3 R9
#define prime4 R10
#define prime5 R11
#define v1 R12
#define v2 R13
#define v3 R14
#define v4 R15
#define x1 R20
#define x2 R21
#define x3 R22
#define x4 R23
#define round(acc, x) \
MADD prime2, acc, x, acc \
ROR $64-31, acc \
MUL prime1, acc
// round0 performs the operation x = round(0, x).
#define round0(x) \
MUL prime2, x \
ROR $64-31, x \
MUL prime1, x
#define mergeRound(acc, x) \
round0(x) \
EOR x, acc \
MADD acc, prime4, prime1, acc
// blockLoop processes as many 32-byte blocks as possible,
// updating v1, v2, v3, and v4. It assumes that n >= 32.
#define blockLoop() \
LSR $5, n, nblocks \
PCALIGN $16 \
loop: \
LDP.P 16(p), (x1, x2) \
LDP.P 16(p), (x3, x4) \
round(v1, x1) \
round(v2, x2) \
round(v3, x3) \
round(v4, x4) \
SUB $1, nblocks \
CBNZ nblocks, loop
// func Sum64(b []byte) uint64
TEXT ·Sum64(SB), NOSPLIT|NOFRAME, $0-32
LDP b_base+0(FP), (p, n)
LDP ·primes+0(SB), (prime1, prime2)
LDP ·primes+16(SB), (prime3, prime4)
MOVD ·primes+32(SB), prime5
CMP $32, n
CSEL LT, prime5, ZR, h // if n < 32 { h = prime5 } else { h = 0 }
BLT afterLoop
ADD prime1, prime2, v1
MOVD prime2, v2
MOVD $0, v3
NEG prime1, v4
blockLoop()
ROR $64-1, v1, x1
ROR $64-7, v2, x2
ADD x1, x2
ROR $64-12, v3, x3
ROR $64-18, v4, x4
ADD x3, x4
ADD x2, x4, h
mergeRound(h, v1)
mergeRound(h, v2)
mergeRound(h, v3)
mergeRound(h, v4)
afterLoop:
ADD n, h
TBZ $4, n, try8
LDP.P 16(p), (x1, x2)
round0(x1)
// NOTE: here and below, sequencing the EOR after the ROR (using a
// rotated register) is worth a small but measurable speedup for small
// inputs.
ROR $64-27, h
EOR x1 @> 64-27, h, h
MADD h, prime4, prime1, h
round0(x2)
ROR $64-27, h
EOR x2 @> 64-27, h, h
MADD h, prime4, prime1, h
try8:
TBZ $3, n, try4
MOVD.P 8(p), x1
round0(x1)
ROR $64-27, h
EOR x1 @> 64-27, h, h
MADD h, prime4, prime1, h
try4:
TBZ $2, n, try2
MOVWU.P 4(p), x2
MUL prime1, x2
ROR $64-23, h
EOR x2 @> 64-23, h, h
MADD h, prime3, prime2, h
try2:
TBZ $1, n, try1
MOVHU.P 2(p), x3
AND $255, x3, x1
LSR $8, x3, x2
MUL prime5, x1
ROR $64-11, h
EOR x1 @> 64-11, h, h
MUL prime1, h
MUL prime5, x2
ROR $64-11, h
EOR x2 @> 64-11, h, h
MUL prime1, h
try1:
TBZ $0, n, finalize
MOVBU (p), x4
MUL prime5, x4
ROR $64-11, h
EOR x4 @> 64-11, h, h
MUL prime1, h
finalize:
EOR h >> 33, h
MUL prime2, h
EOR h >> 29, h
MUL prime3, h
EOR h >> 32, h
MOVD h, ret+24(FP)
RET
// func writeBlocks(d *Digest, b []byte) int
TEXT ·writeBlocks(SB), NOSPLIT|NOFRAME, $0-40
LDP ·primes+0(SB), (prime1, prime2)
// Load state. Assume v[1-4] are stored contiguously.
MOVD d+0(FP), digest
LDP 0(digest), (v1, v2)
LDP 16(digest), (v3, v4)
LDP b_base+8(FP), (p, n)
blockLoop()
// Store updated state.
STP (v1, v2), 0(digest)
STP (v3, v4), 16(digest)
BIC $31, n
MOVD n, ret+32(FP)
RET

15
vendor/github.com/cespare/xxhash/v2/xxhash_asm.go generated vendored Normal file
View file

@ -0,0 +1,15 @@
//go:build (amd64 || arm64) && !appengine && gc && !purego
// +build amd64 arm64
// +build !appengine
// +build gc
// +build !purego
package xxhash
// Sum64 computes the 64-bit xxHash digest of b with a zero seed.
//
//go:noescape
func Sum64(b []byte) uint64
//go:noescape
func writeBlocks(d *Digest, b []byte) int

76
vendor/github.com/cespare/xxhash/v2/xxhash_other.go generated vendored Normal file
View file

@ -0,0 +1,76 @@
//go:build (!amd64 && !arm64) || appengine || !gc || purego
// +build !amd64,!arm64 appengine !gc purego
package xxhash
// Sum64 computes the 64-bit xxHash digest of b with a zero seed.
func Sum64(b []byte) uint64 {
// A simpler version would be
// d := New()
// d.Write(b)
// return d.Sum64()
// but this is faster, particularly for small inputs.
n := len(b)
var h uint64
if n >= 32 {
v1 := primes[0] + prime2
v2 := prime2
v3 := uint64(0)
v4 := -primes[0]
for len(b) >= 32 {
v1 = round(v1, u64(b[0:8:len(b)]))
v2 = round(v2, u64(b[8:16:len(b)]))
v3 = round(v3, u64(b[16:24:len(b)]))
v4 = round(v4, u64(b[24:32:len(b)]))
b = b[32:len(b):len(b)]
}
h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4)
h = mergeRound(h, v1)
h = mergeRound(h, v2)
h = mergeRound(h, v3)
h = mergeRound(h, v4)
} else {
h = prime5
}
h += uint64(n)
for ; len(b) >= 8; b = b[8:] {
k1 := round(0, u64(b[:8]))
h ^= k1
h = rol27(h)*prime1 + prime4
}
if len(b) >= 4 {
h ^= uint64(u32(b[:4])) * prime1
h = rol23(h)*prime2 + prime3
b = b[4:]
}
for ; len(b) > 0; b = b[1:] {
h ^= uint64(b[0]) * prime5
h = rol11(h) * prime1
}
h ^= h >> 33
h *= prime2
h ^= h >> 29
h *= prime3
h ^= h >> 32
return h
}
func writeBlocks(d *Digest, b []byte) int {
v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4
n := len(b)
for len(b) >= 32 {
v1 = round(v1, u64(b[0:8:len(b)]))
v2 = round(v2, u64(b[8:16:len(b)]))
v3 = round(v3, u64(b[16:24:len(b)]))
v4 = round(v4, u64(b[24:32:len(b)]))
b = b[32:len(b):len(b)]
}
d.v1, d.v2, d.v3, d.v4 = v1, v2, v3, v4
return n - len(b)
}

16
vendor/github.com/cespare/xxhash/v2/xxhash_safe.go generated vendored Normal file
View file

@ -0,0 +1,16 @@
//go:build appengine
// +build appengine
// This file contains the safe implementations of otherwise unsafe-using code.
package xxhash
// Sum64String computes the 64-bit xxHash digest of s with a zero seed.
func Sum64String(s string) uint64 {
return Sum64([]byte(s))
}
// WriteString adds more data to d. It always returns len(s), nil.
func (d *Digest) WriteString(s string) (n int, err error) {
return d.Write([]byte(s))
}

58
vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go generated vendored Normal file
View file

@ -0,0 +1,58 @@
//go:build !appengine
// +build !appengine
// This file encapsulates usage of unsafe.
// xxhash_safe.go contains the safe implementations.
package xxhash
import (
"unsafe"
)
// In the future it's possible that compiler optimizations will make these
// XxxString functions unnecessary by realizing that calls such as
// Sum64([]byte(s)) don't need to copy s. See https://go.dev/issue/2205.
// If that happens, even if we keep these functions they can be replaced with
// the trivial safe code.
// NOTE: The usual way of doing an unsafe string-to-[]byte conversion is:
//
// var b []byte
// bh := (*reflect.SliceHeader)(unsafe.Pointer(&b))
// bh.Data = (*reflect.StringHeader)(unsafe.Pointer(&s)).Data
// bh.Len = len(s)
// bh.Cap = len(s)
//
// Unfortunately, as of Go 1.15.3 the inliner's cost model assigns a high enough
// weight to this sequence of expressions that any function that uses it will
// not be inlined. Instead, the functions below use a different unsafe
// conversion designed to minimize the inliner weight and allow both to be
// inlined. There is also a test (TestInlining) which verifies that these are
// inlined.
//
// See https://github.com/golang/go/issues/42739 for discussion.
// Sum64String computes the 64-bit xxHash digest of s with a zero seed.
// It may be faster than Sum64([]byte(s)) by avoiding a copy.
func Sum64String(s string) uint64 {
b := *(*[]byte)(unsafe.Pointer(&sliceHeader{s, len(s)}))
return Sum64(b)
}
// WriteString adds more data to d. It always returns len(s), nil.
// It may be faster than Write([]byte(s)) by avoiding a copy.
func (d *Digest) WriteString(s string) (n int, err error) {
d.Write(*(*[]byte)(unsafe.Pointer(&sliceHeader{s, len(s)})))
// d.Write always returns len(s), nil.
// Ignoring the return output and returning these fixed values buys a
// savings of 6 in the inliner's cost model.
return len(s), nil
}
// sliceHeader is similar to reflect.SliceHeader, but it assumes that the layout
// of the first two words is the same as the layout of a string.
type sliceHeader struct {
s string
cap int
}

12
vendor/github.com/disintegration/imaging/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,12 @@
language: go
go:
- "1.10.x"
- "1.11.x"
- "1.12.x"
before_install:
- go get github.com/mattn/goveralls
script:
- go test -v -race -cover
- $GOPATH/bin/goveralls -service=travis-ci

21
vendor/github.com/disintegration/imaging/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2012 Grigory Dryapak
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

226
vendor/github.com/disintegration/imaging/README.md generated vendored Normal file
View file

@ -0,0 +1,226 @@
# Imaging
[![GoDoc](https://godoc.org/github.com/disintegration/imaging?status.svg)](https://godoc.org/github.com/disintegration/imaging)
[![Build Status](https://travis-ci.org/disintegration/imaging.svg?branch=master)](https://travis-ci.org/disintegration/imaging)
[![Coverage Status](https://coveralls.io/repos/github/disintegration/imaging/badge.svg?branch=master&service=github)](https://coveralls.io/github/disintegration/imaging?branch=master)
[![Go Report Card](https://goreportcard.com/badge/github.com/disintegration/imaging)](https://goreportcard.com/report/github.com/disintegration/imaging)
Package imaging provides basic image processing functions (resize, rotate, crop, brightness/contrast adjustments, etc.).
All the image processing functions provided by the package accept any image type that implements `image.Image` interface
as an input, and return a new image of `*image.NRGBA` type (32bit RGBA colors, non-premultiplied alpha).
## Installation
go get -u github.com/disintegration/imaging
## Documentation
http://godoc.org/github.com/disintegration/imaging
## Usage examples
A few usage examples can be found below. See the documentation for the full list of supported functions.
### Image resizing
```go
// Resize srcImage to size = 128x128px using the Lanczos filter.
dstImage128 := imaging.Resize(srcImage, 128, 128, imaging.Lanczos)
// Resize srcImage to width = 800px preserving the aspect ratio.
dstImage800 := imaging.Resize(srcImage, 800, 0, imaging.Lanczos)
// Scale down srcImage to fit the 800x600px bounding box.
dstImageFit := imaging.Fit(srcImage, 800, 600, imaging.Lanczos)
// Resize and crop the srcImage to fill the 100x100px area.
dstImageFill := imaging.Fill(srcImage, 100, 100, imaging.Center, imaging.Lanczos)
```
Imaging supports image resizing using various resampling filters. The most notable ones:
- `Lanczos` - A high-quality resampling filter for photographic images yielding sharp results.
- `CatmullRom` - A sharp cubic filter that is faster than Lanczos filter while providing similar results.
- `MitchellNetravali` - A cubic filter that produces smoother results with less ringing artifacts than CatmullRom.
- `Linear` - Bilinear resampling filter, produces smooth output. Faster than cubic filters.
- `Box` - Simple and fast averaging filter appropriate for downscaling. When upscaling it's similar to NearestNeighbor.
- `NearestNeighbor` - Fastest resampling filter, no antialiasing.
The full list of supported filters: NearestNeighbor, Box, Linear, Hermite, MitchellNetravali, CatmullRom, BSpline, Gaussian, Lanczos, Hann, Hamming, Blackman, Bartlett, Welch, Cosine. Custom filters can be created using ResampleFilter struct.
**Resampling filters comparison**
Original image:
![srcImage](testdata/branches.png)
The same image resized from 600x400px to 150x100px using different resampling filters.
From faster (lower quality) to slower (higher quality):
Filter | Resize result
--------------------------|---------------------------------------------
`imaging.NearestNeighbor` | ![dstImage](testdata/out_resize_nearest.png)
`imaging.Linear` | ![dstImage](testdata/out_resize_linear.png)
`imaging.CatmullRom` | ![dstImage](testdata/out_resize_catrom.png)
`imaging.Lanczos` | ![dstImage](testdata/out_resize_lanczos.png)
### Gaussian Blur
```go
dstImage := imaging.Blur(srcImage, 0.5)
```
Sigma parameter allows to control the strength of the blurring effect.
Original image | Sigma = 0.5 | Sigma = 1.5
-----------------------------------|----------------------------------------|---------------------------------------
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_blur_0.5.png) | ![dstImage](testdata/out_blur_1.5.png)
### Sharpening
```go
dstImage := imaging.Sharpen(srcImage, 0.5)
```
`Sharpen` uses gaussian function internally. Sigma parameter allows to control the strength of the sharpening effect.
Original image | Sigma = 0.5 | Sigma = 1.5
-----------------------------------|-------------------------------------------|------------------------------------------
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_sharpen_0.5.png) | ![dstImage](testdata/out_sharpen_1.5.png)
### Gamma correction
```go
dstImage := imaging.AdjustGamma(srcImage, 0.75)
```
Original image | Gamma = 0.75 | Gamma = 1.25
-----------------------------------|------------------------------------------|-----------------------------------------
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_gamma_0.75.png) | ![dstImage](testdata/out_gamma_1.25.png)
### Contrast adjustment
```go
dstImage := imaging.AdjustContrast(srcImage, 20)
```
Original image | Contrast = 15 | Contrast = -15
-----------------------------------|--------------------------------------------|-------------------------------------------
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_contrast_p15.png) | ![dstImage](testdata/out_contrast_m15.png)
### Brightness adjustment
```go
dstImage := imaging.AdjustBrightness(srcImage, 20)
```
Original image | Brightness = 10 | Brightness = -10
-----------------------------------|----------------------------------------------|---------------------------------------------
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_brightness_p10.png) | ![dstImage](testdata/out_brightness_m10.png)
### Saturation adjustment
```go
dstImage := imaging.AdjustSaturation(srcImage, 20)
```
Original image | Saturation = 30 | Saturation = -30
-----------------------------------|----------------------------------------------|---------------------------------------------
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_saturation_p30.png) | ![dstImage](testdata/out_saturation_m30.png)
## FAQ
### Incorrect image orientation after processing (e.g. an image appears rotated after resizing)
Most probably, the given image contains the EXIF orientation tag.
The standard `image/*` packages do not support loading and saving
this kind of information. To fix the issue, try opening images with
the `AutoOrientation` decode option. If this option is set to `true`,
the image orientation is changed after decoding, according to the
orientation tag (if present). Here's the example:
```go
img, err := imaging.Open("test.jpg", imaging.AutoOrientation(true))
```
### What's the difference between `imaging` and `gift` packages?
[imaging](https://github.com/disintegration/imaging)
is designed to be a lightweight and simple image manipulation package.
It provides basic image processing functions and a few helper functions
such as `Open` and `Save`. It consistently returns *image.NRGBA image
type (8 bits per channel, RGBA).
[gift](https://github.com/disintegration/gift)
supports more advanced image processing, for example, sRGB/Linear color
space conversions. It also supports different output image types
(e.g. 16 bits per channel) and provides easy-to-use API for chaining
multiple processing steps together.
## Example code
```go
package main
import (
"image"
"image/color"
"log"
"github.com/disintegration/imaging"
)
func main() {
// Open a test image.
src, err := imaging.Open("testdata/flowers.png")
if err != nil {
log.Fatalf("failed to open image: %v", err)
}
// Crop the original image to 300x300px size using the center anchor.
src = imaging.CropAnchor(src, 300, 300, imaging.Center)
// Resize the cropped image to width = 200px preserving the aspect ratio.
src = imaging.Resize(src, 200, 0, imaging.Lanczos)
// Create a blurred version of the image.
img1 := imaging.Blur(src, 5)
// Create a grayscale version of the image with higher contrast and sharpness.
img2 := imaging.Grayscale(src)
img2 = imaging.AdjustContrast(img2, 20)
img2 = imaging.Sharpen(img2, 2)
// Create an inverted version of the image.
img3 := imaging.Invert(src)
// Create an embossed version of the image using a convolution filter.
img4 := imaging.Convolve3x3(
src,
[9]float64{
-1, -1, 0,
-1, 1, 1,
0, 1, 1,
},
nil,
)
// Create a new image and paste the four produced images into it.
dst := imaging.New(400, 400, color.NRGBA{0, 0, 0, 0})
dst = imaging.Paste(dst, img1, image.Pt(0, 0))
dst = imaging.Paste(dst, img2, image.Pt(0, 200))
dst = imaging.Paste(dst, img3, image.Pt(200, 0))
dst = imaging.Paste(dst, img4, image.Pt(200, 200))
// Save the resulting image as JPEG.
err = imaging.Save(dst, "testdata/out_example.jpg")
if err != nil {
log.Fatalf("failed to save image: %v", err)
}
}
```
Output:
![dstImage](testdata/out_example.jpg)

253
vendor/github.com/disintegration/imaging/adjust.go generated vendored Normal file
View file

@ -0,0 +1,253 @@
package imaging
import (
"image"
"image/color"
"math"
)
// Grayscale produces a grayscale version of the image.
//
// Each pixel's R, G and B channels are replaced by its luminance computed
// with the standard Rec. 601 weights (0.299 R + 0.587 G + 0.114 B); the
// alpha channel is left untouched. Rows are processed concurrently.
func Grayscale(img image.Image) *image.NRGBA {
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
	parallel(0, src.h, func(ys <-chan int) {
		for y := range ys {
			i := y * dst.Stride
			// Copy the source row into dst, then convert it in place.
			src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
			for x := 0; x < src.w; x++ {
				d := dst.Pix[i : i+3 : i+3]
				r := d[0]
				g := d[1]
				b := d[2]
				// Weighted luminance; +0.5 rounds to nearest.
				f := 0.299*float64(r) + 0.587*float64(g) + 0.114*float64(b)
				y := uint8(f + 0.5) // shadows the row index y for this pixel only
				d[0] = y
				d[1] = y
				d[2] = y
				i += 4
			}
		}
	})
	return dst
}
// Invert produces an inverted (negated) version of the image.
// Each color channel is replaced by its complement; alpha is untouched.
func Invert(img image.Image) *image.NRGBA {
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
	parallel(0, src.h, func(rows <-chan int) {
		for row := range rows {
			offset := row * dst.Stride
			// Pull the source row into dst, then negate it in place.
			src.scan(0, row, src.w, row+1, dst.Pix[offset:offset+src.w*4])
			for col := 0; col < src.w; col++ {
				px := dst.Pix[offset : offset+3 : offset+3]
				px[0] = 0xff - px[0]
				px[1] = 0xff - px[1]
				px[2] = 0xff - px[2]
				offset += 4
			}
		}
	})
	return dst
}
// AdjustSaturation changes the saturation of the image using the percentage parameter and returns the adjusted image.
// The percentage must be in the range (-100, 100).
// The percentage = 0 gives the original image.
// The percentage = 100 gives the image with the saturation value doubled for each pixel.
// The percentage = -100 gives the image with the saturation value zeroed for each pixel (grayscale).
//
// Examples:
//
//	dstImage = imaging.AdjustSaturation(srcImage, 25) // Increase image saturation by 25%.
//	dstImage = imaging.AdjustSaturation(srcImage, -10) // Decrease image saturation by 10%.
//
func AdjustSaturation(img image.Image, percentage float64) *image.NRGBA {
	// Clamp the input to [-100, 100] and map it to a multiplier in [0, 2].
	percentage = math.Min(math.Max(percentage, -100), 100)
	multiplier := 1 + percentage/100
	return AdjustFunc(img, func(c color.NRGBA) color.NRGBA {
		// Scale the S channel in HSL space, capping at full saturation.
		h, s, l := rgbToHSL(c.R, c.G, c.B)
		s *= multiplier
		if s > 1 {
			s = 1
		}
		r, g, b := hslToRGB(h, s, l)
		return color.NRGBA{r, g, b, c.A}
	})
}
// AdjustContrast changes the contrast of the image using the percentage parameter and returns the adjusted image.
// The percentage must be in range (-100, 100). The percentage = 0 gives the original image.
// The percentage = -100 gives solid gray image.
//
// Examples:
//
//	dstImage = imaging.AdjustContrast(srcImage, -10) // Decrease image contrast by 10%.
//	dstImage = imaging.AdjustContrast(srcImage, 20) // Increase image contrast by 20%.
//
func AdjustContrast(img image.Image, percentage float64) *image.NRGBA {
	percentage = math.Min(math.Max(percentage, -100.0), 100.0)
	lut := make([]uint8, 256)

	// v in [0, 2]: 0 collapses everything to mid-gray, 1 is identity,
	// values above 1 stretch the range around the 0.5 midpoint.
	v := (100.0 + percentage) / 100.0
	for i := 0; i < 256; i++ {
		switch {
		case 0 <= v && v <= 1:
			// Linear interpolation toward mid-gray.
			lut[i] = clamp((0.5 + (float64(i)/255.0-0.5)*v) * 255.0)
		case 1 < v && v < 2:
			// Stretch around the midpoint; 1/(2-v) grows as v approaches 2.
			lut[i] = clamp((0.5 + (float64(i)/255.0-0.5)*(1/(2.0-v))) * 255.0)
		default:
			// v == 2 (percentage == 100): hard threshold at the midpoint,
			// mapping each entry to either 0 or 255.
			lut[i] = uint8(float64(i)/255.0+0.5) * 255
		}
	}
	return adjustLUT(img, lut)
}
// AdjustBrightness changes the brightness of the image using the percentage parameter and returns the adjusted image.
// The percentage must be in range (-100, 100). The percentage = 0 gives the original image.
// The percentage = -100 gives solid black image. The percentage = 100 gives solid white image.
//
// Examples:
//
//	dstImage = imaging.AdjustBrightness(srcImage, -15) // Decrease image brightness by 15%.
//	dstImage = imaging.AdjustBrightness(srcImage, 10) // Increase image brightness by 10%.
//
func AdjustBrightness(img image.Image, percentage float64) *image.NRGBA {
	percentage = math.Min(math.Max(percentage, -100.0), 100.0)
	// A full-range shift is +/-255; build a lookup table that adds the
	// scaled offset to every possible 8-bit channel value.
	delta := 255.0 * percentage / 100.0
	table := make([]uint8, 256)
	for v := range table {
		table[v] = clamp(float64(v) + delta)
	}
	return adjustLUT(img, table)
}
// AdjustGamma performs a gamma correction on the image and returns the adjusted image.
// Gamma parameter must be positive. Gamma = 1.0 gives the original image.
// Gamma less than 1.0 darkens the image and gamma greater than 1.0 lightens it.
//
// Example:
//
//	dstImage = imaging.AdjustGamma(srcImage, 0.7)
//
func AdjustGamma(img image.Image, gamma float64) *image.NRGBA {
	// Guard against zero/negative gamma, then precompute the power curve
	// for every possible 8-bit channel value.
	exponent := 1.0 / math.Max(gamma, 0.0001)
	table := make([]uint8, 256)
	for v := range table {
		table[v] = clamp(math.Pow(float64(v)/255.0, exponent) * 255.0)
	}
	return adjustLUT(img, table)
}
// AdjustSigmoid changes the contrast of the image using a sigmoidal function and returns the adjusted image.
// It's a non-linear contrast change useful for photo adjustments as it preserves highlight and shadow detail.
// The midpoint parameter is the midpoint of contrast that must be between 0 and 1, typically 0.5.
// The factor parameter indicates how much to increase or decrease the contrast, typically in range (-10, 10).
// If the factor parameter is positive the image contrast is increased otherwise the contrast is decreased.
//
// Examples:
//
//	dstImage = imaging.AdjustSigmoid(srcImage, 0.5, 3.0) // Increase the contrast.
//	dstImage = imaging.AdjustSigmoid(srcImage, 0.5, -3.0) // Decrease the contrast.
//
func AdjustSigmoid(img image.Image, midpoint, factor float64) *image.NRGBA {
	if factor == 0 {
		// Zero factor is the identity adjustment.
		return Clone(img)
	}

	lut := make([]uint8, 256)
	a := math.Min(math.Max(midpoint, 0.0), 1.0) // clamp midpoint to [0, 1]
	b := math.Abs(factor)
	// Sigmoid values at the ends of the [0, 1] input range, used to
	// rescale the curve so that 0 maps to 0 and 1 maps to 1.
	sig0 := sigmoid(a, b, 0)
	sig1 := sigmoid(a, b, 1)
	e := 1.0e-6 // keeps the log argument in the inverse branch away from 0 and 1

	if factor > 0 {
		// Increase contrast: map inputs through the rescaled sigmoid.
		for i := 0; i < 256; i++ {
			x := float64(i) / 255.0
			sigX := sigmoid(a, b, x)
			f := (sigX - sig0) / (sig1 - sig0)
			lut[i] = clamp(f * 255.0)
		}
	} else {
		// Decrease contrast: apply the inverse of the sigmoid mapping.
		for i := 0; i < 256; i++ {
			x := float64(i) / 255.0
			arg := math.Min(math.Max((sig1-sig0)*x+sig0, e), 1.0-e)
			f := a - math.Log(1.0/arg-1.0)/b
			lut[i] = clamp(f * 255.0)
		}
	}

	return adjustLUT(img, lut)
}
// sigmoid is the logistic function centered at a with steepness b,
// evaluated at x. It maps the real line onto (0, 1) and equals 0.5 at x == a.
func sigmoid(a, b, x float64) float64 {
	return 1.0 / (1.0 + math.Exp(-b*(x-a)))
}
// adjustLUT applies the given lookup table to the colors of the image.
// Only the R, G and B channels are remapped; alpha is left as-is.
func adjustLUT(img image.Image, lut []uint8) *image.NRGBA {
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
	lut = lut[0:256] // ensure the table has exactly 256 entries (panics otherwise)
	parallel(0, src.h, func(ys <-chan int) {
		for y := range ys {
			i := y * dst.Stride
			// Copy the source row into dst, then remap it in place.
			src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
			for x := 0; x < src.w; x++ {
				d := dst.Pix[i : i+3 : i+3]
				d[0] = lut[d[0]]
				d[1] = lut[d[1]]
				d[2] = lut[d[2]]
				i += 4
			}
		}
	})
	return dst
}
// AdjustFunc applies the fn function to each pixel of the img image and returns the adjusted image.
//
// Example:
//
//	dstImage = imaging.AdjustFunc(
//		srcImage,
//		func(c color.NRGBA) color.NRGBA {
//			// Shift the red channel by 16.
//			r := int(c.R) + 16
//			if r > 255 {
//				r = 255
//			}
//			return color.NRGBA{uint8(r), c.G, c.B, c.A}
//		}
//	)
//
func AdjustFunc(img image.Image, fn func(c color.NRGBA) color.NRGBA) *image.NRGBA {
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
	parallel(0, src.h, func(ys <-chan int) {
		for y := range ys {
			i := y * dst.Stride
			// Copy the source row into dst, then transform each pixel in place.
			src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
			for x := 0; x < src.w; x++ {
				d := dst.Pix[i : i+4 : i+4]
				r := d[0]
				g := d[1]
				b := d[2]
				a := d[3]
				c := fn(color.NRGBA{r, g, b, a})
				d[0] = c.R
				d[1] = c.G
				d[2] = c.B
				d[3] = c.A
				i += 4
			}
		}
	})
	return dst
}

148
vendor/github.com/disintegration/imaging/convolution.go generated vendored Normal file
View file

@ -0,0 +1,148 @@
package imaging
import (
"image"
)
// ConvolveOptions are convolution parameters.
type ConvolveOptions struct {
	// If Normalize is true the kernel is normalized before convolution
	// (divided by the sum of its weights).
	Normalize bool

	// If Abs is true the absolute value of each color channel is taken after convolution.
	Abs bool

	// Bias is added to each color channel value after convolution.
	Bias int
}
// Convolve3x3 convolves the image with the specified 3x3 convolution kernel.
// Default parameters are used if a nil *ConvolveOptions is passed.
func Convolve3x3(img image.Image, kernel [9]float64, options *ConvolveOptions) *image.NRGBA {
	// Flatten the fixed-size kernel to a slice for the shared worker.
	k := kernel[:]
	return convolve(img, k, options)
}
// Convolve5x5 convolves the image with the specified 5x5 convolution kernel.
// Default parameters are used if a nil *ConvolveOptions is passed.
func Convolve5x5(img image.Image, kernel [25]float64, options *ConvolveOptions) *image.NRGBA {
	// Flatten the fixed-size kernel to a slice for the shared worker.
	k := kernel[:]
	return convolve(img, k, options)
}
// convolve applies the given flattened 3x3 (len 9) or 5x5 (len 25) kernel
// to the image. Samples that fall outside the image are clamped to the
// nearest edge pixel. The alpha channel is copied from the source unchanged.
func convolve(img image.Image, kernel []float64, options *ConvolveOptions) *image.NRGBA {
	src := toNRGBA(img)
	w := src.Bounds().Max.X
	h := src.Bounds().Max.Y
	dst := image.NewNRGBA(image.Rect(0, 0, w, h))

	if w < 1 || h < 1 {
		return dst
	}

	if options == nil {
		options = &ConvolveOptions{}
	}

	if options.Normalize {
		normalizeKernel(kernel)
	}

	type coef struct {
		x, y int
		k    float64
	}
	var coefs []coef
	var m int

	// m is the kernel radius: 1 for 3x3, 2 for 5x5.
	switch len(kernel) {
	case 9:
		m = 1
	case 25:
		m = 2
	}

	// Collect only the non-zero kernel entries together with their
	// (x, y) offsets, so the inner loop skips zero weights entirely.
	i := 0
	for y := -m; y <= m; y++ {
		for x := -m; x <= m; x++ {
			if kernel[i] != 0 {
				coefs = append(coefs, coef{x: x, y: y, k: kernel[i]})
			}
			i++
		}
	}

	parallel(0, h, func(ys <-chan int) {
		for y := range ys {
			for x := 0; x < w; x++ {
				var r, g, b float64
				for _, c := range coefs {
					// Clamp the sample coordinates to the image bounds.
					ix := x + c.x
					if ix < 0 {
						ix = 0
					} else if ix >= w {
						ix = w - 1
					}

					iy := y + c.y
					if iy < 0 {
						iy = 0
					} else if iy >= h {
						iy = h - 1
					}

					off := iy*src.Stride + ix*4
					s := src.Pix[off : off+3 : off+3]
					r += float64(s[0]) * c.k
					g += float64(s[1]) * c.k
					b += float64(s[2]) * c.k
				}

				if options.Abs {
					if r < 0 {
						r = -r
					}
					if g < 0 {
						g = -g
					}
					if b < 0 {
						b = -b
					}
				}

				if options.Bias != 0 {
					r += float64(options.Bias)
					g += float64(options.Bias)
					b += float64(options.Bias)
				}

				srcOff := y*src.Stride + x*4
				dstOff := y*dst.Stride + x*4
				d := dst.Pix[dstOff : dstOff+4 : dstOff+4]
				d[0] = clamp(r)
				d[1] = clamp(g)
				d[2] = clamp(b)
				d[3] = src.Pix[srcOff+3] // alpha passes through untouched
			}
		}
	})

	return dst
}
// normalizeKernel scales the kernel in place so its weights sum to 1.
// If the weights sum to zero (e.g. an edge-detection kernel), the positive
// weights alone are used as the divisor; an all-zero kernel is left as-is.
func normalizeKernel(kernel []float64) {
	var total, positive float64
	for _, k := range kernel {
		total += k
		if k > 0 {
			positive += k
		}
	}

	var div float64
	switch {
	case total != 0:
		div = total
	case positive != 0:
		div = positive
	default:
		return
	}

	for i := range kernel {
		kernel[i] /= div
	}
}

7
vendor/github.com/disintegration/imaging/doc.go generated vendored Normal file
View file

@ -0,0 +1,7 @@
/*
Package imaging provides basic image processing functions (resize, rotate, crop, brightness/contrast adjustments, etc.).
All the image processing functions provided by the package accept any image type that implements image.Image interface
as an input, and return a new image of *image.NRGBA type (32bit RGBA colors, non-premultiplied alpha).
*/
package imaging

169
vendor/github.com/disintegration/imaging/effects.go generated vendored Normal file
View file

@ -0,0 +1,169 @@
package imaging
import (
"image"
"math"
)
// gaussianBlurKernel evaluates the 1-D Gaussian with standard deviation
// sigma at distance x from the center.
func gaussianBlurKernel(x, sigma float64) float64 {
	variance2 := 2 * sigma * sigma
	norm := sigma * math.Sqrt(2*math.Pi)
	return math.Exp(-(x*x)/variance2) / norm
}
// Blur produces a blurred version of the image using a Gaussian function.
// Sigma parameter must be positive and indicates how much the image will be blurred.
//
// Example:
//
//	dstImage := imaging.Blur(srcImage, 3.5)
//
func Blur(img image.Image, sigma float64) *image.NRGBA {
	if sigma <= 0 {
		// Nothing to blur; hand back a copy of the input.
		return Clone(img)
	}

	// A radius of 3*sigma covers virtually all of the Gaussian's mass.
	radius := int(math.Ceil(sigma * 3.0))
	weights := make([]float64, radius+1)
	for i := range weights {
		weights[i] = gaussianBlurKernel(float64(i), sigma)
	}

	// The 2-D Gaussian is separable: blur each axis independently.
	horizontal := blurHorizontal(img, weights)
	return blurVertical(horizontal, weights)
}
// blurHorizontal convolves each row of the image with the given half-kernel
// (kernel[0] is the center weight, kernel[i] the weight at distance i).
// Color channels are accumulated premultiplied by alpha and divided back
// out on write, so fully transparent pixels contribute no color.
func blurHorizontal(img image.Image, kernel []float64) *image.NRGBA {
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
	radius := len(kernel) - 1

	parallel(0, src.h, func(ys <-chan int) {
		// Per-worker scratch buffers holding one row of pixels.
		scanLine := make([]uint8, src.w*4)
		scanLineF := make([]float64, len(scanLine))
		for y := range ys {
			src.scan(0, y, src.w, y+1, scanLine)
			for i, v := range scanLine {
				scanLineF[i] = float64(v)
			}
			for x := 0; x < src.w; x++ {
				// Clip the kernel window to the row bounds.
				min := x - radius
				if min < 0 {
					min = 0
				}
				max := x + radius
				if max > src.w-1 {
					max = src.w - 1
				}
				var r, g, b, a, wsum float64
				for ix := min; ix <= max; ix++ {
					i := ix * 4
					weight := kernel[absint(x-ix)]
					wsum += weight
					s := scanLineF[i : i+4 : i+4]
					wa := s[3] * weight
					r += s[0] * wa
					g += s[1] * wa
					b += s[2] * wa
					a += wa
				}
				// If the whole window is fully transparent (a == 0),
				// the destination pixel stays at its zero value.
				if a != 0 {
					aInv := 1 / a
					j := y*dst.Stride + x*4
					d := dst.Pix[j : j+4 : j+4]
					d[0] = clamp(r * aInv)
					d[1] = clamp(g * aInv)
					d[2] = clamp(b * aInv)
					d[3] = clamp(a / wsum)
				}
			}
		}
	})
	return dst
}
// blurVertical convolves each column of the image with the given
// half-kernel; it mirrors blurHorizontal with the axes swapped.
func blurVertical(img image.Image, kernel []float64) *image.NRGBA {
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
	radius := len(kernel) - 1

	parallel(0, src.w, func(xs <-chan int) {
		// Per-worker scratch buffers holding one column of pixels.
		scanLine := make([]uint8, src.h*4)
		scanLineF := make([]float64, len(scanLine))
		for x := range xs {
			src.scan(x, 0, x+1, src.h, scanLine)
			for i, v := range scanLine {
				scanLineF[i] = float64(v)
			}
			for y := 0; y < src.h; y++ {
				// Clip the kernel window to the column bounds.
				min := y - radius
				if min < 0 {
					min = 0
				}
				max := y + radius
				if max > src.h-1 {
					max = src.h - 1
				}
				var r, g, b, a, wsum float64
				for iy := min; iy <= max; iy++ {
					i := iy * 4
					weight := kernel[absint(y-iy)]
					wsum += weight
					s := scanLineF[i : i+4 : i+4]
					wa := s[3] * weight
					r += s[0] * wa
					g += s[1] * wa
					b += s[2] * wa
					a += wa
				}
				// If the whole window is fully transparent (a == 0),
				// the destination pixel stays at its zero value.
				if a != 0 {
					aInv := 1 / a
					j := y*dst.Stride + x*4
					d := dst.Pix[j : j+4 : j+4]
					d[0] = clamp(r * aInv)
					d[1] = clamp(g * aInv)
					d[2] = clamp(b * aInv)
					d[3] = clamp(a / wsum)
				}
			}
		}
	})
	return dst
}
// Sharpen produces a sharpened version of the image.
// Sigma parameter must be positive and indicates how much the image will be sharpened.
//
// Sharpen is an unsharp mask: each byte becomes 2*original - blurred,
// clamped to [0, 255]. Note the subtraction is applied to all four
// channels, including alpha.
//
// Example:
//
//	dstImage := imaging.Sharpen(srcImage, 3.5)
//
func Sharpen(img image.Image, sigma float64) *image.NRGBA {
	if sigma <= 0 {
		// Non-positive sigma is the identity operation.
		return Clone(img)
	}

	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))

	blurred := Blur(img, sigma)

	parallel(0, src.h, func(ys <-chan int) {
		scanLine := make([]uint8, src.w*4)
		for y := range ys {
			src.scan(0, y, src.w, y+1, scanLine)
			j := y * dst.Stride
			for i := 0; i < src.w*4; i++ {
				// 2*original - blurred, clamped to the byte range.
				val := int(scanLine[i])<<1 - int(blurred.Pix[j])
				if val < 0 {
					val = 0
				} else if val > 0xff {
					val = 0xff
				}
				dst.Pix[j] = uint8(val)
				j++
			}
		}
	})
	return dst
}

52
vendor/github.com/disintegration/imaging/histogram.go generated vendored Normal file
View file

@ -0,0 +1,52 @@
package imaging
import (
"image"
"sync"
)
// Histogram returns a normalized histogram of an image.
//
// Resulting histogram is represented as an array of 256 floats, where
// histogram[i] is a probability of a pixel being of a particular luminance i.
func Histogram(img image.Image) [256]float64 {
	var mu sync.Mutex
	var histogram [256]float64
	var total float64

	src := newScanner(img)
	if src.w == 0 || src.h == 0 {
		// Empty image: return the all-zero histogram (also avoids the
		// division by a zero total below).
		return histogram
	}

	parallel(0, src.h, func(ys <-chan int) {
		// Accumulate into worker-local counters and merge them under the
		// mutex once per worker, keeping lock contention low.
		var tmpHistogram [256]float64
		var tmpTotal float64
		scanLine := make([]uint8, src.w*4)
		for y := range ys {
			src.scan(0, y, src.w, y+1, scanLine)
			i := 0
			for x := 0; x < src.w; x++ {
				s := scanLine[i : i+3 : i+3]
				r := s[0]
				g := s[1]
				b := s[2]
				// Luminance via the standard Rec. 601 weights; +0.5 rounds.
				y := 0.299*float32(r) + 0.587*float32(g) + 0.114*float32(b)
				tmpHistogram[int(y+0.5)]++
				tmpTotal++
				i += 4
			}
		}
		mu.Lock()
		for i := 0; i < 256; i++ {
			histogram[i] += tmpHistogram[i]
		}
		total += tmpTotal
		mu.Unlock()
	})

	// Normalize counts to probabilities.
	for i := 0; i < 256; i++ {
		histogram[i] = histogram[i] / total
	}
	return histogram
}

444
vendor/github.com/disintegration/imaging/io.go generated vendored Normal file
View file

@ -0,0 +1,444 @@
package imaging
import (
"encoding/binary"
"errors"
"image"
"image/draw"
"image/gif"
"image/jpeg"
"image/png"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"golang.org/x/image/bmp"
"golang.org/x/image/tiff"
)
// fileSystem is the minimal file-access interface used by Open and Save.
type fileSystem interface {
	Create(string) (io.WriteCloser, error)
	Open(string) (io.ReadCloser, error)
}

// localFS implements fileSystem on top of the os package.
type localFS struct{}

func (localFS) Create(name string) (io.WriteCloser, error) { return os.Create(name) }
func (localFS) Open(name string) (io.ReadCloser, error)    { return os.Open(name) }

// fs is the fileSystem implementation used by Open and Save.
var fs fileSystem = localFS{}
// decodeConfig holds the decoding settings collected from DecodeOptions.
type decodeConfig struct {
	autoOrientation bool
}

// defaultDecodeConfig provides the defaults used when no options are given.
var defaultDecodeConfig = decodeConfig{
	autoOrientation: false,
}

// DecodeOption sets an optional parameter for the Decode and Open functions.
type DecodeOption func(*decodeConfig)

// AutoOrientation returns a DecodeOption that sets the auto-orientation mode.
// If auto-orientation is enabled, the image will be transformed after decoding
// according to the EXIF orientation tag (if present). By default it's disabled.
func AutoOrientation(enabled bool) DecodeOption {
	return func(c *decodeConfig) {
		c.autoOrientation = enabled
	}
}
// Decode reads an image from r.
func Decode(r io.Reader, opts ...DecodeOption) (image.Image, error) {
	cfg := defaultDecodeConfig
	for _, option := range opts {
		option(&cfg)
	}

	if !cfg.autoOrientation {
		img, _, err := image.Decode(r)
		return img, err
	}

	// Auto-orientation needs the stream twice (once for the EXIF
	// orientation tag, once for the pixel data), so every byte that
	// image.Decode consumes is teed into a pipe read by a goroutine
	// running readOrientation.
	var orient orientation
	pr, pw := io.Pipe()
	r = io.TeeReader(r, pw)
	done := make(chan struct{})
	go func() {
		defer close(done)
		orient = readOrientation(pr)
		// Drain the rest of the pipe so the tee side never blocks.
		io.Copy(ioutil.Discard, pr)
	}()

	img, _, err := image.Decode(r)
	// Closing the write side unblocks any final reads in the goroutine.
	pw.Close()
	<-done
	if err != nil {
		return nil, err
	}
	return fixOrientation(img, orient), nil
}
// Open loads an image from file.
//
// Examples:
//
//	// Load an image from file.
//	img, err := imaging.Open("test.jpg")
//
//	// Load an image and transform it depending on the EXIF orientation tag (if present).
//	img, err := imaging.Open("test.jpg", imaging.AutoOrientation(true))
//
func Open(filename string, opts ...DecodeOption) (image.Image, error) {
	f, err := fs.Open(filename)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	img, err := Decode(f, opts...)
	return img, err
}
// Format is an image file format.
type Format int

// Image file formats.
const (
	JPEG Format = iota
	PNG
	GIF
	TIFF
	BMP
)

// formatExts maps lower-case filename extensions (without the dot) to formats.
var formatExts = map[string]Format{
	"jpg":  JPEG,
	"jpeg": JPEG,
	"png":  PNG,
	"gif":  GIF,
	"tif":  TIFF,
	"tiff": TIFF,
	"bmp":  BMP,
}

// formatNames maps formats to their display names.
var formatNames = map[Format]string{
	JPEG: "JPEG",
	PNG:  "PNG",
	GIF:  "GIF",
	TIFF: "TIFF",
	BMP:  "BMP",
}

// String returns the display name of the format.
func (f Format) String() string {
	return formatNames[f]
}
// ErrUnsupportedFormat means the given image format is not supported.
var ErrUnsupportedFormat = errors.New("imaging: unsupported image format")

// FormatFromExtension parses image format from filename extension:
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
func FormatFromExtension(ext string) (Format, error) {
	// Normalize: strip a leading dot and lower-case before the lookup.
	key := strings.ToLower(strings.TrimPrefix(ext, "."))
	format, known := formatExts[key]
	if !known {
		return -1, ErrUnsupportedFormat
	}
	return format, nil
}
// FormatFromFilename parses image format from filename:
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
func FormatFromFilename(filename string) (Format, error) {
	return FormatFromExtension(filepath.Ext(filename))
}
// encodeConfig holds the encoding settings collected from EncodeOptions.
type encodeConfig struct {
	jpegQuality         int
	gifNumColors        int
	gifQuantizer        draw.Quantizer
	gifDrawer           draw.Drawer
	pngCompressionLevel png.CompressionLevel
}

// defaultEncodeConfig provides the defaults used when no options are given.
var defaultEncodeConfig = encodeConfig{
	jpegQuality:         95,
	gifNumColors:        256,
	gifQuantizer:        nil,
	gifDrawer:           nil,
	pngCompressionLevel: png.DefaultCompression,
}

// EncodeOption sets an optional parameter for the Encode and Save functions.
type EncodeOption func(*encodeConfig)

// JPEGQuality returns an EncodeOption that sets the output JPEG quality.
// Quality ranges from 1 to 100 inclusive, higher is better. Default is 95.
func JPEGQuality(quality int) EncodeOption {
	return func(c *encodeConfig) {
		c.jpegQuality = quality
	}
}

// GIFNumColors returns an EncodeOption that sets the maximum number of colors
// used in the GIF-encoded image. It ranges from 1 to 256. Default is 256.
func GIFNumColors(numColors int) EncodeOption {
	return func(c *encodeConfig) {
		c.gifNumColors = numColors
	}
}

// GIFQuantizer returns an EncodeOption that sets the quantizer that is used to produce
// a palette of the GIF-encoded image.
func GIFQuantizer(quantizer draw.Quantizer) EncodeOption {
	return func(c *encodeConfig) {
		c.gifQuantizer = quantizer
	}
}

// GIFDrawer returns an EncodeOption that sets the drawer that is used to convert
// the source image to the desired palette of the GIF-encoded image.
func GIFDrawer(drawer draw.Drawer) EncodeOption {
	return func(c *encodeConfig) {
		c.gifDrawer = drawer
	}
}

// PNGCompressionLevel returns an EncodeOption that sets the compression level
// of the PNG-encoded image. Default is png.DefaultCompression.
func PNGCompressionLevel(level png.CompressionLevel) EncodeOption {
	return func(c *encodeConfig) {
		c.pngCompressionLevel = level
	}
}
// Encode writes the image img to w in the specified format (JPEG, PNG, GIF, TIFF or BMP).
func Encode(w io.Writer, img image.Image, format Format, opts ...EncodeOption) error {
	cfg := defaultEncodeConfig
	for _, option := range opts {
		option(&cfg)
	}

	switch format {
	case JPEG:
		// Fast path: a fully opaque NRGBA image has the same byte values
		// as its RGBA (premultiplied) counterpart, so it can be re-wrapped
		// as RGBA without copying the pixel buffer.
		if nrgba, ok := img.(*image.NRGBA); ok && nrgba.Opaque() {
			rgba := &image.RGBA{
				Pix:    nrgba.Pix,
				Stride: nrgba.Stride,
				Rect:   nrgba.Rect,
			}
			return jpeg.Encode(w, rgba, &jpeg.Options{Quality: cfg.jpegQuality})
		}
		return jpeg.Encode(w, img, &jpeg.Options{Quality: cfg.jpegQuality})

	case PNG:
		encoder := png.Encoder{CompressionLevel: cfg.pngCompressionLevel}
		return encoder.Encode(w, img)

	case GIF:
		return gif.Encode(w, img, &gif.Options{
			NumColors: cfg.gifNumColors,
			Quantizer: cfg.gifQuantizer,
			Drawer:    cfg.gifDrawer,
		})

	case TIFF:
		return tiff.Encode(w, img, &tiff.Options{Compression: tiff.Deflate, Predictor: true})

	case BMP:
		return bmp.Encode(w, img)
	}

	return ErrUnsupportedFormat
}
// Save saves the image to file with the specified filename.
// The format is determined from the filename extension:
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
//
// Examples:
//
//	// Save the image as PNG.
//	err := imaging.Save(img, "out.png")
//
//	// Save the image as JPEG with optional quality parameter set to 80.
//	err := imaging.Save(img, "out.jpg", imaging.JPEGQuality(80))
//
func Save(img image.Image, filename string, opts ...EncodeOption) (err error) {
	format, err := FormatFromFilename(filename)
	if err != nil {
		return err
	}
	file, err := fs.Create(filename)
	if err != nil {
		return err
	}
	// Report the Close error only when encoding itself succeeded.
	defer func() {
		if cerr := file.Close(); err == nil {
			err = cerr
		}
	}()
	return Encode(file, img, format, opts...)
}
// orientation is an EXIF flag that specifies the transformation
// that should be applied to image to display it correctly.
type orientation int

// EXIF orientation tag values. Value 0 is not part of the EXIF encoding;
// it is used here to mean "no usable orientation information found".
const (
	orientationUnspecified = 0
	orientationNormal      = 1
	orientationFlipH       = 2
	orientationRotate180   = 3
	orientationFlipV       = 4
	orientationTranspose   = 5
	orientationRotate270   = 6
	orientationTransverse  = 7
	orientationRotate90    = 8
)
// readOrientation tries to read the orientation EXIF flag from image data in r.
// If the EXIF data block is not found or the orientation flag is not found
// or any other error occurs while reading the data, it returns the
// orientationUnspecified (0) value.
//
// It expects JPEG input: SOI marker, then marker segments until APP1,
// then a TIFF-style EXIF block whose IFD entries are scanned for tag 0x0112.
func readOrientation(r io.Reader) orientation {
	const (
		markerSOI      = 0xffd8
		markerAPP1     = 0xffe1
		exifHeader     = 0x45786966
		byteOrderBE    = 0x4d4d
		byteOrderLE    = 0x4949
		orientationTag = 0x0112
	)

	// Check if JPEG SOI marker is present.
	var soi uint16
	if err := binary.Read(r, binary.BigEndian, &soi); err != nil {
		return orientationUnspecified
	}
	if soi != markerSOI {
		return orientationUnspecified // Missing JPEG SOI marker.
	}

	// Find JPEG APP1 marker, skipping over any other marker segments.
	for {
		var marker, size uint16
		if err := binary.Read(r, binary.BigEndian, &marker); err != nil {
			return orientationUnspecified
		}
		if err := binary.Read(r, binary.BigEndian, &size); err != nil {
			return orientationUnspecified
		}
		if marker>>8 != 0xff {
			return orientationUnspecified // Invalid JPEG marker.
		}
		if marker == markerAPP1 {
			break
		}
		if size < 2 {
			return orientationUnspecified // Invalid block size.
		}
		// size includes its own two bytes; skip the remaining payload.
		if _, err := io.CopyN(ioutil.Discard, r, int64(size-2)); err != nil {
			return orientationUnspecified
		}
	}

	// Check if EXIF header is present.
	var header uint32
	if err := binary.Read(r, binary.BigEndian, &header); err != nil {
		return orientationUnspecified
	}
	if header != exifHeader {
		return orientationUnspecified
	}
	if _, err := io.CopyN(ioutil.Discard, r, 2); err != nil {
		return orientationUnspecified
	}

	// Read byte order information.
	var (
		byteOrderTag uint16
		byteOrder    binary.ByteOrder
	)
	if err := binary.Read(r, binary.BigEndian, &byteOrderTag); err != nil {
		return orientationUnspecified
	}
	switch byteOrderTag {
	case byteOrderBE:
		byteOrder = binary.BigEndian
	case byteOrderLE:
		byteOrder = binary.LittleEndian
	default:
		return orientationUnspecified // Invalid byte order flag.
	}
	if _, err := io.CopyN(ioutil.Discard, r, 2); err != nil {
		return orientationUnspecified
	}

	// Skip the EXIF offset (offset to the first IFD, counted from the
	// start of the TIFF header; 8 bytes have already been consumed).
	var offset uint32
	if err := binary.Read(r, byteOrder, &offset); err != nil {
		return orientationUnspecified
	}
	if offset < 8 {
		return orientationUnspecified // Invalid offset value.
	}
	if _, err := io.CopyN(ioutil.Discard, r, int64(offset-8)); err != nil {
		return orientationUnspecified
	}

	// Read the number of tags.
	var numTags uint16
	if err := binary.Read(r, byteOrder, &numTags); err != nil {
		return orientationUnspecified
	}

	// Find the orientation tag. Each IFD entry is 12 bytes: 2-byte tag
	// (already read below), then 10 bytes of type/count/value.
	for i := 0; i < int(numTags); i++ {
		var tag uint16
		if err := binary.Read(r, byteOrder, &tag); err != nil {
			return orientationUnspecified
		}
		if tag != orientationTag {
			if _, err := io.CopyN(ioutil.Discard, r, 10); err != nil {
				return orientationUnspecified
			}
			continue
		}
		// Skip the entry's type and count fields to reach the value.
		if _, err := io.CopyN(ioutil.Discard, r, 6); err != nil {
			return orientationUnspecified
		}
		var val uint16
		if err := binary.Read(r, byteOrder, &val); err != nil {
			return orientationUnspecified
		}
		if val < 1 || val > 8 {
			return orientationUnspecified // Invalid tag value.
		}
		return orientation(val)
	}
	return orientationUnspecified // Missing orientation tag.
}
// fixOrientation applies a transform to img corresponding to the given orientation flag.
func fixOrientation(img image.Image, o orientation) image.Image {
	switch o {
	case orientationFlipH:
		return FlipH(img)
	case orientationFlipV:
		return FlipV(img)
	case orientationRotate90:
		return Rotate90(img)
	case orientationRotate180:
		return Rotate180(img)
	case orientationRotate270:
		return Rotate270(img)
	case orientationTranspose:
		return Transpose(img)
	case orientationTransverse:
		return Transverse(img)
	}
	// orientationNormal and any unrecognized flag: return the image untouched.
	return img
}

595
vendor/github.com/disintegration/imaging/resize.go generated vendored Normal file
View file

@ -0,0 +1,595 @@
package imaging
import (
"image"
"math"
)
// indexWeight is one precomputed kernel contribution: the source sample at
// index is multiplied by weight when accumulating a destination sample.
type indexWeight struct {
	index int
	weight float64
}
// precomputeWeights returns, for each destination sample along one axis, the
// list of contributing source indices and their normalized filter weights.
func precomputeWeights(dstSize, srcSize int, filter ResampleFilter) [][]indexWeight {
	du := float64(srcSize) / float64(dstSize)
	scale := du
	if scale < 1.0 {
		scale = 1.0 // when upscaling, keep the kernel at its natural support
	}
	ru := math.Ceil(scale * filter.Support)
	out := make([][]indexWeight, dstSize)
	// All rows share one backing array; each row is re-sliced from its tail below.
	tmp := make([]indexWeight, 0, dstSize*int(ru+2)*2)
	for v := 0; v < dstSize; v++ {
		fu := (float64(v)+0.5)*du - 0.5 // center of destination sample v in source coords
		begin := int(math.Ceil(fu - ru))
		if begin < 0 {
			begin = 0
		}
		end := int(math.Floor(fu + ru))
		if end > srcSize-1 {
			end = srcSize - 1
		}
		var sum float64
		for u := begin; u <= end; u++ {
			w := filter.Kernel((float64(u) - fu) / scale)
			if w != 0 {
				sum += w
				tmp = append(tmp, indexWeight{index: u, weight: w})
			}
		}
		// Normalize the row so weights sum to 1, preserving overall brightness.
		if sum != 0 {
			for i := range tmp {
				tmp[i].weight /= sum
			}
		}
		out[v] = tmp
		tmp = tmp[len(tmp):] // continue appending into the same backing array
	}
	return out
}
// Resize resizes the image to the specified width and height using the specified resampling
// filter and returns the transformed image. If one of width or height is 0, the image aspect
// ratio is preserved.
//
// Example:
//
//	dstImage := imaging.Resize(srcImage, 800, 600, imaging.Lanczos)
//
func Resize(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
	dstW, dstH := width, height
	if dstW < 0 || dstH < 0 {
		return &image.NRGBA{}
	}
	if dstW == 0 && dstH == 0 {
		return &image.NRGBA{}
	}
	srcW := img.Bounds().Dx()
	srcH := img.Bounds().Dy()
	if srcW <= 0 || srcH <= 0 {
		return &image.NRGBA{}
	}
	// If new width or height is 0 then preserve aspect ratio, minimum 1px.
	if dstW == 0 {
		tmpW := float64(dstH) * float64(srcW) / float64(srcH)
		dstW = int(math.Max(1.0, math.Floor(tmpW+0.5)))
	}
	if dstH == 0 {
		tmpH := float64(dstW) * float64(srcH) / float64(srcW)
		dstH = int(math.Max(1.0, math.Floor(tmpH+0.5)))
	}
	if filter.Support <= 0 {
		// Nearest-neighbor special case.
		return resizeNearest(img, dstW, dstH)
	}
	// Two-pass separable resampling when both dimensions change.
	if srcW != dstW && srcH != dstH {
		return resizeVertical(resizeHorizontal(img, dstW, filter), dstH, filter)
	}
	if srcW != dstW {
		return resizeHorizontal(img, dstW, filter)
	}
	if srcH != dstH {
		return resizeVertical(img, dstH, filter)
	}
	// Same size: just copy.
	return Clone(img)
}
// resizeHorizontal resamples each row of img to the given width using the
// filter's precomputed weights. Colors are accumulated premultiplied by alpha
// and divided back out at the end.
func resizeHorizontal(img image.Image, width int, filter ResampleFilter) *image.NRGBA {
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, width, src.h))
	weights := precomputeWeights(width, src.w, filter)
	parallel(0, src.h, func(ys <-chan int) {
		scanLine := make([]uint8, src.w*4)
		for y := range ys {
			src.scan(0, y, src.w, y+1, scanLine)
			j0 := y * dst.Stride
			for x := range weights {
				var r, g, b, a float64
				for _, w := range weights[x] {
					i := w.index * 4
					s := scanLine[i : i+4 : i+4]
					aw := float64(s[3]) * w.weight
					// Accumulate alpha-weighted (premultiplied) color.
					r += float64(s[0]) * aw
					g += float64(s[1]) * aw
					b += float64(s[2]) * aw
					a += aw
				}
				// a == 0 means a fully transparent result; the pixel stays zero.
				if a != 0 {
					aInv := 1 / a
					j := j0 + x*4
					d := dst.Pix[j : j+4 : j+4]
					d[0] = clamp(r * aInv)
					d[1] = clamp(g * aInv)
					d[2] = clamp(b * aInv)
					d[3] = clamp(a)
				}
			}
		}
	})
	return dst
}
// resizeVertical resamples each column of img to the given height; it is the
// column-wise counterpart of resizeHorizontal.
func resizeVertical(img image.Image, height int, filter ResampleFilter) *image.NRGBA {
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, src.w, height))
	weights := precomputeWeights(height, src.h, filter)
	parallel(0, src.w, func(xs <-chan int) {
		scanLine := make([]uint8, src.h*4)
		for x := range xs {
			src.scan(x, 0, x+1, src.h, scanLine)
			for y := range weights {
				var r, g, b, a float64
				for _, w := range weights[y] {
					i := w.index * 4
					s := scanLine[i : i+4 : i+4]
					aw := float64(s[3]) * w.weight
					// Accumulate alpha-weighted (premultiplied) color.
					r += float64(s[0]) * aw
					g += float64(s[1]) * aw
					b += float64(s[2]) * aw
					a += aw
				}
				if a != 0 {
					aInv := 1 / a
					j := y*dst.Stride + x*4
					d := dst.Pix[j : j+4 : j+4]
					d[0] = clamp(r * aInv)
					d[1] = clamp(g * aInv)
					d[2] = clamp(b * aInv)
					d[3] = clamp(a)
				}
			}
		}
	})
	return dst
}
// resizeNearest is a fast nearest-neighbor resize, no filtering.
func resizeNearest(img image.Image, width, height int) *image.NRGBA {
	dst := image.NewNRGBA(image.Rect(0, 0, width, height))
	dx := float64(img.Bounds().Dx()) / float64(width)
	dy := float64(img.Bounds().Dy()) / float64(height)
	if dx > 1 && dy > 1 {
		// Downscaling in both dimensions: scan single source pixels directly
		// into the destination buffer.
		src := newScanner(img)
		parallel(0, height, func(ys <-chan int) {
			for y := range ys {
				srcY := int((float64(y) + 0.5) * dy)
				dstOff := y * dst.Stride
				for x := 0; x < width; x++ {
					srcX := int((float64(x) + 0.5) * dx)
					src.scan(srcX, srcY, srcX+1, srcY+1, dst.Pix[dstOff:dstOff+4])
					dstOff += 4
				}
			}
		})
	} else {
		// Upscaling (source pixels are reused): convert once, then copy 4-byte
		// pixels instead of re-scanning each one.
		src := toNRGBA(img)
		parallel(0, height, func(ys <-chan int) {
			for y := range ys {
				srcY := int((float64(y) + 0.5) * dy)
				srcOff0 := srcY * src.Stride
				dstOff := y * dst.Stride
				for x := 0; x < width; x++ {
					srcX := int((float64(x) + 0.5) * dx)
					srcOff := srcOff0 + srcX*4
					copy(dst.Pix[dstOff:dstOff+4], src.Pix[srcOff:srcOff+4])
					dstOff += 4
				}
			}
		})
	}
	return dst
}
// Fit scales down the image using the specified resample filter to fit the
// specified maximum width and height and returns the transformed image.
//
// Example:
//
//	dstImage := imaging.Fit(srcImage, 800, 600, imaging.Lanczos)
//
func Fit(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
	if width <= 0 || height <= 0 {
		return &image.NRGBA{}
	}
	b := img.Bounds()
	w, h := b.Dx(), b.Dy()
	if w <= 0 || h <= 0 {
		return &image.NRGBA{}
	}
	// Already fits inside the box: return an untouched copy.
	if w <= width && h <= height {
		return Clone(img)
	}
	srcRatio := float64(w) / float64(h)
	boxRatio := float64(width) / float64(height)
	if srcRatio > boxRatio {
		// Width is the limiting dimension.
		return Resize(img, width, int(float64(width)/srcRatio), filter)
	}
	// Height is the limiting dimension.
	return Resize(img, int(float64(height)*srcRatio), height, filter)
}
// Fill creates an image with the specified dimensions and fills it with the
// scaled source image. To achieve the correct aspect ratio without stretching,
// the source image will be cropped.
//
// Example:
//
//	dstImage := imaging.Fill(srcImage, 800, 600, imaging.Center, imaging.Lanczos)
//
func Fill(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
	if width <= 0 || height <= 0 {
		return &image.NRGBA{}
	}
	b := img.Bounds()
	w, h := b.Dx(), b.Dy()
	if w <= 0 || h <= 0 {
		return &image.NRGBA{}
	}
	// Exact size already: just copy.
	if w == width && h == height {
		return Clone(img)
	}
	// Cropping first is faster, but on very small sources it loses accuracy,
	// so tiny images are resized first instead.
	if w >= 100 && h >= 100 {
		return cropAndResize(img, width, height, anchor, filter)
	}
	return resizeAndCrop(img, width, height, anchor, filter)
}
// cropAndResize crops the image to the smallest possible size that has the required aspect ratio using
// the given anchor point, then scales it to the specified dimensions and returns the transformed image.
//
// This is generally faster than resizing first, but may result in inaccuracies when used on small source images.
func cropAndResize(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
	dstW, dstH := width, height
	srcBounds := img.Bounds()
	srcW := srcBounds.Dx()
	srcH := srcBounds.Dy()
	srcAspectRatio := float64(srcW) / float64(srcH)
	dstAspectRatio := float64(dstW) / float64(dstH)
	var tmp *image.NRGBA
	if srcAspectRatio < dstAspectRatio {
		// Source is taller than the target shape: trim height, keep full width.
		cropH := float64(srcW) * float64(dstH) / float64(dstW)
		tmp = CropAnchor(img, srcW, int(math.Max(1, cropH)+0.5), anchor)
	} else {
		// Source is wider than the target shape: trim width, keep full height.
		cropW := float64(srcH) * float64(dstW) / float64(dstH)
		tmp = CropAnchor(img, int(math.Max(1, cropW)+0.5), srcH, anchor)
	}
	return Resize(tmp, dstW, dstH, filter)
}
// resizeAndCrop resizes the image to the smallest possible size that will
// cover the specified dimensions, then crops the result to those dimensions
// using the given anchor point and returns the transformed image.
func resizeAndCrop(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
	b := img.Bounds()
	srcRatio := float64(b.Dx()) / float64(b.Dy())
	dstRatio := float64(width) / float64(height)
	var scaled *image.NRGBA
	if srcRatio < dstRatio {
		// Source is relatively taller: match the target width, keep aspect.
		scaled = Resize(img, width, 0, filter)
	} else {
		// Source is relatively wider: match the target height, keep aspect.
		scaled = Resize(img, 0, height, filter)
	}
	return CropAnchor(scaled, width, height, anchor)
}
// Thumbnail scales the image up or down using the specified resample filter, crops it
// to the specified width and height and returns the transformed image.
// It is shorthand for Fill with a Center anchor.
//
// Example:
//
//	dstImage := imaging.Thumbnail(srcImage, 100, 100, imaging.Lanczos)
//
func Thumbnail(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
	return Fill(img, width, height, Center, filter)
}
// ResampleFilter specifies a resampling filter to be used for image resizing.
//
// General filter recommendations:
//
//	- Lanczos
//		A high-quality resampling filter for photographic images yielding sharp results.
//
//	- CatmullRom
//		A sharp cubic filter that is faster than Lanczos filter while providing similar results.
//
//	- MitchellNetravali
//		A cubic filter that produces smoother results with less ringing artifacts than CatmullRom.
//
//	- Linear
//		Bilinear resampling filter, produces a smooth output. Faster than cubic filters.
//
//	- Box
//		Simple and fast averaging filter appropriate for downscaling.
//		When upscaling it's similar to NearestNeighbor.
//
//	- NearestNeighbor
//		Fastest resampling filter, no antialiasing.
//
type ResampleFilter struct {
	Support float64 // kernel radius; 0 selects the nearest-neighbor fast path
	Kernel func(float64) float64 // kernel function, evaluated on [-Support, Support]
}
// NearestNeighbor is a nearest-neighbor filter (no anti-aliasing).
var NearestNeighbor ResampleFilter
// Box filter (averaging pixels).
var Box ResampleFilter
// Linear filter.
var Linear ResampleFilter
// Hermite cubic spline filter (BC-spline; B=0; C=0).
var Hermite ResampleFilter
// MitchellNetravali is Mitchell-Netravali cubic filter (BC-spline; B=1/3; C=1/3).
var MitchellNetravali ResampleFilter
// CatmullRom is a Catmull-Rom - sharp cubic filter (BC-spline; B=0; C=0.5).
var CatmullRom ResampleFilter
// BSpline is a smooth cubic filter (BC-spline; B=1; C=0).
var BSpline ResampleFilter
// Gaussian is a Gaussian blurring filter.
var Gaussian ResampleFilter
// Bartlett is a Bartlett-windowed sinc filter (3 lobes).
var Bartlett ResampleFilter
// Lanczos filter (3 lobes).
var Lanczos ResampleFilter
// Hann is a Hann-windowed sinc filter (3 lobes).
var Hann ResampleFilter
// Hamming is a Hamming-windowed sinc filter (3 lobes).
var Hamming ResampleFilter
// Blackman is a Blackman-windowed sinc filter (3 lobes).
var Blackman ResampleFilter
// Welch is a Welch-windowed sinc filter (parabolic window, 3 lobes).
var Welch ResampleFilter
// Cosine is a Cosine-windowed sinc filter (3 lobes).
var Cosine ResampleFilter
// bcspline evaluates the Mitchell-Netravali BC-spline kernel with the given
// B and C parameters at x. The kernel is symmetric and zero outside |x| < 2.
func bcspline(x, b, c float64) float64 {
	x = math.Abs(x)
	switch {
	case x < 1.0:
		return ((12-9*b-6*c)*x*x*x + (-18+12*b+6*c)*x*x + (6 - 2*b)) / 6
	case x < 2.0:
		return ((-b-6*c)*x*x*x + (6*b+30*c)*x*x + (-12*b-48*c)*x + (8*b + 24*c)) / 6
	}
	return 0
}
// sinc is the normalized sinc function sin(pi*x)/(pi*x), with sinc(0) = 1.
func sinc(x float64) float64 {
	if x == 0 {
		return 1
	}
	t := math.Pi * x
	return math.Sin(t) / t
}
// init populates the predefined ResampleFilter variables. Each windowed-sinc
// filter combines sinc with its window function over a 3-lobe support.
func init() {
	NearestNeighbor = ResampleFilter{
		Support: 0.0, // special case - not applying the filter
	}
	Box = ResampleFilter{
		Support: 0.5,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x <= 0.5 {
				return 1.0
			}
			return 0
		},
	}
	Linear = ResampleFilter{
		Support: 1.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 1.0 {
				return 1.0 - x
			}
			return 0
		},
	}
	Hermite = ResampleFilter{
		Support: 1.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 1.0 {
				return bcspline(x, 0.0, 0.0)
			}
			return 0
		},
	}
	MitchellNetravali = ResampleFilter{
		Support: 2.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 2.0 {
				return bcspline(x, 1.0/3.0, 1.0/3.0)
			}
			return 0
		},
	}
	CatmullRom = ResampleFilter{
		Support: 2.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 2.0 {
				return bcspline(x, 0.0, 0.5)
			}
			return 0
		},
	}
	BSpline = ResampleFilter{
		Support: 2.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 2.0 {
				return bcspline(x, 1.0, 0.0)
			}
			return 0
		},
	}
	Gaussian = ResampleFilter{
		Support: 2.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 2.0 {
				return math.Exp(-2 * x * x)
			}
			return 0
		},
	}
	Bartlett = ResampleFilter{
		Support: 3.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 3.0 {
				return sinc(x) * (3.0 - x) / 3.0
			}
			return 0
		},
	}
	Lanczos = ResampleFilter{
		Support: 3.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 3.0 {
				return sinc(x) * sinc(x/3.0)
			}
			return 0
		},
	}
	Hann = ResampleFilter{
		Support: 3.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 3.0 {
				return sinc(x) * (0.5 + 0.5*math.Cos(math.Pi*x/3.0))
			}
			return 0
		},
	}
	Hamming = ResampleFilter{
		Support: 3.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 3.0 {
				return sinc(x) * (0.54 + 0.46*math.Cos(math.Pi*x/3.0))
			}
			return 0
		},
	}
	Blackman = ResampleFilter{
		Support: 3.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 3.0 {
				return sinc(x) * (0.42 - 0.5*math.Cos(math.Pi*x/3.0+math.Pi) + 0.08*math.Cos(2.0*math.Pi*x/3.0))
			}
			return 0
		},
	}
	Welch = ResampleFilter{
		Support: 3.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 3.0 {
				return sinc(x) * (1.0 - (x * x / 9.0))
			}
			return 0
		},
	}
	Cosine = ResampleFilter{
		Support: 3.0,
		Kernel: func(x float64) float64 {
			x = math.Abs(x)
			if x < 3.0 {
				return sinc(x) * math.Cos((math.Pi/2.0)*(x/3.0))
			}
			return 0
		},
	}
}

285
vendor/github.com/disintegration/imaging/scanner.go generated vendored Normal file
View file

@ -0,0 +1,285 @@
package imaging
import (
"image"
"image/color"
)
// scanner reads rectangular regions of an image as 8-bit non-premultiplied
// RGBA data, with fast paths for the common standard-library image types.
type scanner struct {
	image image.Image
	w, h int
	palette []color.NRGBA // pre-converted palette for *image.Paletted sources
}
// newScanner wraps img in a scanner, caching its dimensions. For paletted
// images the palette is converted to NRGBA once, so scan can do a plain
// table lookup per pixel.
func newScanner(img image.Image) *scanner {
	b := img.Bounds()
	s := &scanner{image: img, w: b.Dx(), h: b.Dy()}
	if p, ok := img.(*image.Paletted); ok {
		s.palette = make([]color.NRGBA, len(p.Palette))
		for i, c := range p.Palette {
			s.palette[i] = color.NRGBAModel.Convert(c).(color.NRGBA)
		}
	}
	return s
}
// scan scans the given rectangular region of the image into dst.
// Coordinates are relative to the image's bounds (0..w, 0..h), and dst
// receives 4 bytes (NRGBA) per pixel in row-major order. Premultiplied
// formats (RGBA, RGBA64) are un-premultiplied on the fly.
func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
	switch img := s.image.(type) {
	case *image.NRGBA:
		// Same layout as dst: copy rows directly (single-pixel rows use an
		// unrolled copy to avoid the copy() call overhead).
		size := (x2 - x1) * 4
		j := 0
		i := y1*img.Stride + x1*4
		if size == 4 {
			for y := y1; y < y2; y++ {
				d := dst[j : j+4 : j+4]
				s := img.Pix[i : i+4 : i+4]
				d[0] = s[0]
				d[1] = s[1]
				d[2] = s[2]
				d[3] = s[3]
				j += size
				i += img.Stride
			}
		} else {
			for y := y1; y < y2; y++ {
				copy(dst[j:j+size], img.Pix[i:i+size])
				j += size
				i += img.Stride
			}
		}
	case *image.NRGBA64:
		// 16-bit non-premultiplied: keep the high byte of each channel.
		j := 0
		for y := y1; y < y2; y++ {
			i := y*img.Stride + x1*8
			for x := x1; x < x2; x++ {
				s := img.Pix[i : i+8 : i+8]
				d := dst[j : j+4 : j+4]
				d[0] = s[0]
				d[1] = s[2]
				d[2] = s[4]
				d[3] = s[6]
				j += 4
				i += 8
			}
		}
	case *image.RGBA:
		// Premultiplied 8-bit: divide color channels by alpha, with fast
		// paths for fully transparent and fully opaque pixels.
		j := 0
		for y := y1; y < y2; y++ {
			i := y*img.Stride + x1*4
			for x := x1; x < x2; x++ {
				d := dst[j : j+4 : j+4]
				a := img.Pix[i+3]
				switch a {
				case 0:
					d[0] = 0
					d[1] = 0
					d[2] = 0
					d[3] = a
				case 0xff:
					s := img.Pix[i : i+4 : i+4]
					d[0] = s[0]
					d[1] = s[1]
					d[2] = s[2]
					d[3] = a
				default:
					s := img.Pix[i : i+4 : i+4]
					r16 := uint16(s[0])
					g16 := uint16(s[1])
					b16 := uint16(s[2])
					a16 := uint16(a)
					d[0] = uint8(r16 * 0xff / a16)
					d[1] = uint8(g16 * 0xff / a16)
					d[2] = uint8(b16 * 0xff / a16)
					d[3] = a
				}
				j += 4
				i += 4
			}
		}
	case *image.RGBA64:
		// Premultiplied 16-bit: un-premultiply at 16-bit precision, then
		// keep the high byte.
		j := 0
		for y := y1; y < y2; y++ {
			i := y*img.Stride + x1*8
			for x := x1; x < x2; x++ {
				s := img.Pix[i : i+8 : i+8]
				d := dst[j : j+4 : j+4]
				a := s[6]
				switch a {
				case 0:
					d[0] = 0
					d[1] = 0
					d[2] = 0
				case 0xff:
					d[0] = s[0]
					d[1] = s[2]
					d[2] = s[4]
				default:
					r32 := uint32(s[0])<<8 | uint32(s[1])
					g32 := uint32(s[2])<<8 | uint32(s[3])
					b32 := uint32(s[4])<<8 | uint32(s[5])
					a32 := uint32(s[6])<<8 | uint32(s[7])
					d[0] = uint8((r32 * 0xffff / a32) >> 8)
					d[1] = uint8((g32 * 0xffff / a32) >> 8)
					d[2] = uint8((b32 * 0xffff / a32) >> 8)
				}
				d[3] = a
				j += 4
				i += 8
			}
		}
	case *image.Gray:
		// Grayscale: replicate the luma into R, G and B; fully opaque.
		j := 0
		for y := y1; y < y2; y++ {
			i := y*img.Stride + x1
			for x := x1; x < x2; x++ {
				c := img.Pix[i]
				d := dst[j : j+4 : j+4]
				d[0] = c
				d[1] = c
				d[2] = c
				d[3] = 0xff
				j += 4
				i++
			}
		}
	case *image.Gray16:
		// 16-bit grayscale: use the high byte of each sample.
		j := 0
		for y := y1; y < y2; y++ {
			i := y*img.Stride + x1*2
			for x := x1; x < x2; x++ {
				c := img.Pix[i]
				d := dst[j : j+4 : j+4]
				d[0] = c
				d[1] = c
				d[2] = c
				d[3] = 0xff
				j += 4
				i += 2
			}
		}
	case *image.YCbCr:
		// YCbCr (JPEG): convert to RGB with fixed-point math; the chroma
		// index depends on the image's subsampling ratio.
		j := 0
		x1 += img.Rect.Min.X
		x2 += img.Rect.Min.X
		y1 += img.Rect.Min.Y
		y2 += img.Rect.Min.Y
		hy := img.Rect.Min.Y / 2
		hx := img.Rect.Min.X / 2
		for y := y1; y < y2; y++ {
			iy := (y-img.Rect.Min.Y)*img.YStride + (x1 - img.Rect.Min.X)
			var yBase int
			switch img.SubsampleRatio {
			case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio422:
				yBase = (y - img.Rect.Min.Y) * img.CStride
			case image.YCbCrSubsampleRatio420, image.YCbCrSubsampleRatio440:
				yBase = (y/2 - hy) * img.CStride
			}
			for x := x1; x < x2; x++ {
				var ic int
				switch img.SubsampleRatio {
				case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio440:
					ic = yBase + (x - img.Rect.Min.X)
				case image.YCbCrSubsampleRatio422, image.YCbCrSubsampleRatio420:
					ic = yBase + (x/2 - hx)
				default:
					ic = img.COffset(x, y)
				}
				yy1 := int32(img.Y[iy]) * 0x10101
				cb1 := int32(img.Cb[ic]) - 128
				cr1 := int32(img.Cr[ic]) - 128
				// Fixed-point YCbCr->RGB with clamping to [0, 255].
				r := yy1 + 91881*cr1
				if uint32(r)&0xff000000 == 0 {
					r >>= 16
				} else {
					r = ^(r >> 31)
				}
				g := yy1 - 22554*cb1 - 46802*cr1
				if uint32(g)&0xff000000 == 0 {
					g >>= 16
				} else {
					g = ^(g >> 31)
				}
				b := yy1 + 116130*cb1
				if uint32(b)&0xff000000 == 0 {
					b >>= 16
				} else {
					b = ^(b >> 31)
				}
				d := dst[j : j+4 : j+4]
				d[0] = uint8(r)
				d[1] = uint8(g)
				d[2] = uint8(b)
				d[3] = 0xff
				iy++
				j += 4
			}
		}
	case *image.Paletted:
		// Paletted: look up the NRGBA color pre-converted in newScanner.
		j := 0
		for y := y1; y < y2; y++ {
			i := y*img.Stride + x1
			for x := x1; x < x2; x++ {
				c := s.palette[img.Pix[i]]
				d := dst[j : j+4 : j+4]
				d[0] = c.R
				d[1] = c.G
				d[2] = c.B
				d[3] = c.A
				j += 4
				i++
			}
		}
	default:
		// Generic fallback: go through the image.Image interface and
		// un-premultiply the 16-bit RGBA values it returns.
		j := 0
		b := s.image.Bounds()
		x1 += b.Min.X
		x2 += b.Min.X
		y1 += b.Min.Y
		y2 += b.Min.Y
		for y := y1; y < y2; y++ {
			for x := x1; x < x2; x++ {
				r16, g16, b16, a16 := s.image.At(x, y).RGBA()
				d := dst[j : j+4 : j+4]
				switch a16 {
				case 0xffff:
					d[0] = uint8(r16 >> 8)
					d[1] = uint8(g16 >> 8)
					d[2] = uint8(b16 >> 8)
					d[3] = 0xff
				case 0:
					d[0] = 0
					d[1] = 0
					d[2] = 0
					d[3] = 0
				default:
					d[0] = uint8(((r16 * 0xffff) / a16) >> 8)
					d[1] = uint8(((g16 * 0xffff) / a16) >> 8)
					d[2] = uint8(((b16 * 0xffff) / a16) >> 8)
					d[3] = uint8(a16 >> 8)
				}
				j += 4
			}
		}
	}
}

249
vendor/github.com/disintegration/imaging/tools.go generated vendored Normal file
View file

@ -0,0 +1,249 @@
package imaging
import (
"bytes"
"image"
"image/color"
"math"
)
// New creates a new image with the specified width and height, and fills it
// with the specified color. Non-positive dimensions yield an empty image.
func New(width, height int, fillColor color.Color) *image.NRGBA {
	if width <= 0 || height <= 0 {
		return &image.NRGBA{}
	}
	c := color.NRGBAModel.Convert(fillColor).(color.NRGBA)
	// Transparent black is the zero value of the pixel buffer: no fill needed.
	if (c == color.NRGBA{}) {
		return image.NewNRGBA(image.Rect(0, 0, width, height))
	}
	return &image.NRGBA{
		Pix: bytes.Repeat([]byte{c.R, c.G, c.B, c.A}, width*height),
		Stride: width * 4,
		Rect: image.Rect(0, 0, width, height),
	}
}
// Clone returns a copy of the given image as *image.NRGBA.
func Clone(img image.Image) *image.NRGBA {
	s := newScanner(img)
	out := image.NewNRGBA(image.Rect(0, 0, s.w, s.h))
	rowLen := s.w * 4
	parallel(0, s.h, func(rows <-chan int) {
		for row := range rows {
			// Scan each source row straight into the matching output row.
			off := row * out.Stride
			s.scan(0, row, s.w, row+1, out.Pix[off:off+rowLen])
		}
	})
	return out
}
// Anchor is the anchor point for image alignment.
type Anchor int
// Anchor point positions. Center is the zero value and the default.
const (
	Center Anchor = iota
	TopLeft
	Top
	TopRight
	Left
	Right
	BottomLeft
	Bottom
	BottomRight
)
// anchorPt returns the top-left point of a w-by-h rectangle aligned inside
// bounds b according to anchor. Any unrecognized anchor centers the rectangle.
func anchorPt(b image.Rectangle, w, h int, anchor Anchor) image.Point {
	var x, y int
	switch anchor {
	case TopLeft:
		x = b.Min.X
		y = b.Min.Y
	case Top:
		x = b.Min.X + (b.Dx()-w)/2
		y = b.Min.Y
	case TopRight:
		x = b.Max.X - w
		y = b.Min.Y
	case Left:
		x = b.Min.X
		y = b.Min.Y + (b.Dy()-h)/2
	case Right:
		x = b.Max.X - w
		y = b.Min.Y + (b.Dy()-h)/2
	case BottomLeft:
		x = b.Min.X
		y = b.Max.Y - h
	case Bottom:
		x = b.Min.X + (b.Dx()-w)/2
		y = b.Max.Y - h
	case BottomRight:
		x = b.Max.X - w
		y = b.Max.Y - h
	default:
		// Center (and any unknown anchor value).
		x = b.Min.X + (b.Dx()-w)/2
		y = b.Min.Y + (b.Dy()-h)/2
	}
	return image.Pt(x, y)
}
// Crop cuts out a rectangular region with the specified bounds
// from the image and returns the cropped image.
func Crop(img image.Image, rect image.Rectangle) *image.NRGBA {
	// Clip to the image and translate to scanner coordinates (origin 0,0).
	r := rect.Intersect(img.Bounds()).Sub(img.Bounds().Min)
	if r.Empty() {
		return &image.NRGBA{}
	}
	src := newScanner(img)
	dst := image.NewNRGBA(image.Rect(0, 0, r.Dx(), r.Dy()))
	rowSize := r.Dx() * 4
	parallel(r.Min.Y, r.Max.Y, func(ys <-chan int) {
		for y := range ys {
			i := (y - r.Min.Y) * dst.Stride
			src.scan(r.Min.X, y, r.Max.X, y+1, dst.Pix[i:i+rowSize])
		}
	})
	return dst
}
// CropAnchor cuts out a rectangular region with the specified size from the
// image using the specified anchor point and returns the cropped image.
func CropAnchor(img image.Image, width, height int, anchor Anchor) *image.NRGBA {
	bounds := img.Bounds()
	origin := anchorPt(bounds, width, height, anchor)
	// Place the crop window at the anchor and clip it to the image.
	window := image.Rect(0, 0, width, height).Add(origin)
	return Crop(img, bounds.Intersect(window))
}
// CropCenter cuts out a rectangular region with the specified size
// from the center of the image and returns the cropped image.
// It is shorthand for CropAnchor with a Center anchor.
func CropCenter(img image.Image, width, height int) *image.NRGBA {
	return CropAnchor(img, width, height, Center)
}
// Paste pastes the img image to the background image at the specified position and returns the combined image.
// Pixels of img outside the background bounds are discarded.
func Paste(background, img image.Image, pos image.Point) *image.NRGBA {
	dst := Clone(background)
	pos = pos.Sub(background.Bounds().Min)
	pasteRect := image.Rectangle{Min: pos, Max: pos.Add(img.Bounds().Size())}
	interRect := pasteRect.Intersect(dst.Bounds())
	if interRect.Empty() {
		return dst
	}
	src := newScanner(img)
	parallel(interRect.Min.Y, interRect.Max.Y, func(ys <-chan int) {
		for y := range ys {
			// Translate the destination row back into source coordinates.
			x1 := interRect.Min.X - pasteRect.Min.X
			x2 := interRect.Max.X - pasteRect.Min.X
			y1 := y - pasteRect.Min.Y
			y2 := y1 + 1
			i1 := y*dst.Stride + interRect.Min.X*4
			i2 := i1 + interRect.Dx()*4
			// Overwrite the destination row directly with the source pixels.
			src.scan(x1, y1, x2, y2, dst.Pix[i1:i2])
		}
	})
	return dst
}
// PasteCenter pastes the img image to the center of the background image and
// returns the combined image.
func PasteCenter(background, img image.Image) *image.NRGBA {
	bg := background.Bounds()
	// Align the centers of both images (integer division matches Paste's grid).
	x0 := bg.Min.X + bg.Dx()/2 - img.Bounds().Dx()/2
	y0 := bg.Min.Y + bg.Dy()/2 - img.Bounds().Dy()/2
	return Paste(background, img, image.Pt(x0, y0))
}
// Overlay draws the img image over the background image at given position
// and returns the combined image. Opacity parameter is the opacity of the img
// image layer, used to compose the images, it must be from 0.0 to 1.0.
//
// Examples:
//
//	// Draw spriteImage over backgroundImage at the given position (x=50, y=50).
//	dstImage := imaging.Overlay(backgroundImage, spriteImage, image.Pt(50, 50), 1.0)
//
//	// Blend two opaque images of the same size.
//	dstImage := imaging.Overlay(imageOne, imageTwo, image.Pt(0, 0), 0.5)
//
func Overlay(background, img image.Image, pos image.Point, opacity float64) *image.NRGBA {
	opacity = math.Min(math.Max(opacity, 0.0), 1.0) // Ensure 0.0 <= opacity <= 1.0.
	dst := Clone(background)
	pos = pos.Sub(background.Bounds().Min)
	pasteRect := image.Rectangle{Min: pos, Max: pos.Add(img.Bounds().Size())}
	interRect := pasteRect.Intersect(dst.Bounds())
	if interRect.Empty() {
		return dst
	}
	src := newScanner(img)
	parallel(interRect.Min.Y, interRect.Max.Y, func(ys <-chan int) {
		scanLine := make([]uint8, interRect.Dx()*4)
		for y := range ys {
			// Translate the destination row into source coordinates and scan it.
			x1 := interRect.Min.X - pasteRect.Min.X
			x2 := interRect.Max.X - pasteRect.Min.X
			y1 := y - pasteRect.Min.Y
			y2 := y1 + 1
			src.scan(x1, y1, x2, y2, scanLine)
			i := y*dst.Stride + interRect.Min.X*4
			j := 0
			for x := interRect.Min.X; x < interRect.Max.X; x++ {
				d := dst.Pix[i : i+4 : i+4]
				r1 := float64(d[0])
				g1 := float64(d[1])
				b1 := float64(d[2])
				a1 := float64(d[3])
				s := scanLine[j : j+4 : j+4]
				r2 := float64(s[0])
				g2 := float64(s[1])
				b2 := float64(s[2])
				a2 := float64(s[3])
				// Standard "over" compositing: coef2 is the effective source
				// alpha, coef1 the remaining background contribution.
				coef2 := opacity * a2 / 255
				coef1 := (1 - coef2) * a1 / 255
				coefSum := coef1 + coef2
				// Guard against division by zero when both pixels contribute
				// nothing (fully transparent); the original code produced NaN
				// here, and converting NaN to uint8 is implementation-defined.
				// In that case the (invisible) background color is kept as-is.
				if coefSum != 0 {
					wc1 := coef1 / coefSum
					wc2 := coef2 / coefSum
					d[0] = uint8(r1*wc1 + r2*wc2)
					d[1] = uint8(g1*wc1 + g2*wc2)
					d[2] = uint8(b1*wc1 + b2*wc2)
				}
				d[3] = uint8(math.Min(a1+a2*opacity*(255-a1)/255, 255))
				i += 4
				j += 4
			}
		}
	})
	return dst
}
// OverlayCenter overlays the img image to the center of the background image
// and returns the combined image. Opacity parameter is the opacity of the img
// image layer, used to compose the images, it must be from 0.0 to 1.0.
func OverlayCenter(background, img image.Image, opacity float64) *image.NRGBA {
	bg := background.Bounds()
	// Align the centers of both images (integer division matches Overlay's grid).
	x0 := bg.Min.X + bg.Dx()/2 - img.Bounds().Dx()/2
	y0 := bg.Min.Y + bg.Dy()/2 - img.Bounds().Dy()/2
	return Overlay(background, img, image.Pt(x0, y0), opacity)
}

268
vendor/github.com/disintegration/imaging/transform.go generated vendored Normal file
View file

@ -0,0 +1,268 @@
package imaging
import (
"image"
"image/color"
"math"
)
// FlipH flips the image horizontally (from left to right) and returns the transformed image.
func FlipH(img image.Image) *image.NRGBA {
	src := newScanner(img)
	dstW := src.w
	dstH := src.h
	rowSize := dstW * 4
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(0, dstH, func(ys <-chan int) {
		for dstY := range ys {
			i := dstY * dst.Stride
			srcY := dstY
			// Copy the row, then reverse its pixels in place.
			src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
			reverse(dst.Pix[i : i+rowSize])
		}
	})
	return dst
}
// FlipV flips the image vertically (from top to bottom) and returns the transformed image.
func FlipV(img image.Image) *image.NRGBA {
	s := newScanner(img)
	rowLen := s.w * 4
	out := image.NewNRGBA(image.Rect(0, 0, s.w, s.h))
	parallel(0, s.h, func(rows <-chan int) {
		for row := range rows {
			// Read the vertically mirrored source row straight into this row.
			off := row * out.Stride
			s.scan(0, s.h-row-1, s.w, s.h-row, out.Pix[off:off+rowLen])
		}
	})
	return out
}
// Transpose flips the image horizontally and rotates 90 degrees counter-clockwise.
// (Output dimensions are the source dimensions swapped.)
func Transpose(img image.Image) *image.NRGBA {
	src := newScanner(img)
	dstW := src.h
	dstH := src.w
	rowSize := dstW * 4
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(0, dstH, func(ys <-chan int) {
		for dstY := range ys {
			i := dstY * dst.Stride
			srcX := dstY
			// Destination row = source column, read top-to-bottom.
			src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
		}
	})
	return dst
}
// Transverse flips the image vertically and rotates 90 degrees counter-clockwise.
// (Output dimensions are the source dimensions swapped.)
func Transverse(img image.Image) *image.NRGBA {
	src := newScanner(img)
	dstW := src.h
	dstH := src.w
	rowSize := dstW * 4
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(0, dstH, func(ys <-chan int) {
		for dstY := range ys {
			i := dstY * dst.Stride
			srcX := dstH - dstY - 1
			// Destination row = mirrored source column, then reversed in place.
			src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
			reverse(dst.Pix[i : i+rowSize])
		}
	})
	return dst
}
// Rotate90 rotates the image 90 degrees counter-clockwise and returns the transformed image.
func Rotate90(img image.Image) *image.NRGBA {
	s := newScanner(img)
	w, h := s.h, s.w // dimensions swap under a 90-degree rotation
	rowLen := w * 4
	out := image.NewNRGBA(image.Rect(0, 0, w, h))
	parallel(0, h, func(rows <-chan int) {
		for row := range rows {
			off := row * out.Stride
			col := h - row - 1
			// Destination row = source column (right to left), read top-to-bottom.
			s.scan(col, 0, col+1, s.h, out.Pix[off:off+rowLen])
		}
	})
	return out
}
// Rotate180 rotates the image 180 degrees counter-clockwise and returns the transformed image.
func Rotate180(img image.Image) *image.NRGBA {
	src := newScanner(img)
	dstW := src.w
	dstH := src.h
	rowSize := dstW * 4
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(0, dstH, func(ys <-chan int) {
		for dstY := range ys {
			i := dstY * dst.Stride
			srcY := dstH - dstY - 1
			// Vertically mirrored row, then reversed in place (= 180 degree turn).
			src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
			reverse(dst.Pix[i : i+rowSize])
		}
	})
	return dst
}
// Rotate270 rotates the image 270 degrees counter-clockwise and returns the transformed image.
func Rotate270(img image.Image) *image.NRGBA {
	src := newScanner(img)
	dstW := src.h
	dstH := src.w
	rowSize := dstW * 4
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(0, dstH, func(ys <-chan int) {
		for dstY := range ys {
			i := dstY * dst.Stride
			srcX := dstY
			// Destination row = source column, reversed in place.
			src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
			reverse(dst.Pix[i : i+rowSize])
		}
	})
	return dst
}
// Rotate rotates an image by the given angle counter-clockwise.
// The angle parameter is the rotation angle in degrees.
// The bgColor parameter specifies the color of the uncovered zone after the rotation.
// Right-angle rotations take the exact fast paths; other angles use
// bilinear interpolation around the image center.
func Rotate(img image.Image, angle float64, bgColor color.Color) *image.NRGBA {
	// Normalize the angle to [0, 360).
	angle = angle - math.Floor(angle/360)*360
	switch angle {
	case 0:
		return Clone(img)
	case 90:
		return Rotate90(img)
	case 180:
		return Rotate180(img)
	case 270:
		return Rotate270(img)
	}
	src := toNRGBA(img)
	srcW := src.Bounds().Max.X
	srcH := src.Bounds().Max.Y
	dstW, dstH := rotatedSize(srcW, srcH, angle)
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	if dstW <= 0 || dstH <= 0 {
		return dst
	}
	// Offsets to the pixel-grid centers of the source and destination.
	srcXOff := float64(srcW)/2 - 0.5
	srcYOff := float64(srcH)/2 - 0.5
	dstXOff := float64(dstW)/2 - 0.5
	dstYOff := float64(dstH)/2 - 0.5
	bgColorNRGBA := color.NRGBAModel.Convert(bgColor).(color.NRGBA)
	sin, cos := math.Sincos(math.Pi * angle / 180)
	parallel(0, dstH, func(ys <-chan int) {
		for dstY := range ys {
			for dstX := 0; dstX < dstW; dstX++ {
				// Inverse-map the destination pixel into source coordinates.
				xf, yf := rotatePoint(float64(dstX)-dstXOff, float64(dstY)-dstYOff, sin, cos)
				xf, yf = xf+srcXOff, yf+srcYOff
				interpolatePoint(dst, dstX, dstY, src, xf, yf, bgColorNRGBA)
			}
		}
	})
	return dst
}
// rotatePoint applies a 2D rotation (given as precomputed sin/cos) to (x, y).
func rotatePoint(x, y, sin, cos float64) (float64, float64) {
	xr := x*cos - y*sin
	yr := x*sin + y*cos
	return xr, yr
}
// rotatedSize returns the bounding-box dimensions of a w-by-h image rotated
// by angle degrees counter-clockwise.
func rotatedSize(w, h int, angle float64) (int, int) {
	if w <= 0 || h <= 0 {
		return 0, 0
	}
	sin, cos := math.Sincos(math.Pi * angle / 180)
	// Rotate three corners (the fourth is the origin) and take the extremes.
	x1, y1 := rotatePoint(float64(w-1), 0, sin, cos)
	x2, y2 := rotatePoint(float64(w-1), float64(h-1), sin, cos)
	x3, y3 := rotatePoint(0, float64(h-1), sin, cos)
	minx := math.Min(x1, math.Min(x2, math.Min(x3, 0)))
	maxx := math.Max(x1, math.Max(x2, math.Max(x3, 0)))
	miny := math.Min(y1, math.Min(y2, math.Min(y3, 0)))
	maxy := math.Max(y1, math.Max(y2, math.Max(y3, 0)))
	neww := maxx - minx + 1
	// Grow by one pixel when the fractional overhang exceeds the 0.1 fudge
	// threshold, so edge pixels are not clipped.
	if neww-math.Floor(neww) > 0.1 {
		neww++
	}
	newh := maxy - miny + 1
	if newh-math.Floor(newh) > 0.1 {
		newh++
	}
	return int(neww), int(newh)
}
// interpolatePoint writes to dst pixel (dstX, dstY) the bilinear interpolation
// of src at the fractional position (xf, yf). Samples outside src contribute
// bgColor; colors are blended alpha-weighted to avoid halos at transparent edges.
func interpolatePoint(dst *image.NRGBA, dstX, dstY int, src *image.NRGBA, xf, yf float64, bgColor color.NRGBA) {
	j := dstY*dst.Stride + dstX*4
	d := dst.Pix[j : j+4 : j+4]
	x0 := int(math.Floor(xf))
	y0 := int(math.Floor(yf))
	bounds := src.Bounds()
	// Entirely outside the source (no neighbor in range): plain background.
	if !image.Pt(x0, y0).In(image.Rect(bounds.Min.X-1, bounds.Min.Y-1, bounds.Max.X, bounds.Max.Y)) {
		d[0] = bgColor.R
		d[1] = bgColor.G
		d[2] = bgColor.B
		d[3] = bgColor.A
		return
	}
	xq := xf - float64(x0)
	yq := yf - float64(y0)
	// The 2x2 neighborhood and its bilinear weights.
	points := [4]image.Point{
		{x0, y0},
		{x0 + 1, y0},
		{x0, y0 + 1},
		{x0 + 1, y0 + 1},
	}
	weights := [4]float64{
		(1 - xq) * (1 - yq),
		xq * (1 - yq),
		(1 - xq) * yq,
		xq * yq,
	}
	var r, g, b, a float64
	for i := 0; i < 4; i++ {
		p := points[i]
		w := weights[i]
		if p.In(bounds) {
			i := p.Y*src.Stride + p.X*4
			s := src.Pix[i : i+4 : i+4]
			wa := float64(s[3]) * w
			r += float64(s[0]) * wa
			g += float64(s[1]) * wa
			b += float64(s[2]) * wa
			a += wa
		} else {
			// Out-of-bounds neighbors contribute the background color.
			wa := float64(bgColor.A) * w
			r += float64(bgColor.R) * wa
			g += float64(bgColor.G) * wa
			b += float64(bgColor.B) * wa
			a += wa
		}
	}
	// a == 0 means fully transparent; the pixel stays at its zero value.
	if a != 0 {
		aInv := 1 / a
		d[0] = clamp(r * aInv)
		d[1] = clamp(g * aInv)
		d[2] = clamp(b * aInv)
		d[3] = clamp(a)
	}
}

167
vendor/github.com/disintegration/imaging/utils.go generated vendored Normal file
View file

@ -0,0 +1,167 @@
package imaging
import (
"image"
"math"
"runtime"
"sync"
)
// parallel processes the data in separate goroutines.
func parallel(start, stop int, fn func(<-chan int)) {
	total := stop - start
	if total < 1 {
		return
	}
	workers := runtime.GOMAXPROCS(0)
	if workers > total {
		workers = total
	}
	// Enqueue every index up front into a fully buffered channel, then
	// let the workers drain it.
	indexes := make(chan int, total)
	for n := start; n < stop; n++ {
		indexes <- n
	}
	close(indexes)
	var wg sync.WaitGroup
	wg.Add(workers)
	for n := 0; n < workers; n++ {
		go func() {
			defer wg.Done()
			fn(indexes)
		}()
	}
	wg.Wait()
}
// absint returns the absolute value of i.
func absint(i int) int {
	if i >= 0 {
		return i
	}
	return -i
}
// clamp rounds and clamps float64 value to fit into uint8.
func clamp(x float64) uint8 {
	rounded := int64(x + 0.5)
	switch {
	case rounded > 255:
		return 255
	case rounded > 0:
		return uint8(rounded)
	default:
		return 0
	}
}
// reverse reverses the order of 4-byte pixels within pix in place.
// Slices holding at most one pixel are left untouched.
func reverse(pix []uint8) {
	if len(pix) <= 4 {
		return
	}
	for lo, hi := 0, len(pix)-4; lo < hi; lo, hi = lo+4, hi-4 {
		a := pix[lo : lo+4 : lo+4]
		b := pix[hi : hi+4 : hi+4]
		a[0], b[0] = b[0], a[0]
		a[1], b[1] = b[1], a[1]
		a[2], b[2] = b[2], a[2]
		a[3], b[3] = b[3], a[3]
	}
}
// toNRGBA returns img as an *image.NRGBA whose bounds start at (0, 0).
// If img is already an *image.NRGBA, the returned image SHARES the pixel
// data (Pix is not copied); only the Rect is translated to the origin.
// Any other image type is converted via Clone.
func toNRGBA(img image.Image) *image.NRGBA {
	if img, ok := img.(*image.NRGBA); ok {
		return &image.NRGBA{
			Pix:    img.Pix,
			Stride: img.Stride,
			Rect:   img.Rect.Sub(img.Rect.Min),
		}
	}
	return Clone(img)
}
// rgbToHSL converts a color from RGB to HSL.
// Hue, saturation and lightness are all returned in the range [0, 1];
// for achromatic colors (max channel == min channel) hue and saturation
// are reported as 0.
func rgbToHSL(r, g, b uint8) (float64, float64, float64) {
	rr := float64(r) / 255
	gg := float64(g) / 255
	bb := float64(b) / 255
	max := math.Max(rr, math.Max(gg, bb))
	min := math.Min(rr, math.Min(gg, bb))
	l := (max + min) / 2
	if max == min {
		return 0, 0, l
	}
	d := max - min
	var s float64
	if l > 0.5 {
		s = d / (2 - max - min)
	} else {
		s = d / (max + min)
	}
	var h float64
	switch max {
	case rr:
		h = (gg - bb) / d
		// Comparing the uint8 inputs is equivalent to comparing gg and bb.
		if g < b {
			h += 6
		}
	case gg:
		h = (bb-rr)/d + 2
	case bb:
		h = (rr-gg)/d + 4
	}
	return h / 6, s, l
}
// hslToRGB converts a color from HSL to RGB.
// h, s and l are expected in [0, 1]; the returned channels are in [0, 255].
func hslToRGB(h, s, l float64) (uint8, uint8, uint8) {
	var r, g, b float64
	// Zero saturation means a gray: all channels equal the lightness.
	if s == 0 {
		v := clamp(l * 255)
		return v, v, v
	}
	// q and p are the standard HSL helper values; each channel is the hue
	// shifted by a third of a turn.
	var q float64
	if l < 0.5 {
		q = l * (1 + s)
	} else {
		q = l + s - l*s
	}
	p := 2*l - q
	r = hueToRGB(p, q, h+1/3.0)
	g = hueToRGB(p, q, h)
	b = hueToRGB(p, q, h-1/3.0)
	return clamp(r * 255), clamp(g * 255), clamp(b * 255)
}
// hueToRGB computes a single RGB channel from the HSL helper values p and q
// and the hue offset t, wrapping t into [0, 1] first.
func hueToRGB(p, q, t float64) float64 {
	if t < 0 {
		t++
	}
	if t > 1 {
		t--
	}
	switch {
	case t < 1/6.0:
		return p + (q-p)*6*t
	case t < 1/2.0:
		return q
	case t < 2/3.0:
		return p + (q-p)*(2/3.0-t)*6
	}
	return p
}

41
vendor/github.com/google/uuid/CHANGELOG.md generated vendored Normal file
View file

@ -0,0 +1,41 @@
# Changelog
## [1.6.0](https://github.com/google/uuid/compare/v1.5.0...v1.6.0) (2024-01-16)
### Features
* add Max UUID constant ([#149](https://github.com/google/uuid/issues/149)) ([c58770e](https://github.com/google/uuid/commit/c58770eb495f55fe2ced6284f93c5158a62e53e3))
### Bug Fixes
* fix typo in version 7 uuid documentation ([#153](https://github.com/google/uuid/issues/153)) ([016b199](https://github.com/google/uuid/commit/016b199544692f745ffc8867b914129ecb47ef06))
* Monotonicity in UUIDv7 ([#150](https://github.com/google/uuid/issues/150)) ([a2b2b32](https://github.com/google/uuid/commit/a2b2b32373ff0b1a312b7fdf6d38a977099698a6))
## [1.5.0](https://github.com/google/uuid/compare/v1.4.0...v1.5.0) (2023-12-12)
### Features
* Validate UUID without creating new UUID ([#141](https://github.com/google/uuid/issues/141)) ([9ee7366](https://github.com/google/uuid/commit/9ee7366e66c9ad96bab89139418a713dc584ae29))
## [1.4.0](https://github.com/google/uuid/compare/v1.3.1...v1.4.0) (2023-10-26)
### Features
* UUIDs slice type with Strings() convenience method ([#133](https://github.com/google/uuid/issues/133)) ([cd5fbbd](https://github.com/google/uuid/commit/cd5fbbdd02f3e3467ac18940e07e062be1f864b4))
### Fixes
* Clarify that Parse's job is to parse but not necessarily validate strings. (Documents current behavior)
## [1.3.1](https://github.com/google/uuid/compare/v1.3.0...v1.3.1) (2023-08-18)
### Bug Fixes
* Use .EqualFold() to parse urn prefixed UUIDs ([#118](https://github.com/google/uuid/issues/118)) ([574e687](https://github.com/google/uuid/commit/574e6874943741fb99d41764c705173ada5293f0))
## Changelog

26
vendor/github.com/google/uuid/CONTRIBUTING.md generated vendored Normal file
View file

@ -0,0 +1,26 @@
# How to contribute
We definitely welcome patches and contribution to this project!
### Tips
Commits must be formatted according to the [Conventional Commits Specification](https://www.conventionalcommits.org).
Always try to include a test case! If it is not possible or not necessary,
please explain why in the pull request description.
### Releasing
Commits that would precipitate a SemVer change, as described in the Conventional
Commits Specification, will trigger [`release-please`](https://github.com/google-github-actions/release-please-action)
to create a release candidate pull request. Once submitted, `release-please`
will create a release.
For tips on how to work with `release-please`, see its documentation.
### Legal requirements
In order to protect both you and ourselves, you will need to sign the
[Contributor License Agreement](https://cla.developers.google.com/clas).
You may have already signed it for other Google projects.

9
vendor/github.com/google/uuid/CONTRIBUTORS generated vendored Normal file
View file

@ -0,0 +1,9 @@
Paul Borman <borman@google.com>
bmatsuo
shawnps
theory
jboverfelt
dsymonds
cd1
wallclockbuilder
dansouza

27
vendor/github.com/google/uuid/LICENSE generated vendored Normal file
View file

@ -0,0 +1,27 @@
Copyright (c) 2009,2014 Google Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

21
vendor/github.com/google/uuid/README.md generated vendored Normal file
View file

@ -0,0 +1,21 @@
# uuid
The uuid package generates and inspects UUIDs based on
[RFC 4122](https://datatracker.ietf.org/doc/html/rfc4122)
and DCE 1.1: Authentication and Security Services.
This package is based on the github.com/pborman/uuid package (previously named
code.google.com/p/go-uuid). It differs from these earlier packages in that
a UUID is a 16 byte array rather than a byte slice. One loss due to this
change is the ability to represent an invalid UUID (vs a NIL UUID).
###### Install
```sh
go get github.com/google/uuid
```
###### Documentation
[![Go Reference](https://pkg.go.dev/badge/github.com/google/uuid.svg)](https://pkg.go.dev/github.com/google/uuid)
Full `go doc` style documentation for the package can be viewed online without
installing this package by using the GoDoc site here:
http://pkg.go.dev/github.com/google/uuid

80
vendor/github.com/google/uuid/dce.go generated vendored Normal file
View file

@ -0,0 +1,80 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"encoding/binary"
"fmt"
"os"
)
// A Domain represents a Version 2 domain
type Domain byte
// Domain constants for DCE Security (Version 2) UUIDs.
const (
Person = Domain(0)
Group = Domain(1)
Org = Domain(2)
)
// NewDCESecurity returns a DCE Security (Version 2) UUID.
//
// The domain should be one of Person, Group or Org.
// On a POSIX system the id should be the users UID for the Person
// domain and the users GID for the Group. The meaning of id for
// the domain Org or on non-POSIX systems is site defined.
//
// For a given domain/id pair the same token may be returned for up to
// 7 minutes and 10 seconds.
func NewDCESecurity(domain Domain, id uint32) (UUID, error) {
	uuid, err := NewUUID() // start from a time-based Version 1 UUID
	if err == nil {
		uuid[6] = (uuid[6] & 0x0f) | 0x20 // Version 2
		uuid[9] = byte(domain)            // domain replaces the low clock-seq byte
		binary.BigEndian.PutUint32(uuid[0:], id) // id replaces time_low
	}
	return uuid, err
}
// NewDCEPerson returns a DCE Security (Version 2) UUID in the person
// domain with the id returned by os.Getuid.
//
// NewDCESecurity(Person, uint32(os.Getuid()))
func NewDCEPerson() (UUID, error) {
	// Convenience wrapper; all logic lives in NewDCESecurity.
	return NewDCESecurity(Person, uint32(os.Getuid()))
}
// NewDCEGroup returns a DCE Security (Version 2) UUID in the group
// domain with the id returned by os.Getgid.
//
// NewDCESecurity(Group, uint32(os.Getgid()))
func NewDCEGroup() (UUID, error) {
	// Convenience wrapper; all logic lives in NewDCESecurity.
	return NewDCESecurity(Group, uint32(os.Getgid()))
}
// Domain returns the domain for a Version 2 UUID. Domains are only defined
// for Version 2 UUIDs; for other versions the returned byte is meaningless.
func (uuid UUID) Domain() Domain {
	// Byte 9 is where NewDCESecurity stored the domain.
	return Domain(uuid[9])
}
// ID returns the id for a Version 2 UUID. IDs are only defined for Version 2
// UUIDs; for other versions the returned value is meaningless.
func (uuid UUID) ID() uint32 {
	// The first four bytes are where NewDCESecurity stored the id.
	return binary.BigEndian.Uint32(uuid[0:4])
}
// String returns a human-readable name for the well-known DCE domains and
// a generic "DomainN" form for any other value.
func (d Domain) String() string {
	switch d {
	case Person:
		return "Person"
	case Group:
		return "Group"
	case Org:
		return "Org"
	default:
		return fmt.Sprintf("Domain%d", int(d))
	}
}

12
vendor/github.com/google/uuid/doc.go generated vendored Normal file
View file

@ -0,0 +1,12 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package uuid generates and inspects UUIDs.
//
// UUIDs are based on RFC 4122 and DCE 1.1: Authentication and Security
// Services.
//
// A UUID is a 16 byte (128 bit) array. UUIDs may be used as keys to
// maps or compared directly.
package uuid

59
vendor/github.com/google/uuid/hash.go generated vendored Normal file
View file

@ -0,0 +1,59 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"crypto/md5"
"crypto/sha1"
"hash"
)
// Well known namespace IDs and UUIDs
var (
NameSpaceDNS = Must(Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c8"))
NameSpaceURL = Must(Parse("6ba7b811-9dad-11d1-80b4-00c04fd430c8"))
NameSpaceOID = Must(Parse("6ba7b812-9dad-11d1-80b4-00c04fd430c8"))
NameSpaceX500 = Must(Parse("6ba7b814-9dad-11d1-80b4-00c04fd430c8"))
Nil UUID // empty UUID, all zeros
// The Max UUID is special form of UUID that is specified to have all 128 bits set to 1.
Max = UUID{
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
}
)
// NewHash returns a new UUID derived from the hash of space concatenated with
// data generated by h. The hash should be at least 16 byte in length. The
// first 16 bytes of the hash are used to form the UUID. The version of the
// UUID will be the lower 4 bits of version. NewHash is used to implement
// NewMD5 and NewSHA1.
func NewHash(h hash.Hash, space UUID, data []byte, version int) UUID {
	h.Reset()
	// hash.Hash.Write never returns an error per its contract.
	h.Write(space[:]) //nolint:errcheck
	h.Write(data)     //nolint:errcheck
	s := h.Sum(nil)
	var uuid UUID
	copy(uuid[:], s)
	// Stamp version and variant bits over the raw digest.
	uuid[6] = (uuid[6] & 0x0f) | uint8((version&0xf)<<4)
	uuid[8] = (uuid[8] & 0x3f) | 0x80 // RFC 4122 variant
	return uuid
}
// NewMD5 returns a new MD5 (Version 3) UUID based on the
// supplied name space and data. It is the same as calling:
//
//	NewHash(md5.New(), space, data, 3)
func NewMD5(space UUID, data []byte) UUID {
	return NewHash(md5.New(), space, data, 3)
}
// NewSHA1 returns a new SHA1 (Version 5) UUID based on the
// supplied name space and data. It is the same as calling:
//
//	NewHash(sha1.New(), space, data, 5)
func NewSHA1(space UUID, data []byte) UUID {
	return NewHash(sha1.New(), space, data, 5)
}

38
vendor/github.com/google/uuid/marshal.go generated vendored Normal file
View file

@ -0,0 +1,38 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import "fmt"
// MarshalText implements encoding.TextMarshaler.
// It always succeeds, producing the canonical 36-byte hyphenated form.
func (uuid UUID) MarshalText() ([]byte, error) {
	var js [36]byte
	encodeHex(js[:], uuid)
	return js[:], nil
}
// UnmarshalText implements encoding.TextUnmarshaler.
// On parse failure the receiver is left unmodified.
func (uuid *UUID) UnmarshalText(data []byte) error {
	id, err := ParseBytes(data)
	if err != nil {
		return err
	}
	*uuid = id
	return nil
}
// MarshalBinary implements encoding.BinaryMarshaler.
// It returns the raw 16 bytes of the UUID and never fails.
func (uuid UUID) MarshalBinary() ([]byte, error) {
	return uuid[:], nil
}
// UnmarshalBinary implements encoding.BinaryUnmarshaler.
// data must be exactly 16 raw bytes; no textual forms are accepted.
func (uuid *UUID) UnmarshalBinary(data []byte) error {
	if len(data) != 16 {
		return fmt.Errorf("invalid UUID (got %d bytes)", len(data))
	}
	copy(uuid[:], data)
	return nil
}

90
vendor/github.com/google/uuid/node.go generated vendored Normal file
View file

@ -0,0 +1,90 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"sync"
)
var (
nodeMu sync.Mutex
ifname string // name of interface being used
nodeID [6]byte // hardware for version 1 UUIDs
zeroID [6]byte // nodeID with only 0's
)
// NodeInterface returns the name of the interface from which the NodeID was
// derived.  The interface "user" is returned if the NodeID was set by
// SetNodeID.
func NodeInterface() string {
	// Lock ordering: defer the unlock before taking the lock is the
	// idiom used throughout this package for the package-level state.
	defer nodeMu.Unlock()
	nodeMu.Lock()
	return ifname
}
// SetNodeInterface selects the hardware address to be used for Version 1 UUIDs.
// If name is "" then the first usable interface found will be used or a random
// Node ID will be generated.  If a named interface cannot be found then false
// is returned.
//
// SetNodeInterface never fails when name is "".
func SetNodeInterface(name string) bool {
	// Exported wrapper: takes the lock, delegates to setNodeInterface.
	defer nodeMu.Unlock()
	nodeMu.Lock()
	return setNodeInterface(name)
}
// setNodeInterface does the work of SetNodeInterface.
// Callers must hold nodeMu.
func setNodeInterface(name string) bool {
	iname, addr := getHardwareInterface(name) // null implementation for js
	if iname != "" && addr != nil {
		ifname = iname
		copy(nodeID[:], addr)
		return true
	}

	// We found no interfaces with a valid hardware address.  If name
	// does not specify a specific interface generate a random Node ID
	// (section 4.1.6)
	if name == "" {
		ifname = "random"
		randomBits(nodeID[:])
		return true
	}
	return false
}
// NodeID returns a slice of a copy of the current Node ID, setting the Node ID
// if not already set.
func NodeID() []byte {
	defer nodeMu.Unlock()
	nodeMu.Lock()
	if nodeID == zeroID {
		// Lazily initialize from the first usable interface (or randomly).
		setNodeInterface("")
	}
	// Copy through a local array so callers cannot mutate package state.
	nid := nodeID
	return nid[:]
}
// SetNodeID sets the Node ID to be used for Version 1 UUIDs.  The first 6 bytes
// of id are used.  If id is less than 6 bytes then false is returned and the
// Node ID is not set.
func SetNodeID(id []byte) bool {
	if len(id) < 6 {
		return false
	}
	defer nodeMu.Unlock()
	nodeMu.Lock()
	copy(nodeID[:], id)
	// "user" marks the node ID as explicitly supplied (see NodeInterface).
	ifname = "user"
	return true
}
// NodeID returns the 6 byte node id encoded in uuid.  The NodeID is only
// well defined for version 1 and 2 UUIDs.
func (uuid UUID) NodeID() []byte {
	// Return a copy so the caller cannot mutate the UUID through the slice.
	var node [6]byte
	copy(node[:], uuid[10:])
	return node[:]
}

12
vendor/github.com/google/uuid/node_js.go generated vendored Normal file
View file

@ -0,0 +1,12 @@
// Copyright 2017 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build js
package uuid
// getHardwareInterface returns nil values for the JS version of the code.
// This removes the "net" dependency, because it is not used in the browser.
// Using the "net" library inflates the size of the transpiled JS code by 673k bytes.
// (Compiled only under the "js" build tag; see node_net.go for the real one.)
func getHardwareInterface(name string) (string, []byte) { return "", nil }

33
vendor/github.com/google/uuid/node_net.go generated vendored Normal file
View file

@ -0,0 +1,33 @@
// Copyright 2017 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !js
package uuid
import "net"
var interfaces []net.Interface // cached list of interfaces
// getHardwareInterface returns the name and hardware address of interface name.
// If name is "" then the name and hardware address of one of the system's
// interfaces is returned.  If no interfaces are found (name does not exist or
// there are no interfaces) then "", nil is returned.
//
// Only addresses of at least 6 bytes are returned.
func getHardwareInterface(name string) (string, []byte) {
	// The interface list is fetched once and cached for the process
	// lifetime; a fetch error leaves the cache nil so it is retried.
	if interfaces == nil {
		var err error
		interfaces, err = net.Interfaces()
		if err != nil {
			return "", nil
		}
	}
	for _, ifs := range interfaces {
		if len(ifs.HardwareAddr) >= 6 && (name == "" || name == ifs.Name) {
			return ifs.Name, ifs.HardwareAddr
		}
	}
	return "", nil
}

118
vendor/github.com/google/uuid/null.go generated vendored Normal file
View file

@ -0,0 +1,118 @@
// Copyright 2021 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"bytes"
"database/sql/driver"
"encoding/json"
"fmt"
)
var jsonNull = []byte("null")
// NullUUID represents a UUID that may be null.
// NullUUID implements the SQL driver.Scanner interface so
// it can be used as a scan destination:
//
// var u uuid.NullUUID
// err := db.QueryRow("SELECT name FROM foo WHERE id=?", id).Scan(&u)
// ...
// if u.Valid {
// // use u.UUID
// } else {
// // NULL value
// }
//
type NullUUID struct {
UUID UUID
Valid bool // Valid is true if UUID is not NULL
}
// Scan implements the SQL driver.Scanner interface.
// A nil value yields the Nil UUID with Valid=false; any scan error also
// clears Valid.
func (nu *NullUUID) Scan(value interface{}) error {
	if value == nil {
		nu.UUID, nu.Valid = Nil, false
		return nil
	}

	err := nu.UUID.Scan(value)
	if err != nil {
		nu.Valid = false
		return err
	}

	nu.Valid = true
	return nil
}
// Value implements the driver Valuer interface.
// An invalid (NULL) NullUUID maps to a SQL NULL.
func (nu NullUUID) Value() (driver.Value, error) {
	if !nu.Valid {
		return nil, nil
	}
	// Delegate to UUID Value function
	return nu.UUID.Value()
}
// MarshalBinary implements encoding.BinaryMarshaler.
// An invalid NullUUID marshals to a nil byte slice rather than 16 zeros.
func (nu NullUUID) MarshalBinary() ([]byte, error) {
	if nu.Valid {
		return nu.UUID[:], nil
	}

	return []byte(nil), nil
}
// UnmarshalBinary implements encoding.BinaryUnmarshaler.
// Exactly 16 raw bytes are required; success always sets Valid=true.
func (nu *NullUUID) UnmarshalBinary(data []byte) error {
	if len(data) != 16 {
		return fmt.Errorf("invalid UUID (got %d bytes)", len(data))
	}
	copy(nu.UUID[:], data)
	nu.Valid = true
	return nil
}
// MarshalText implements encoding.TextMarshaler.
// An invalid NullUUID marshals to the literal text "null".
func (nu NullUUID) MarshalText() ([]byte, error) {
	if nu.Valid {
		return nu.UUID.MarshalText()
	}

	return jsonNull, nil
}
// UnmarshalText implements encoding.TextUnmarshaler.
// A parse failure clears Valid and leaves nu.UUID unchanged.
func (nu *NullUUID) UnmarshalText(data []byte) error {
	id, err := ParseBytes(data)
	if err != nil {
		nu.Valid = false
		return err
	}

	nu.UUID = id
	nu.Valid = true

	return nil
}
// MarshalJSON implements json.Marshaler.
// An invalid NullUUID marshals to JSON null.
func (nu NullUUID) MarshalJSON() ([]byte, error) {
	if nu.Valid {
		return json.Marshal(nu.UUID)
	}

	return jsonNull, nil
}
// UnmarshalJSON implements json.Unmarshaler.
// JSON null resets the receiver to the zero NullUUID; otherwise Valid
// mirrors whether the embedded UUID decoded successfully.
func (nu *NullUUID) UnmarshalJSON(data []byte) error {
	if bytes.Equal(data, jsonNull) {
		*nu = NullUUID{}
		return nil // valid null UUID
	}
	err := json.Unmarshal(data, &nu.UUID)
	nu.Valid = err == nil
	return err
}

59
vendor/github.com/google/uuid/sql.go generated vendored Normal file
View file

@ -0,0 +1,59 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"database/sql/driver"
"fmt"
)
// Scan implements sql.Scanner so UUIDs can be read from databases transparently.
// Currently, database types that map to string and []byte are supported. Please
// consult database-specific driver documentation for matching types.
// Nil and empty inputs leave the receiver unchanged and return nil.
func (uuid *UUID) Scan(src interface{}) error {
	switch src := src.(type) {
	case nil:
		return nil

	case string:
		// if an empty UUID comes from a table, we return a null UUID
		if src == "" {
			return nil
		}

		// see Parse for required string format
		u, err := Parse(src)
		if err != nil {
			return fmt.Errorf("Scan: %v", err)
		}

		*uuid = u

	case []byte:
		// if an empty UUID comes from a table, we return a null UUID
		if len(src) == 0 {
			return nil
		}

		// assumes a simple slice of bytes if 16 bytes
		// otherwise attempts to parse
		if len(src) != 16 {
			return uuid.Scan(string(src))
		}
		copy((*uuid)[:], src)

	default:
		return fmt.Errorf("Scan: unable to scan type %T into UUID", src)
	}

	return nil
}
// Value implements sql.Valuer so that UUIDs can be written to databases
// transparently. Currently, UUIDs map to strings. Please consult
// database-specific driver documentation for matching types.
func (uuid UUID) Value() (driver.Value, error) {
	// Canonical hyphenated string form; never fails.
	return uuid.String(), nil
}

134
vendor/github.com/google/uuid/time.go generated vendored Normal file
View file

@ -0,0 +1,134 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"encoding/binary"
"sync"
"time"
)
// A Time represents a time as the number of 100's of nanoseconds since 15 Oct
// 1582.
type Time int64
const (
lillian = 2299160 // Julian day of 15 Oct 1582
unix = 2440587 // Julian day of 1 Jan 1970
epoch = unix - lillian // Days between epochs
g1582 = epoch * 86400 // seconds between epochs
g1582ns100 = g1582 * 10000000 // 100s of a nanoseconds between epochs
)
var (
timeMu sync.Mutex
lasttime uint64 // last time we returned
clockSeq uint16 // clock sequence for this run
timeNow = time.Now // for testing
)
// UnixTime converts t the number of seconds and nanoseconds using the Unix
// epoch of 1 Jan 1970.
func (t Time) UnixTime() (sec, nsec int64) {
	// Shift from the Gregorian epoch, then split 100ns ticks into whole
	// seconds and the nanosecond remainder (remainder taken before the
	// division so both derive from the same value).
	sec = int64(t - g1582ns100)
	nsec = (sec % 10000000) * 100
	sec /= 10000000
	return sec, nsec
}
// GetTime returns the current Time (100s of nanoseconds since 15 Oct 1582) and
// clock sequence as well as adjusting the clock sequence as needed.  An error
// is returned if the current time cannot be determined.
func GetTime() (Time, uint16, error) {
	// Exported wrapper: takes the lock, delegates to getTime.
	defer timeMu.Unlock()
	timeMu.Lock()
	return getTime()
}
// getTime does the work of GetTime. Callers must hold timeMu.
func getTime() (Time, uint16, error) {
	t := timeNow()

	// If we don't have a clock sequence already, set one.
	if clockSeq == 0 {
		setClockSequence(-1)
	}
	now := uint64(t.UnixNano()/100) + g1582ns100

	// If time has gone backwards with this clock sequence then we
	// increment the clock sequence
	if now <= lasttime {
		clockSeq = ((clockSeq + 1) & 0x3fff) | 0x8000
	}
	lasttime = now
	return Time(now), clockSeq, nil
}
// ClockSequence returns the current clock sequence, generating one if not
// already set.  The clock sequence is only used for Version 1 UUIDs.
//
// The uuid package does not use global static storage for the clock sequence or
// the last time a UUID was generated.  Unless SetClockSequence is used, a new
// random clock sequence is generated the first time a clock sequence is
// requested by ClockSequence, GetTime, or NewUUID.  (section 4.2.1.1)
func ClockSequence() int {
	// Exported wrapper: takes the lock, delegates to clockSequence.
	defer timeMu.Unlock()
	timeMu.Lock()
	return clockSequence()
}
// clockSequence does the work of ClockSequence. Callers must hold timeMu.
func clockSequence() int {
	if clockSeq == 0 {
		setClockSequence(-1) // lazily generate a random sequence
	}
	// Mask off the variant bits; only the low 14 bits are the sequence.
	return int(clockSeq & 0x3fff)
}
// SetClockSequence sets the clock sequence to the lower 14 bits of seq.  Setting to
// -1 causes a new sequence to be generated.
func SetClockSequence(seq int) {
	// Exported wrapper: takes the lock, delegates to setClockSequence.
	defer timeMu.Unlock()
	timeMu.Lock()
	setClockSequence(seq)
}
// setClockSequence does the work of SetClockSequence. Callers must hold timeMu.
func setClockSequence(seq int) {
	if seq == -1 {
		var b [2]byte
		randomBits(b[:]) // clock sequence
		seq = int(b[0])<<8 | int(b[1])
	}
	oldSeq := clockSeq
	clockSeq = uint16(seq&0x3fff) | 0x8000 // Set our variant
	if oldSeq != clockSeq {
		// A new sequence invalidates the monotonicity guard in getTime.
		lasttime = 0
	}
}
// Time returns the time in 100s of nanoseconds since 15 Oct 1582 encoded in
// uuid. The time is only defined for version 1, 2, 6 and 7 UUIDs.
func (uuid UUID) Time() Time {
	var t Time
	switch uuid.Version() {
	case 6:
		// v6 stores the timestamp big-endian in the first 8 bytes.
		time := binary.BigEndian.Uint64(uuid[:8]) // Ignore uuid[6] version b0110
		t = Time(time)
	case 7:
		// v7 stores Unix milliseconds in the top 48 bits; convert
		// to 100ns ticks and rebase onto the Gregorian epoch.
		time := binary.BigEndian.Uint64(uuid[:8])
		t = Time((time>>16)*10000 + g1582ns100)
	default: // forward compatible
		// v1/v2 layout: time_low, time_mid, time_hi (version bits masked off).
		time := int64(binary.BigEndian.Uint32(uuid[0:4]))
		time |= int64(binary.BigEndian.Uint16(uuid[4:6])) << 32
		time |= int64(binary.BigEndian.Uint16(uuid[6:8])&0xfff) << 48
		t = Time(time)
	}
	return t
}
// ClockSequence returns the clock sequence encoded in uuid.
// The clock sequence is only well defined for version 1 and 2 UUIDs.
func (uuid UUID) ClockSequence() int {
	// Bytes 8-9, with the two variant bits masked off.
	return int(binary.BigEndian.Uint16(uuid[8:10])) & 0x3fff
}

43
vendor/github.com/google/uuid/util.go generated vendored Normal file
View file

@ -0,0 +1,43 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"io"
)
// randomBits completely fills slice b with random data from the package's
// configured random source (rander).
func randomBits(b []byte) {
	if _, err := io.ReadFull(rander, b); err != nil {
		panic(err.Error()) // rand should never fail
	}
}
// xvalues returns the value of a byte as a hexadecimal digit or 255.
var xvalues = [256]byte{
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 255, 255, 255, 255, 255, 255,
255, 10, 11, 12, 13, 14, 15, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 10, 11, 12, 13, 14, 15, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
}
// xtob converts hex characters x1 and x2 into a byte.
// The bool result is false if either character is not a hex digit
// (xvalues maps non-hex bytes to 255).
func xtob(x1, x2 byte) (byte, bool) {
	b1 := xvalues[x1]
	b2 := xvalues[x2]
	return (b1 << 4) | b2, b1 != 255 && b2 != 255
}

365
vendor/github.com/google/uuid/uuid.go generated vendored Normal file
View file

@ -0,0 +1,365 @@
// Copyright 2018 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"bytes"
"crypto/rand"
"encoding/hex"
"errors"
"fmt"
"io"
"strings"
"sync"
)
// A UUID is a 128 bit (16 byte) Universal Unique IDentifier as defined in RFC
// 4122.
type UUID [16]byte
// A Version represents a UUID's version.
type Version byte
// A Variant represents a UUID's variant.
type Variant byte
// Constants returned by Variant.
const (
Invalid = Variant(iota) // Invalid UUID
RFC4122 // The variant specified in RFC4122
Reserved // Reserved, NCS backward compatibility.
Microsoft // Reserved, Microsoft Corporation backward compatibility.
Future // Reserved for future definition.
)
const randPoolSize = 16 * 16
var (
rander = rand.Reader // random function
poolEnabled = false
poolMu sync.Mutex
poolPos = randPoolSize // protected with poolMu
pool [randPoolSize]byte // protected with poolMu
)
// invalidLengthError reports a candidate UUID string of unsupported length.
type invalidLengthError struct{ len int }

func (err invalidLengthError) Error() string {
	return fmt.Sprintf("invalid UUID length: %d", err.len)
}
// IsInvalidLengthError is matcher function for custom error invalidLengthError.
// Note: this is a direct type assertion; it does not unwrap wrapped errors.
func IsInvalidLengthError(err error) bool {
	_, ok := err.(invalidLengthError)
	return ok
}
// Parse decodes s into a UUID or returns an error if it cannot be parsed.  Both
// the standard UUID forms defined in RFC 4122
// (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx and
// urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) are decoded.  In addition,
// Parse accepts non-standard strings such as the raw hex encoding
// xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx and 38 byte "Microsoft style" encodings,
// e.g.  {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}.  Only the middle 36 bytes are
// examined in the latter case.  Parse should not be used to validate strings as
// it parses non-standard encodings as indicated above.
func Parse(s string) (UUID, error) {
	var uuid UUID
	// Dispatch on length; each case either rejects, returns directly
	// (raw-hex), or reduces s to the 36-byte hyphenated form.
	switch len(s) {
	// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
	case 36:

	// urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
	case 36 + 9:
		if !strings.EqualFold(s[:9], "urn:uuid:") {
			return uuid, fmt.Errorf("invalid urn prefix: %q", s[:9])
		}
		s = s[9:]

	// {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}
	case 36 + 2:
		// The surrounding characters are intentionally NOT checked
		// (see doc comment: only the middle 36 bytes are examined).
		s = s[1:]

	// xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
	case 32:
		var ok bool
		for i := range uuid {
			uuid[i], ok = xtob(s[i*2], s[i*2+1])
			if !ok {
				return uuid, errors.New("invalid UUID format")
			}
		}
		return uuid, nil
	default:
		return uuid, invalidLengthError{len(s)}
	}
	// s is now at least 36 bytes long
	// it must be of the form  xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
	if s[8] != '-' || s[13] != '-' || s[18] != '-' || s[23] != '-' {
		return uuid, errors.New("invalid UUID format")
	}
	// Offsets of the 16 hex byte pairs within the hyphenated form.
	for i, x := range [16]int{
		0, 2, 4, 6,
		9, 11,
		14, 16,
		19, 21,
		24, 26, 28, 30, 32, 34,
	} {
		v, ok := xtob(s[x], s[x+1])
		if !ok {
			return uuid, errors.New("invalid UUID format")
		}
		uuid[i] = v
	}
	return uuid, nil
}
// ParseBytes is like Parse, except it parses a byte slice instead of a string.
// The logic mirrors Parse exactly; see Parse for the accepted formats.
func ParseBytes(b []byte) (UUID, error) {
	var uuid UUID
	switch len(b) {
	case 36: // xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
	case 36 + 9: // urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
		if !bytes.EqualFold(b[:9], []byte("urn:uuid:")) {
			return uuid, fmt.Errorf("invalid urn prefix: %q", b[:9])
		}
		b = b[9:]
	case 36 + 2: // {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}
		// Braces are intentionally not verified, matching Parse.
		b = b[1:]
	case 32: // xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
		var ok bool
		for i := 0; i < 32; i += 2 {
			uuid[i/2], ok = xtob(b[i], b[i+1])
			if !ok {
				return uuid, errors.New("invalid UUID format")
			}
		}
		return uuid, nil
	default:
		return uuid, invalidLengthError{len(b)}
	}
	// s is now at least 36 bytes long
	// it must be of the form  xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
	if b[8] != '-' || b[13] != '-' || b[18] != '-' || b[23] != '-' {
		return uuid, errors.New("invalid UUID format")
	}
	// Offsets of the 16 hex byte pairs within the hyphenated form.
	for i, x := range [16]int{
		0, 2, 4, 6,
		9, 11,
		14, 16,
		19, 21,
		24, 26, 28, 30, 32, 34,
	} {
		v, ok := xtob(b[x], b[x+1])
		if !ok {
			return uuid, errors.New("invalid UUID format")
		}
		uuid[i] = v
	}
	return uuid, nil
}
// MustParse is like Parse but panics if the string cannot be parsed.
// It simplifies safe initialization of global variables holding compiled UUIDs.
func MustParse(s string) UUID {
	uuid, err := Parse(s)
	if err != nil {
		panic(`uuid: Parse(` + s + `): ` + err.Error())
	}
	return uuid
}
// FromBytes creates a new UUID from a byte slice. Returns an error if the slice
// does not have a length of 16. The bytes are copied from the slice.
func FromBytes(b []byte) (uuid UUID, err error) {
	if err = uuid.UnmarshalBinary(b); err != nil {
		return uuid, err
	}
	return uuid, nil
}
// Must returns uuid if err is nil and panics otherwise.
func Must(uuid UUID, err error) UUID {
	if err == nil {
		return uuid
	}
	panic(err)
}
// Validate returns an error if s is not a properly formatted UUID in one of the following formats:
//
//	xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
//	urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
//	xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
//	{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}
//
// It returns an error if the format is invalid, otherwise nil.
func Validate(s string) error {
	switch len(s) {
	// Standard UUID format
	case 36:

	// UUID with "urn:uuid:" prefix: verify and strip the prefix.
	case 36 + 9:
		if !strings.EqualFold(s[:9], "urn:uuid:") {
			return fmt.Errorf("invalid urn prefix: %q", s[:9])
		}
		s = s[9:]

	// UUID enclosed in braces. Unlike Parse/ParseBytes, both the
	// opening and the closing brace are verified here.
	case 36 + 2:
		if s[0] != '{' || s[len(s)-1] != '}' {
			return fmt.Errorf("invalid bracketed UUID format")
		}
		s = s[1 : len(s)-1]

	// UUID without hyphens: every adjacent character pair must be hex.
	case 32:
		for i := 0; i < len(s); i += 2 {
			_, ok := xtob(s[i], s[i+1])
			if !ok {
				return errors.New("invalid UUID format")
			}
		}

	default:
		return invalidLengthError{len(s)}
	}

	// Check for standard UUID format. The 32-character case falls out of
	// the switch with len(s) == 32 and correctly skips this block.
	if len(s) == 36 {
		if s[8] != '-' || s[13] != '-' || s[18] != '-' || s[23] != '-' {
			return errors.New("invalid UUID format")
		}
		// Offsets of the 16 hex pairs in the canonical hyphenated form.
		for _, x := range []int{0, 2, 4, 6, 9, 11, 14, 16, 19, 21, 24, 26, 28, 30, 32, 34} {
			if _, ok := xtob(s[x], s[x+1]); !ok {
				return errors.New("invalid UUID format")
			}
		}
	}

	return nil
}
// String returns the string form of uuid,
// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx, or "" if uuid is invalid.
func (uuid UUID) String() string {
	dst := make([]byte, 36)
	encodeHex(dst, uuid)
	return string(dst)
}
// URN returns the RFC 2141 URN form of uuid,
// urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx, or "" if uuid is invalid.
func (uuid UUID) URN() string {
	dst := make([]byte, 36+9)
	n := copy(dst, "urn:uuid:")
	encodeHex(dst[n:], uuid)
	return string(dst)
}
// encodeHex writes the canonical hyphenated hex form of uuid
// (8-4-4-4-12 hex digits) into dst, which must be at least 36 bytes long.
func encodeHex(dst []byte, uuid UUID) {
	hex.Encode(dst[:8], uuid[:4])
	hex.Encode(dst[9:13], uuid[4:6])
	hex.Encode(dst[14:18], uuid[6:8])
	hex.Encode(dst[19:23], uuid[8:10])
	hex.Encode(dst[24:36], uuid[10:])
	dst[8], dst[13], dst[18], dst[23] = '-', '-', '-', '-'
}
// Variant returns the variant encoded in uuid, determined by the
// high-order bits of byte 8.
func (uuid UUID) Variant() Variant {
	b := uuid[8]
	if b&0xc0 == 0x80 {
		return RFC4122
	}
	if b&0xe0 == 0xc0 {
		return Microsoft
	}
	if b&0xe0 == 0xe0 {
		return Future
	}
	return Reserved
}
// Version returns the version of uuid, encoded in the high nibble of byte 6.
func (uuid UUID) Version() Version {
	return Version(uuid[6] >> 4)
}
// String returns a human-readable name for v, e.g. "VERSION_4", or
// "BAD_VERSION_n" for values that cannot fit in the 4-bit version field.
func (v Version) String() string {
	prefix := "VERSION"
	if v > 15 {
		prefix = "BAD_VERSION"
	}
	return fmt.Sprintf("%s_%d", prefix, v)
}
// String returns the name of the variant v, or "BadVariantN" for values
// outside the known set.
func (v Variant) String() string {
	var name string
	switch v {
	case Invalid:
		name = "Invalid"
	case RFC4122:
		name = "RFC4122"
	case Reserved:
		name = "Reserved"
	case Microsoft:
		name = "Microsoft"
	case Future:
		name = "Future"
	default:
		return fmt.Sprintf("BadVariant%d", int(v))
	}
	return name
}
// SetRand sets the random number generator to r, which implements io.Reader.
// If r.Read returns an error when the package requests random data then
// a panic will be issued.
//
// Calling SetRand with nil sets the random number generator to the default
// generator.
func SetRand(r io.Reader) {
	if r != nil {
		rander = r
		return
	}
	rander = rand.Reader
}
// EnableRandPool enables internal randomness pool used for Random
// (Version 4) UUID generation. The pool contains random bytes read from
// the random number generator on demand in batches. Enabling the pool
// may improve the UUID generation throughput significantly.
//
// Since the pool is stored on the Go heap, this feature may be a bad fit
// for security sensitive applications.
//
// Both EnableRandPool and DisableRandPool are not thread-safe and should
// only be called when there is no possibility that New or any other
// UUID Version 4 generation function will be called concurrently.
func EnableRandPool() {
	// The pool itself is filled lazily by newRandomFromPool on first use.
	poolEnabled = true
}
// DisableRandPool disables the randomness pool if it was previously
// enabled with EnableRandPool.
//
// Both EnableRandPool and DisableRandPool are not thread-safe and should
// only be called when there is no possibility that New or any other
// UUID Version 4 generation function will be called concurrently.
func DisableRandPool() {
	poolEnabled = false
	// The deferred Unlock is registered before Lock is taken; it still runs
	// at function return, i.e. after the critical section below.
	defer poolMu.Unlock()
	poolMu.Lock()
	// Mark the pool as exhausted so any leftover bytes are discarded and
	// re-read if the pool is ever re-enabled.
	poolPos = randPoolSize
}
// UUIDs is a slice of UUID types.
type UUIDs []UUID

// Strings returns a string slice containing the string form of each UUID in uuids.
func (uuids UUIDs) Strings() []string {
	out := make([]string, 0, len(uuids))
	for _, id := range uuids {
		out = append(out, id.String())
	}
	return out
}

44
vendor/github.com/google/uuid/version1.go generated vendored Normal file
View file

@ -0,0 +1,44 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"encoding/binary"
)
// NewUUID returns a Version 1 UUID based on the current NodeID and clock
// sequence, and the current time. If the NodeID has not been set by SetNodeID
// or SetNodeInterface then it will be set automatically. If clock sequence
// has not been set by SetClockSequence then it will be set automatically.
// If GetTime fails to return the current time, NewUUID returns the zero
// UUID and an error.
//
// In most cases, New should be used.
func NewUUID() (UUID, error) {
	var uuid UUID
	now, seq, err := GetTime()
	if err != nil {
		return uuid, err
	}

	// Split the 60-bit timestamp into the v1 time_low/time_mid/time_hi
	// fields and OR the version (1) into the high nibble of time_hi.
	timeLow := uint32(now & 0xffffffff)
	timeMid := uint16((now >> 32) & 0xffff)
	timeHi := uint16((now >> 48) & 0x0fff)
	timeHi |= 0x1000 // Version 1

	binary.BigEndian.PutUint32(uuid[0:], timeLow)
	binary.BigEndian.PutUint16(uuid[4:], timeMid)
	binary.BigEndian.PutUint16(uuid[6:], timeHi)
	binary.BigEndian.PutUint16(uuid[8:], seq)

	// Lazily initialize the node ID under nodeMu before copying it into
	// the last six bytes of the UUID.
	nodeMu.Lock()
	if nodeID == zeroID {
		setNodeInterface("")
	}
	copy(uuid[10:], nodeID[:])
	nodeMu.Unlock()

	return uuid, nil
}

76
vendor/github.com/google/uuid/version4.go generated vendored Normal file
View file

@ -0,0 +1,76 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import "io"
// New creates a new random UUID or panics. New is equivalent to
// the expression
//
//	uuid.Must(uuid.NewRandom())
//
// It panics only when NewRandom fails, i.e. when the underlying source
// of randomness returns an error.
func New() UUID {
	return Must(NewRandom())
}
// NewString creates a new random UUID and returns it as a string or panics.
// NewString is equivalent to the expression
//
// uuid.New().String()
func NewString() string {
return Must(NewRandom()).String()
}
// NewRandom returns a Random (Version 4) UUID.
//
// The strength of the UUIDs is based on the strength of the crypto/rand
// package.
//
// Uses the randomness pool if it was enabled with EnableRandPool.
//
// A note about uniqueness derived from the UUID Wikipedia entry:
//
//	Randomly generated UUIDs have 122 random bits. One's annual risk of being
//	hit by a meteorite is estimated to be one chance in 17 billion, that
//	means the probability is about 0.00000000006 (6 × 10⁻¹¹),
//	equivalent to the odds of creating a few tens of trillions of UUIDs in a
//	year and having one duplicate.
func NewRandom() (UUID, error) {
	if poolEnabled {
		return newRandomFromPool()
	}
	return NewRandomFromReader(rander)
}
// NewRandomFromReader returns a UUID based on bytes read from a given io.Reader.
func NewRandomFromReader(r io.Reader) (UUID, error) {
	var u UUID
	if _, err := io.ReadFull(r, u[:]); err != nil {
		return Nil, err
	}
	u[6] = (u[6] & 0x0f) | 0x40 // version 4 in the high nibble of byte 6
	u[8] = (u[8] & 0x3f) | 0x80 // RFC 4122 variant bits (10) in byte 8
	return u, nil
}
// newRandomFromPool returns a Version 4 UUID drawn from the package-level
// randomness pool, refilling the pool from rander when it is exhausted.
// The pool cursor and buffer are guarded by poolMu.
func newRandomFromPool() (UUID, error) {
	var uuid UUID
	poolMu.Lock()
	if poolPos == randPoolSize {
		// Pool exhausted: refill the whole buffer in a single read.
		_, err := io.ReadFull(rander, pool[:])
		if err != nil {
			poolMu.Unlock()
			return Nil, err
		}
		poolPos = 0
	}
	copy(uuid[:], pool[poolPos:(poolPos+16)])
	poolPos += 16
	poolMu.Unlock()

	// Stamp the version and variant bits outside the critical section.
	uuid[6] = (uuid[6] & 0x0f) | 0x40 // Version 4
	uuid[8] = (uuid[8] & 0x3f) | 0x80 // Variant is 10
	return uuid, nil
}

56
vendor/github.com/google/uuid/version6.go generated vendored Normal file
View file

@ -0,0 +1,56 @@
// Copyright 2023 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import "encoding/binary"
// UUID version 6 is a field-compatible version of UUIDv1, reordered for improved DB locality.
// It is expected that UUIDv6 will primarily be used in contexts where there are existing v1 UUIDs.
// Systems that do not involve legacy UUIDv1 SHOULD consider using UUIDv7 instead.
//
// see https://datatracker.ietf.org/doc/html/draft-peabody-dispatch-new-uuid-format-03#uuidv6
//
// NewV6 returns a Version 6 UUID based on the current NodeID and clock
// sequence, and the current time. If the NodeID has not been set by SetNodeID
// or SetNodeInterface then it will be set automatically. If clock sequence
// has not been set by SetClockSequence then it will be set automatically.
// If GetTime fails to return the current time, NewV6 returns Nil and an error.
func NewV6() (UUID, error) {
	var uuid UUID
	now, seq, err := GetTime()
	if err != nil {
		return uuid, err
	}

	/*
	    0                   1                   2                   3
	    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	   |                           time_high                           |
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	   |           time_mid            |      time_low_and_version     |
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	   |clk_seq_hi_res |  clk_seq_low  |          node (0-1)           |
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	   |                          node (2-5)                           |
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	*/

	binary.BigEndian.PutUint64(uuid[0:], uint64(now))
	binary.BigEndian.PutUint16(uuid[8:], seq)

	// Stamp version 6 into the high nibble of byte 6 and the RFC 4122
	// variant (10) into the top bits of byte 8.
	uuid[6] = 0x60 | (uuid[6] & 0x0F)
	uuid[8] = 0x80 | (uuid[8] & 0x3F)

	// Lazily initialize the node ID under nodeMu before copying it into
	// the last six bytes of the UUID.
	nodeMu.Lock()
	if nodeID == zeroID {
		setNodeInterface("")
	}
	copy(uuid[10:], nodeID[:])
	nodeMu.Unlock()

	return uuid, nil
}

104
vendor/github.com/google/uuid/version7.go generated vendored Normal file
View file

@ -0,0 +1,104 @@
// Copyright 2023 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package uuid
import (
"io"
)
// UUID version 7 features a time-ordered value field derived from the widely
// implemented and well known Unix Epoch timestamp source,
// the number of milliseconds since midnight 1 Jan 1970 UTC, leap seconds excluded.
// As well as improved entropy characteristics over versions 1 or 6.
//
// see https://datatracker.ietf.org/doc/html/draft-peabody-dispatch-new-uuid-format-03#name-uuid-version-7
//
// Implementations SHOULD utilize UUID version 7 over UUID version 1 and 6 if possible.
//
// NewV7 returns a Version 7 UUID based on the current time (Unix Epoch).
// Uses the randomness pool if it was enabled with EnableRandPool.
// On error, NewV7 returns Nil and an error.
func NewV7() (UUID, error) {
	// Start from a fully random (v4) UUID, then overwrite the timestamp,
	// version, and sequence fields in place.
	uuid, err := NewRandom()
	if err != nil {
		return uuid, err
	}
	makeV7(uuid[:])
	return uuid, nil
}
// NewV7FromReader returns a Version 7 UUID based on the current time
// (Unix Epoch), using NewRandomFromReader to fill the random bits.
// On error, NewV7FromReader returns Nil and an error.
func NewV7FromReader(r io.Reader) (UUID, error) {
	uuid, err := NewRandomFromReader(r)
	if err == nil {
		makeV7(uuid[:])
	}
	return uuid, err
}
// makeV7 fills in the 48-bit big-endian Unix-millisecond timestamp
// (uuid[0] - uuid[5]), sets version 0b0111 in the high nibble of uuid[6],
// and writes the 12-bit sub-millisecond sequence from getV7Time into the
// low nibble of uuid[6] and all of uuid[7].
// uuid[8] already carries the correct variant bits (10), set by
// NewRandom/NewRandomFromReader.
// see functions NewV7 and NewV7FromReader
func makeV7(uuid []byte) {
	/*
	    0                   1                   2                   3
	    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	   |                           unix_ts_ms                          |
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	   |          unix_ts_ms           |  ver  |  rand_a (12 bit seq)  |
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	   |var|                        rand_b                             |
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	   |                            rand_b                             |
	   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	*/
	_ = uuid[15] // bounds check

	t, s := getV7Time()

	uuid[0] = byte(t >> 40)
	uuid[1] = byte(t >> 32)
	uuid[2] = byte(t >> 24)
	uuid[3] = byte(t >> 16)
	uuid[4] = byte(t >> 8)
	uuid[5] = byte(t)

	uuid[6] = 0x70 | (0x0F & byte(s>>8)) // version 7 + high 4 bits of seq
	uuid[7] = byte(s)                    // low 8 bits of seq
}
// lastV7time is the last time value returned by getV7Time, stored as:
//
//	52 bits of time in milliseconds since epoch
//	12 bits of (fractional nanoseconds) >> 8
//
// It is guarded by timeMu (see getV7Time).
var lastV7time int64

// nanoPerMilli is the number of nanoseconds in one millisecond.
const nanoPerMilli = 1000000
// getV7Time returns the time in milliseconds and nanoseconds / 256.
// The returned (milli << 12 + seq) is guaranteed to be greater than
// (milli << 12 + seq) returned by any previous call to getV7Time.
func getV7Time() (milli, seq int64) {
	timeMu.Lock()
	defer timeMu.Unlock()

	nano := timeNow().UnixNano()
	milli = nano / nanoPerMilli
	// Sequence number is between 0 and 3906 (nanoPerMilli>>8)
	seq = (nano - milli*nanoPerMilli) >> 8
	now := milli<<12 + seq
	// If the clock did not advance past the previous value (same
	// millisecond, or a backwards step), bump the combined value by one
	// so the result stays strictly monotonic.
	if now <= lastV7time {
		now = lastV7time + 1
		milli = now >> 12
		seq = now & 0xfff
	}
	lastV7time = now
	return milli, seq
}

9
vendor/github.com/jackc/pgpassfile/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,9 @@
language: go
go:
- 1.x
- tip
matrix:
allow_failures:
- go: tip

22
vendor/github.com/jackc/pgpassfile/LICENSE generated vendored Normal file
View file

@ -0,0 +1,22 @@
Copyright (c) 2019 Jack Christensen
MIT License
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

8
vendor/github.com/jackc/pgpassfile/README.md generated vendored Normal file
View file

@ -0,0 +1,8 @@
[![](https://godoc.org/github.com/jackc/pgpassfile?status.svg)](https://godoc.org/github.com/jackc/pgpassfile)
[![Build Status](https://travis-ci.org/jackc/pgpassfile.svg)](https://travis-ci.org/jackc/pgpassfile)
# pgpassfile
Package pgpassfile is a parser for PostgreSQL .pgpass files.
Extracted and rewritten from original implementation in https://github.com/jackc/pgx.

Some files were not shown because too many files have changed in this diff Show more