init

2025-09-24 11:50:07 +02:00
parent e2b164640c
commit b5ad30cfbd
8 changed files with 983 additions and 0 deletions


@@ -0,0 +1,51 @@
name: release-tag
on:
push:
branches:
- 'main'
jobs:
release-image:
runs-on: ubuntu-fast
env:
DOCKER_ORG: ${{ vars.DOCKER_ORG }}
DOCKER_LATEST: latest
RUNNER_TOOL_CACHE: /toolcache
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker BuildX
uses: docker/setup-buildx-action@v2
with: # replace it with your local IP
config-inline: |
[registry."${{ vars.DOCKER_REGISTRY }}"]
http = true
insecure = true
- name: Login to DockerHub
uses: docker/login-action@v2
with:
registry: ${{ vars.DOCKER_REGISTRY }} # replace it with your local IP
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Get Meta
id: meta
run: |
echo REPO_NAME=$(echo ${GITHUB_REPOSITORY} | awk -F"/" '{print $2}') >> $GITHUB_OUTPUT
echo REPO_VERSION=$(git describe --tags --always | sed 's/^v//') >> $GITHUB_OUTPUT
- name: Build and push
uses: docker/build-push-action@v4
with:
context: .
file: ./Dockerfile
platforms: |
linux/amd64
push: true
tags: | # replace it with your local IP and tags
${{ vars.DOCKER_REGISTRY }}/${{ env.DOCKER_ORG }}/${{ steps.meta.outputs.REPO_NAME }}:${{ steps.meta.outputs.REPO_VERSION }}
${{ vars.DOCKER_REGISTRY }}/${{ env.DOCKER_ORG }}/${{ steps.meta.outputs.REPO_NAME }}:${{ env.DOCKER_LATEST }}

Dockerfile

@@ -0,0 +1,22 @@
# ---------- build ----------
FROM golang:1.24-alpine AS build
WORKDIR /src
COPY . .
RUN CGO_ENABLED=0 go build -trimpath -ldflags="-s -w" -o /out/filesvc ./cmd/filesvc
# ---------- run ----------
FROM alpine:3.22
RUN adduser -D -u 10001 filesvc && apk add --no-cache ca-certificates
USER filesvc
WORKDIR /app
# Data & keys end up under /data (mount it as a volume)
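# Note: cmd/filesvc/main.go currently reads FILESVC_LISTEN, FILESVC_DATA and FILESVC_API_KEY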
ENV CONFIG_DIR=/data \
ADDR=:8080 \
MESH_ADDR=:8443 \
BEACON_ADDR=:9443 \
MAX_BODY_BYTES=8388608
COPY --from=build /out/filesvc /app/filesvc
EXPOSE 8080 8443 9443
ENTRYPOINT ["/app/filesvc"]

cmd/filesvc/main.go

@@ -0,0 +1,363 @@
package main
import (
"context"
"crypto/subtle"
"embed"
"encoding/json"
"errors"
"log"
"mime"
"net/http"
"os"
"os/signal"
"path/filepath"
"strconv"
"strings"
"syscall"
"time"
"git.send.nrw/sendnrw/decent-webui/internal/store"
)
//go:embed ui/*
var uiFS embed.FS
type Config struct {
ListenAddr string
DataDir string
APIKey string
}
func (c Config) BlobDir() string { return filepath.Join(c.DataDir, "blobs") }
func (c Config) MetaDir() string { return filepath.Join(c.DataDir, "meta") }
func (c Config) TempDir() string { return filepath.Join(c.DataDir, "tmp") }
func getenv(k, d string) string {
if v := os.Getenv(k); v != "" {
return v
}
return d
}
func LoadConfig() Config {
addr := getenv("FILESVC_LISTEN", ":8085")
datadir := getenv("FILESVC_DATA", "/data")
key := os.Getenv("FILESVC_API_KEY")
if key == "" {
log.Println("[warn] FILESVC_API_KEY is empty — set it for protection")
}
return Config{ListenAddr: addr, DataDir: datadir, APIKey: key}
}
type App struct {
cfg Config
store *store.Store
}
func main() {
cfg := LoadConfig()
for _, p := range []string{cfg.DataDir, cfg.BlobDir(), cfg.MetaDir(), cfg.TempDir()} {
if err := os.MkdirAll(p, 0o755); err != nil {
log.Fatalf("mkdir %s: %v", p, err)
}
}
st, err := store.Open(cfg.BlobDir(), cfg.MetaDir(), cfg.TempDir())
if err != nil {
log.Fatal(err)
}
app := &App{cfg: cfg, store: st}
mux := http.NewServeMux()
// API routes
mux.HandleFunc("/healthz", app.health)
mux.HandleFunc("/v1/files", app.with(app.files))
mux.HandleFunc("/v1/files/", app.with(app.fileByID)) // /v1/files/{id}[ /meta]
mux.HandleFunc("/v1/uploads", app.with(app.uploadsRoot)) // POST init
mux.HandleFunc("/v1/uploads/", app.with(app.uploadsByID)) // parts/complete/abort
// UI routes (embedded)
mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/" {
http.NotFound(w, r)
return
}
http.ServeFileFS(w, r, uiFS, "ui/index.html")
})
mux.Handle("/static/", http.StripPrefix("/static/", http.FileServerFS(uiFS)))
srv := &http.Server{
Addr: cfg.ListenAddr,
Handler: logMiddleware(securityHeaders(mux)),
ReadTimeout: 60 * time.Second,
ReadHeaderTimeout: 10 * time.Second,
WriteTimeout: 0,
IdleTimeout: 120 * time.Second,
}
go func() {
log.Printf("file-service listening on %s", cfg.ListenAddr)
if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
log.Fatalf("server: %v", err)
}
}()
stop := make(chan os.Signal, 1)
signal.Notify(stop, os.Interrupt, syscall.SIGTERM)
<-stop
log.Println("shutting down...")
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
_ = srv.Shutdown(ctx)
}
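// with wraps a handler with the optional API-key check: when FILESVC_API_KEY is set,
// every request must carry a matching X-API-Key header (compared in constant time).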
func (a *App) with(h func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
if a.cfg.APIKey != "" {
key := r.Header.Get("X-API-Key")
if subtle.ConstantTimeCompare([]byte(key), []byte(a.cfg.APIKey)) != 1 {
http.Error(w, "unauthorized", http.StatusUnauthorized)
return
}
}
h(w, r)
}
}
func logMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
start := time.Now()
next.ServeHTTP(w, r)
log.Printf("%s %s %s", r.Method, r.URL.Path, time.Since(start))
})
}
func securityHeaders(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// baseline security headers, always set
w.Header().Set("X-Content-Type-Options", "nosniff")
w.Header().Set("X-Frame-Options", "DENY")
w.Header().Set("Referrer-Policy", "no-referrer")
// For the UI (/, /static/...), CSS/JS and XHR may load from 'self'.
if r.URL.Path == "/" || strings.HasPrefix(r.URL.Path, "/static/") {
w.Header().Set("Content-Security-Policy",
"default-src 'self'; script-src 'self'; style-src 'self'; img-src 'self' data:; connect-src 'self'; object-src 'none'; base-uri 'self'; frame-ancestors 'none'")
} else {
// stricter policy for the API
w.Header().Set("Content-Security-Policy",
"default-src 'none'; object-src 'none'; base-uri 'none'; frame-ancestors 'none'")
}
next.ServeHTTP(w, r)
})
}
func (a *App) writeJSON(w http.ResponseWriter, status int, v any) {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(status)
_ = json.NewEncoder(w).Encode(v)
}
func (a *App) health(w http.ResponseWriter, _ *http.Request) {
w.WriteHeader(200)
_, _ = w.Write([]byte("ok"))
}
// --- Routes ---
// /v1/files (GET list, POST upload)
func (a *App) files(w http.ResponseWriter, r *http.Request) {
switch r.Method {
case http.MethodGet:
q := r.URL.Query().Get("q")
off := atoiDefault(r.URL.Query().Get("offset"), 0)
lim := atoiDefault(r.URL.Query().Get("limit"), 50)
items, next, err := a.store.List(r.Context(), q, off, lim)
if err != nil {
http.Error(w, err.Error(), 500)
return
}
a.writeJSON(w, 200, map[string]any{"items": items, "next": next})
case http.MethodPost:
r.Body = http.MaxBytesReader(w, r.Body, 1<<34) // ~16GiB
ct := r.Header.Get("Content-Type")
name := r.Header.Get("X-Filename")
meta := r.URL.Query().Get("meta")
if strings.HasPrefix(ct, "multipart/") {
if err := r.ParseMultipartForm(32 << 20); err != nil {
http.Error(w, err.Error(), 400)
return
}
f, hdr, err := r.FormFile("file")
if err != nil {
http.Error(w, err.Error(), 400)
return
}
defer f.Close()
if hdr != nil {
name = hdr.Filename
}
rec, err := a.store.Put(r.Context(), f, name, meta)
if err != nil {
http.Error(w, err.Error(), 500)
return
}
a.writeJSON(w, 201, rec)
return
}
rec, err := a.store.Put(r.Context(), r.Body, name, meta)
if err != nil {
http.Error(w, err.Error(), 500)
return
}
a.writeJSON(w, 201, rec)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
}
// /v1/files/{id} and /v1/files/{id}/meta
func (a *App) fileByID(w http.ResponseWriter, r *http.Request) {
// path after /v1/files/
rest := strings.TrimPrefix(r.URL.Path, "/v1/files/")
parts := strings.Split(rest, "/")
if len(parts) == 0 || parts[0] == "" {
http.NotFound(w, r)
return
}
id := parts[0]
if len(parts) == 2 && parts[1] == "meta" {
switch r.Method {
case http.MethodGet:
rec, err := a.store.GetMeta(r.Context(), id)
if err != nil {
http.Error(w, err.Error(), 404)
return
}
a.writeJSON(w, 200, rec)
case http.MethodPut:
var m map[string]string
if err := json.NewDecoder(r.Body).Decode(&m); err != nil {
http.Error(w, err.Error(), 400)
return
}
rec, err := a.store.UpdateMeta(r.Context(), id, m)
if err != nil {
http.Error(w, err.Error(), 500)
return
}
a.writeJSON(w, 200, rec)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
return
}
// /v1/files/{id}
switch r.Method {
case http.MethodGet:
f, rec, err := a.store.Open(r.Context(), id)
if err != nil {
http.Error(w, err.Error(), 404)
return
}
defer f.Close()
ctype := rec.ContentType
if ctype == "" {
ctype = mime.TypeByExtension(filepath.Ext(rec.Name))
}
if ctype == "" {
ctype = "application/octet-stream"
}
w.Header().Set("Content-Type", ctype)
w.Header().Set("Content-Length", strconv.FormatInt(rec.Size, 10))
w.Header().Set("Accept-Ranges", "bytes")
if r.URL.Query().Get("download") == "1" {
w.Header().Set("Content-Disposition", "attachment; filename=\""+rec.SafeName()+"\"")
}
http.ServeContent(w, r, rec.SafeName(), rec.CreatedAt, f)
case http.MethodDelete:
if err := a.store.Delete(r.Context(), id); err != nil {
http.Error(w, err.Error(), 404)
return
}
w.WriteHeader(204)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
}
// /v1/uploads (POST) and /v1/uploads/{uid}/ ...
func (a *App) uploadsRoot(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
w.WriteHeader(http.StatusMethodNotAllowed)
return
}
name := r.URL.Query().Get("name")
meta := r.URL.Query().Get("meta")
u, err := a.store.UploadInit(r.Context(), name, meta)
if err != nil {
http.Error(w, err.Error(), 500)
return
}
a.writeJSON(w, 201, u)
}
func (a *App) uploadsByID(w http.ResponseWriter, r *http.Request) {
rest := strings.TrimPrefix(r.URL.Path, "/v1/uploads/")
parts := strings.Split(rest, "/")
if len(parts) < 1 || parts[0] == "" {
http.NotFound(w, r)
return
}
uid := parts[0]
if len(parts) == 3 && parts[1] == "parts" {
n := atoiDefault(parts[2], -1)
if r.Method != http.MethodPut || n < 1 {
http.Error(w, "invalid part", 400)
return
}
if err := a.store.UploadPart(r.Context(), uid, n, r.Body); err != nil {
http.Error(w, err.Error(), 400)
return
}
w.WriteHeader(204)
return
}
if len(parts) == 2 && parts[1] == "complete" {
if r.Method != http.MethodPost {
w.WriteHeader(http.StatusMethodNotAllowed)
return
}
rec, err := a.store.UploadComplete(r.Context(), uid)
if err != nil {
http.Error(w, err.Error(), 400)
return
}
a.writeJSON(w, 201, rec)
return
}
if len(parts) == 1 && r.Method == http.MethodDelete {
if err := a.store.UploadAbort(r.Context(), uid); err != nil {
http.Error(w, err.Error(), 400)
return
}
w.WriteHeader(204)
return
}
http.NotFound(w, r)
}
func atoiDefault(s string, d int) int {
n, err := strconv.Atoi(s)
if err != nil {
return d
}
return n
}
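For reference, a minimal Go client sketch for these routes. It assumes the service listens on :8085 (the FILESVC_LISTEN default) and that FILESVC_API_KEY was set to "secret"; baseURL and apiKey are illustrative placeholders, not values defined anywhere in this repo.

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

func main() {
	baseURL := "http://localhost:8085" // assumed FILESVC_LISTEN default
	apiKey := "secret"                 // assumed FILESVC_API_KEY value

	// Single-shot upload: raw body, X-Filename header, optional ?meta=k=v,...
	req, _ := http.NewRequest(http.MethodPost,
		baseURL+"/v1/files?meta=project%3Dalpha", strings.NewReader("hello world"))
	req.Header.Set("X-API-Key", apiKey)
	req.Header.Set("X-Filename", "hello.txt")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	body, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println(resp.StatusCode, string(body)) // expect 201 and the FileRecord JSON

	// List files: GET /v1/files?limit=&offset=&q=
	req, _ = http.NewRequest(http.MethodGet, baseURL+"/v1/files?limit=10&offset=0", nil)
	req.Header.Set("X-API-Key", apiKey)
	resp, err = http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	body, _ = io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println(resp.StatusCode, string(body)) // {"items":[...],"next":N}
}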

cmd/filesvc/ui/app.js

@@ -0,0 +1,132 @@
(function() {
const $ = sel => document.querySelector(sel);
const $$ = sel => Array.from(document.querySelectorAll(sel));
const state = { offset: 0, limit: 20, total: null };
function loadCfg() {
try { return JSON.parse(localStorage.getItem('cfg')) || {}; } catch { return {}; }
}
function saveCfg(cfg) { localStorage.setItem('cfg', JSON.stringify(cfg)); }
const cfg = loadCfg();
$('#apiKey').value = cfg.apiKey || '';
$('#baseUrl').value = cfg.baseUrl || '';
$('#saveCfg').onclick = () => {
cfg.apiKey = $('#apiKey').value.trim();
cfg.baseUrl = $('#baseUrl').value.trim();
saveCfg(cfg);
refresh();
};
function api(path, opts = {}) {
const base = cfg.baseUrl || '';
opts.headers = Object.assign({ 'X-API-Key': cfg.apiKey || '' }, opts.headers || {});
return fetch(base + path, opts).then(r => {
if (!r.ok) throw new Error(`${r.status} ${r.statusText}`);
const ct = r.headers.get('content-type') || '';
if (ct.includes('application/json')) return r.json();
return r.text();
});
}
async function refresh() {
const q = encodeURIComponent($('#q').value || '');
try {
const data = await api(`/v1/files?limit=${state.limit}&offset=${state.offset}&q=${q}`);
renderTable(data.items || []);
const next = data.next || 0;
state.hasNext = next > 0;
state.nextOffset = next;
$('#pageInfo').textContent = `offset ${state.offset}`;
} catch (e) {
alert('List failed: ' + e.message);
}
}
function renderTable(items) {
const tbody = $('#files tbody');
tbody.innerHTML = '';
const tpl = $('#rowTpl').content;
for (const it of items) {
const tr = tpl.cloneNode(true);
tr.querySelector('.id').textContent = it.id;
tr.querySelector('.name').textContent = it.name;
tr.querySelector('.size').textContent = human(it.size);
tr.querySelector('.created').textContent = new Date(it.createdAt).toLocaleString();
const act = tr.querySelector('.actions');
const dl = btn('Download', async () => {
const base = cfg.baseUrl || '';
const url = `${base}/v1/files/${it.id}?download=1`;
const a = document.createElement('a');
a.href = url; a.download = '';
a.click();
});
const meta = btn('Meta', async () => showMeta(it.id));
const del = btn('Delete', async () => {
if (!confirm('Delete file?')) return;
try { await api(`/v1/files/${it.id}`, { method:'DELETE' }); refresh(); } catch(e){ alert('Delete failed: '+e.message); }
});
act.append(dl, meta, del);
tbody.appendChild(tr);
}
}
function btn(text, on) { const b = document.createElement('button'); b.textContent = text; b.onclick = on; return b; }
function human(n) { if (n < 1024) return n + ' B'; const u=['KB','MB','GB','TB']; let i=-1; do { n/=1024; i++; } while(n>=1024 && i<u.length-1); return n.toFixed(1)+' '+u[i]; }
$('#refresh').onclick = () => { state.offset = 0; refresh(); };
$('#q').addEventListener('keydown', e => { if (e.key==='Enter') { state.offset=0; refresh(); } });
$('#prev').onclick = () => { state.offset = Math.max(0, state.offset - state.limit); refresh(); };
$('#next').onclick = () => { if (state.hasNext) { state.offset = state.nextOffset; refresh(); } };
// Upload form
$('#uploadForm').addEventListener('submit', async (e) => {
e.preventDefault();
const f = $('#fileInput').files[0];
if (!f) return alert('Pick a file');
const meta = $('#metaInput').value.trim();
const fd = new FormData();
fd.append('file', f);
fd.append('meta', meta);
try { await api('/v1/files?meta='+encodeURIComponent(meta), { method: 'POST', body: fd }); refresh(); } catch(e){ alert('Upload failed: '+e.message); }
});
// Chunked upload
$('#chunkInit').onclick = async () => {
try {
const name = $('#chunkName').value.trim() || 'file';
const meta = $('#chunkMeta').value.trim();
const r = await api(`/v1/uploads?name=${encodeURIComponent(name)}&meta=${encodeURIComponent(meta)}`, { method:'POST' });
$('#chunkId').textContent = r.id;
} catch(e){ alert('Init failed: '+e.message); }
};
$('#chunkPut').onclick = async () => {
const uid = $('#chunkId').textContent.trim();
const part = parseInt($('#chunkPart').value,10) || 1;
const file = $('#chunkFile').files[0];
if (!uid) return alert('Init first');
if (!file) return alert('Choose a file (this will send the whole file as one part).');
try { await api(`/v1/uploads/${uid}/parts/${part}`, { method:'PUT', body: file }); alert('Part uploaded'); } catch(e){ alert('PUT failed: '+e.message); }
};
$('#chunkComplete').onclick = async () => {
const uid = $('#chunkId').textContent.trim(); if (!uid) return;
try { await api(`/v1/uploads/${uid}/complete`, { method:'POST' }); refresh(); } catch(e){ alert('Complete failed: '+e.message); }
};
$('#chunkAbort').onclick = async () => {
const uid = $('#chunkId').textContent.trim(); if (!uid) return;
try { await api(`/v1/uploads/${uid}`, { method:'DELETE' }); $('#chunkId').textContent=''; alert('Aborted'); } catch(e){ alert('Abort failed: '+e.message); }
};
async function showMeta(id) {
try {
const rec = await api(`/v1/files/${id}/meta`);
const json = prompt('Edit meta as JSON (object of string:string)', JSON.stringify(rec.meta||{}));
if (json == null) return;
const obj = JSON.parse(json);
await api(`/v1/files/${id}/meta`, { method:'PUT', headers:{'Content-Type':'application/json'}, body: JSON.stringify(obj) });
refresh();
} catch(e){ alert('Meta failed: '+e.message); }
}
refresh();
})();
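The chunked-upload buttons above drive POST /v1/uploads, PUT /v1/uploads/{id}/parts/{n} and POST /v1/uploads/{id}/complete. The same flow can be scripted outside the browser; a rough Go sketch under the same assumptions as before (service on :8085, API key "secret"), where the do() helper is invented for this example:

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"
)

const (
	baseURL = "http://localhost:8085" // assumed FILESVC_LISTEN default
	apiKey  = "secret"                // assumed FILESVC_API_KEY value
)

// do sends one request with the X-API-Key header and returns the response body.
func do(method, path string, body io.Reader) ([]byte, error) {
	req, err := http.NewRequest(method, baseURL+path, body)
	if err != nil {
		return nil, err
	}
	req.Header.Set("X-API-Key", apiKey)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 300 {
		return nil, fmt.Errorf("%s %s: %s", method, path, resp.Status)
	}
	return io.ReadAll(resp.Body)
}

func main() {
	// 1) init a session; the response carries the upload id
	out, err := do(http.MethodPost, "/v1/uploads?name=big.bin&meta=owner%3Dalice", nil)
	if err != nil {
		panic(err)
	}
	var sess struct {
		ID string `json:"id"`
	}
	if err := json.Unmarshal(out, &sess); err != nil {
		panic(err)
	}

	// 2) upload parts 1..n (each PUT body is one chunk)
	for i, chunk := range []string{"first-chunk-", "second-chunk"} {
		path := fmt.Sprintf("/v1/uploads/%s/parts/%d", sess.ID, i+1)
		if _, err := do(http.MethodPut, path, strings.NewReader(chunk)); err != nil {
			panic(err)
		}
	}

	// 3) complete (the server concatenates the parts in order); DELETE /v1/uploads/{id} would abort
	rec, err := do(http.MethodPost, "/v1/uploads/"+sess.ID+"/complete", nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(rec)) // the resulting FileRecord JSON
}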

cmd/filesvc/ui/index.html

@@ -0,0 +1,77 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>File Service UI</title>
<link rel="stylesheet" href="/static/ui/style.css" />
</head>
<body>
<header>
<h1>File Service</h1>
<div class="cfg">
<label>API Base <input id="baseUrl" value="" placeholder="(same origin)"/></label>
<label>API Key <input id="apiKey" placeholder="X-API-Key"/></label>
<button id="saveCfg">Save</button>
</div>
</header>
<main>
<section class="card">
<h2>Upload</h2>
<form id="uploadForm">
<input type="file" id="fileInput" name="file" required />
<input type="text" id="metaInput" placeholder="meta e.g. project=alpha,owner=alice" />
<button type="submit">Upload</button>
</form>
<details>
<summary>Chunked upload</summary>
<div class="chunk">
<input type="text" id="chunkName" placeholder="filename"/>
<input type="text" id="chunkMeta" placeholder="meta key=val,..."/>
<button id="chunkInit">Init</button>
<span id="chunkId"></span>
<div>
<input type="file" id="chunkFile"/>
<input type="number" id="chunkPart" min="1" value="1"/>
<button id="chunkPut">PUT Part</button>
<button id="chunkComplete">Complete</button>
<button id="chunkAbort">Abort</button>
</div>
</div>
</details>
</section>
<section class="card">
<h2>Files</h2>
<div class="toolbar">
<input type="search" id="q" placeholder="search by name"/>
<button id="refresh">Refresh</button>
</div>
<table id="files">
<thead>
<tr><th>ID</th><th>Name</th><th>Size</th><th>Created</th><th>Actions</th></tr>
</thead>
<tbody></tbody>
</table>
<div class="pager">
<button id="prev">Prev</button>
<span id="pageInfo"></span>
<button id="next">Next</button>
</div>
</section>
</main>
<template id="rowTpl">
<tr>
<td class="mono id"></td>
<td class="name"></td>
<td class="size"></td>
<td class="created"></td>
<td class="actions"></td>
</tr>
</template>
<script src="/static/ui/app.js"></script>
</body>
</html>

cmd/filesvc/ui/style.css

@@ -0,0 +1,19 @@
:root { --bg: #0b0f14; --fg: #e6eef8; --muted: #9bb0c8; --card: #121923; --accent: #5aa9ff; }
* { box-sizing: border-box; }
body { margin: 0; font-family: ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, sans-serif; background: var(--bg); color: var(--fg); }
header { display: flex; justify-content: space-between; align-items: center; padding: 16px 20px; background: #0e141b; border-bottom: 1px solid #2a3543; }
h1 { margin: 0; font-size: 20px; }
.cfg label { margin-right: 8px; font-size: 12px; color: var(--muted); }
.cfg input { margin-left: 6px; padding: 6px 8px; background: #0c1219; border: 1px solid #2a3543; color: var(--fg); border-radius: 6px; }
button { padding: 8px 12px; border: 1px solid #2a3543; background: #111a24; color: var(--fg); border-radius: 8px; cursor: pointer; }
button:hover { border-color: var(--accent); }
main { padding: 20px; max-width: 1100px; margin: 0 auto; }
.card { background: var(--card); border: 1px solid #1f2a38; border-radius: 14px; padding: 16px; margin-bottom: 16px; box-shadow: 0 6px 20px rgba(0,0,0,.25); }
.toolbar { display:flex; gap: 8px; align-items: center; margin-bottom: 10px; }
table { width: 100%; border-collapse: collapse; }
th, td { text-align: left; padding: 8px; border-bottom: 1px solid #213043; }
.mono { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; font-size: 12px; }
.name { max-width: 340px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
.pager { display:flex; gap: 8px; align-items:center; justify-content:flex-end; padding-top: 8px; }
.actions button { margin-right: 6px; }
summary { cursor: pointer; }

go.mod

@@ -0,0 +1,3 @@
module git.send.nrw/sendnrw/decent-webui
go 1.24.4

internal/store/store.go

@@ -0,0 +1,316 @@
package store
import (
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strings"
"sync"
"time"
)
type Store struct {
blobDir string
metaDir string
tmpDir string
mu sync.RWMutex
}
type FileRecord struct {
ID string `json:"id"`
Name string `json:"name"`
Hash string `json:"hash"`
Size int64 `json:"size"`
Meta map[string]string `json:"meta,omitempty"`
CreatedAt time.Time `json:"createdAt"`
ContentType string `json:"contentType,omitempty"`
}
func (fr FileRecord) SafeName() string {
n := strings.TrimSpace(fr.Name)
if n == "" {
return fr.ID
}
return n
}
func Open(blobDir, metaDir, tmpDir string) (*Store, error) {
for _, p := range []string{blobDir, metaDir, tmpDir} {
if err := os.MkdirAll(p, 0o755); err != nil {
return nil, err
}
}
return &Store{blobDir: blobDir, metaDir: metaDir, tmpDir: tmpDir}, nil
}
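// Put streams r into a temp file while hashing it, stores the blob content-addressed
// by its SHA-256 (identical content is stored once) and writes a metadata record for the new ID.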
func (s *Store) Put(ctx context.Context, r io.Reader, name, metaStr string) (*FileRecord, error) {
if name == "" {
name = "file"
}
tmp, err := os.CreateTemp(s.tmpDir, "upload-*")
if err != nil {
return nil, err
}
defer func() { tmp.Close(); os.Remove(tmp.Name()) }()
h := sha256.New()
n, err := io.Copy(io.MultiWriter(tmp, h), r)
if err != nil {
return nil, err
}
hash := hex.EncodeToString(h.Sum(nil))
blobPath := filepath.Join(s.blobDir, hash)
if _, err := os.Stat(blobPath); errors.Is(err, os.ErrNotExist) {
if err := os.Rename(tmp.Name(), blobPath); err != nil {
return nil, err
}
} else {
_ = os.Remove(tmp.Name())
}
rec := &FileRecord{
ID: newID(hash),
Name: name,
Hash: hash,
Size: n,
Meta: parseMeta(metaStr),
CreatedAt: time.Now().UTC(),
ContentType: "", // filled on GET via extension
}
if err := s.writeMeta(rec); err != nil {
return nil, err
}
return rec, nil
}
func (s *Store) Open(ctx context.Context, id string) (io.ReadSeekCloser, *FileRecord, error) {
rec, err := s.GetMeta(ctx, id)
if err != nil {
return nil, nil, err
}
f, err := os.Open(filepath.Join(s.blobDir, rec.Hash))
if err != nil {
return nil, nil, err
}
return f, rec, nil
}
func (s *Store) GetMeta(_ context.Context, id string) (*FileRecord, error) {
s.mu.RLock()
defer s.mu.RUnlock()
bb, err := os.ReadFile(filepath.Join(s.metaDir, id+".json"))
if err != nil {
return nil, err
}
var rec FileRecord
if err := json.Unmarshal(bb, &rec); err != nil {
return nil, err
}
return &rec, nil
}
func (s *Store) UpdateMeta(_ context.Context, id string, meta map[string]string) (*FileRecord, error) {
s.mu.Lock()
defer s.mu.Unlock()
path := filepath.Join(s.metaDir, id+".json")
bb, err := os.ReadFile(path)
if err != nil {
return nil, err
}
var rec FileRecord
if err := json.Unmarshal(bb, &rec); err != nil {
return nil, err
}
if rec.Meta == nil {
rec.Meta = map[string]string{}
}
for k, v := range meta {
rec.Meta[k] = v
}
nb, _ := json.Marshal(&rec)
if err := os.WriteFile(path, nb, 0o600); err != nil {
return nil, err
}
return &rec, nil
}
func (s *Store) Delete(_ context.Context, id string) error {
s.mu.Lock()
defer s.mu.Unlock()
// Only delete metadata; GC for unreferenced blobs is a separate task
return os.Remove(filepath.Join(s.metaDir, id+".json"))
}
func (s *Store) List(_ context.Context, q string, offset, limit int) ([]*FileRecord, int, error) {
if limit <= 0 || limit > 200 {
limit = 50
}
entries, err := os.ReadDir(s.metaDir)
if err != nil {
return nil, 0, err
}
var items []*FileRecord
for _, e := range entries {
if e.IsDir() || !strings.HasSuffix(e.Name(), ".json") {
continue
}
bb, err := os.ReadFile(filepath.Join(s.metaDir, e.Name()))
if err != nil {
continue
}
var rec FileRecord
if err := json.Unmarshal(bb, &rec); err != nil {
continue
}
if q == "" || strings.Contains(strings.ToLower(rec.Name), strings.ToLower(q)) {
items = append(items, &rec)
}
}
sort.Slice(items, func(i, j int) bool { return items[i].CreatedAt.After(items[j].CreatedAt) })
end := offset + limit
if offset > len(items) {
return []*FileRecord{}, 0, nil
}
if end > len(items) {
end = len(items)
}
next := 0
if end < len(items) {
next = end
}
return items[offset:end], next, nil
}
// --- Chunked uploads ---
type UploadSession struct {
ID string `json:"id"`
Name string `json:"name"`
Meta string `json:"meta"`
CreatedAt time.Time `json:"createdAt"`
}
func (s *Store) UploadInit(_ context.Context, name, meta string) (*UploadSession, error) {
id := newID(fmt.Sprintf("sess-%d", time.Now().UnixNano()))
us := &UploadSession{ID: id, Name: name, Meta: meta, CreatedAt: time.Now().UTC()}
// session file marker
if err := os.WriteFile(filepath.Join(s.tmpDir, id+".session"), []byte(name+"\n"+meta), 0o600); err != nil {
return nil, err
}
return us, nil
}
func (s *Store) partPath(uid string, n int) string {
return filepath.Join(s.tmpDir, fmt.Sprintf("%s.part.%06d", uid, n))
}
func (s *Store) UploadPart(_ context.Context, uid string, n int, r io.Reader) error {
if _, err := os.Stat(filepath.Join(s.tmpDir, uid+".session")); err != nil {
return err
}
f, err := os.Create(s.partPath(uid, n))
if err != nil {
return err
}
defer f.Close()
_, err = io.Copy(f, r)
return err
}
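// UploadComplete concatenates all parts of the session in lexical order (part file names
// are zero-padded, see partPath), streams the result through Put and cleans up the temp files.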
func (s *Store) UploadComplete(ctx context.Context, uid string) (*FileRecord, error) {
if _, err := os.Stat(filepath.Join(s.tmpDir, uid+".session")); err != nil {
return nil, err
}
matches, _ := filepath.Glob(filepath.Join(s.tmpDir, uid+".part.*"))
if len(matches) == 0 {
return nil, errors.New("no parts uploaded")
}
sort.Strings(matches)
pr, pw := io.Pipe()
go func() {
for _, p := range matches {
f, err := os.Open(p)
if err != nil {
_ = pw.CloseWithError(err)
return
}
if _, err := io.Copy(pw, f); err != nil {
_ = pw.CloseWithError(err)
_ = f.Close()
return
}
_ = f.Close()
}
_ = pw.Close()
}()
// Read first line of session file for name/meta (simple format)
bb, _ := os.ReadFile(filepath.Join(s.tmpDir, uid+".session"))
lines := strings.SplitN(string(bb), "\n", 2)
name := "file"
meta := ""
if len(lines) >= 1 && strings.TrimSpace(lines[0]) != "" {
name = strings.TrimSpace(lines[0])
}
if len(lines) == 2 {
meta = strings.TrimSpace(lines[1])
}
rec, err := s.Put(ctx, pr, name, meta)
if err != nil {
return nil, err
}
for _, p := range matches {
_ = os.Remove(p)
}
_ = os.Remove(filepath.Join(s.tmpDir, uid+".session"))
return rec, nil
}
func (s *Store) UploadAbort(_ context.Context, uid string) error {
if _, err := os.Stat(filepath.Join(s.tmpDir, uid+".session")); err != nil {
return err
}
matches, _ := filepath.Glob(filepath.Join(s.tmpDir, uid+".part.*"))
for _, p := range matches {
_ = os.Remove(p)
}
return os.Remove(filepath.Join(s.tmpDir, uid+".session"))
}
// --- helpers ---
func (s *Store) writeMeta(rec *FileRecord) error {
s.mu.Lock()
defer s.mu.Unlock()
bb, _ := json.Marshal(rec)
return os.WriteFile(filepath.Join(s.metaDir, rec.ID+".json"), bb, 0o600)
}
func newID(seed string) string {
h := sha256.Sum256([]byte(fmt.Sprintf("%s|%d", seed, time.Now().UnixNano())))
return hex.EncodeToString(h[:16])
}
func parseMeta(s string) map[string]string {
if s == "" {
return nil
}
m := map[string]string{}
for _, kv := range strings.Split(s, ",") {
kvp := strings.SplitN(kv, "=", 2)
if len(kvp) == 2 {
m[strings.TrimSpace(kvp[0])] = strings.TrimSpace(kvp[1])
}
}
return m
}
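A quick sketch of driving the store directly from inside the module (internal packages are importable only from this repository), for example as a throwaway test placed next to store.go; it relies only on the exported functions above:

package store_test

import (
	"context"
	"path/filepath"
	"strings"
	"testing"

	"git.send.nrw/sendnrw/decent-webui/internal/store"
)

func TestPutAndList(t *testing.T) {
	dir := t.TempDir()
	st, err := store.Open(filepath.Join(dir, "blobs"), filepath.Join(dir, "meta"), filepath.Join(dir, "tmp"))
	if err != nil {
		t.Fatal(err)
	}

	// Put accepts any io.Reader; meta uses the "k=v,k=v" form handled by parseMeta.
	rec, err := st.Put(context.Background(), strings.NewReader("hello"), "hello.txt", "project=alpha")
	if err != nil {
		t.Fatal(err)
	}
	if rec.Size != 5 || rec.Meta["project"] != "alpha" {
		t.Fatalf("unexpected record: %+v", rec)
	}

	// List filters by case-insensitive substring of the name and pages via offset/limit.
	items, next, err := st.List(context.Background(), "hello", 0, 10)
	if err != nil {
		t.Fatal(err)
	}
	if len(items) != 1 || next != 0 {
		t.Fatalf("unexpected list result: %d items, next=%d", len(items), next)
	}
}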