Compare commits
35 Commits
f3f4986bc1...main
137e3da6d4
148bc2c15c
cc19724277
851babe1e0
f14b685e43
fd6aeb8a27
4bfd2b9450
dfc38e2c86
3fe7577250
3cd320637c
7c5f9f7829
6c47c8c3e9
814be43be8
dc2cd1108e
5f52a630ea
a07e489700
0cf5e77b2a
cb132e90dd
53e6c6fb9f
026537fa05
2b965cb8f7
cb40a76eeb
28ae34b638
75d6dab008
fcbce7b443
bff86b8af8
22a6c43d1f
3789522abc
77bad7401e
3395785565
35183f267f
1d9851dabd
6dff0778e2
a12f1e7244
d53a6745aa
.gitea/workflows/build.yml (new file, 85 lines)
name: Build and Push Server

# This workflow runs on every push to the main branch.
on:
  push:
    branches:
      - main

jobs:
  build-and-push:
    # The job runs on a standard Ubuntu runner.
    runs-on: ubuntu-latest

    steps:
      # Step 1: Check out the repository code.
      - name: Checkout repository
        uses: actions/checkout@v4

      # Step 2: Set up the toolchain (Node.js via mise) for building the SvelteKit playground.
      - name: Setup Mise
        uses: jdx/mise-action@v3

      # Step 3: Set up pnpm, which is used for dependency management in the playground.
      - name: Set up pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 9

      - uses: acifani/setup-tinygo@v2
        with:
          tinygo-version: '0.37.0'

      # Step 4: Install the playground's dependencies.
      - name: Install playground dependencies
        working-directory: ./playground
        run: pnpm install --frozen-lockfile

      - name: Build marka wasm
        working-directory: ./playground
        run: ./wasm/build.sh

      # Step 5: Build the SvelteKit playground to generate static assets.
      - name: Build playground
        working-directory: ./playground
        run: pnpm build

      # Step 6: The Dockerfile expects the built playground assets in './server/playground'.
      # This step moves the build output from './playground/build' to the correct location.
      - name: Move playground build to server context
        run: |
          echo "Moving playground build output..."
          rm -rf ./server/playground
          mv ./playground/build ./server/playground
          echo "Move complete."

      - name: Set up Docker BuildX
        uses: docker/setup-buildx-action@v2
        with:
          config-inline: |
            [registry."git.max-richter.dev"]
              https = true
              insecure = false

      # Step 7: Log in to the Gitea container registry.
      # Requires a repository secret named REGISTRY_TOKEN containing an access token.
      - name: Login to Gitea Registry
        uses: docker/login-action@v3
        with:
          registry: git.max-richter.dev
          username: ${{ gitea.actor }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      # Step 8: Build the Docker image and push it to the Gitea registry.
      # The image is tagged with 'latest' and the commit SHA.
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./server/Dockerfile
          push: true
          tags: |
            git.max-richter.dev/${{ gitea.repository }}:latest
            git.max-richter.dev/${{ gitea.repository }}:${{ gitea.sha }}
          build-args: |
            GIT_COMMIT=${{ gitea.sha }}
@@ -33,6 +33,13 @@ func Yaml(input string, block template.Block) (value any, error error) {
 			continue
 		}
 
+		for _, alias := range f.PathAliases {
+			if value, ok := res[alias]; ok {
+				out = utils.SetPathValue(f.Path, value, out)
+				continue
+			}
+		}
+
 		if value, ok := renderUtils.GetValueFromPath(res, f.Path); ok {
 			out = utils.SetPathValue(f.Path, value, out)
 			continue
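The added loop lets a field resolve its value through any of its pathAlias names before falling back to the canonical path. A self-contained sketch of that lookup order, using a toy setValue helper as a stand-in for utils.SetPathValue (which is not shown in this diff):

package main

import "fmt"

// toy stand-in for utils.SetPathValue: treats the path as a flat key
func setValue(out map[string]any, path string, value any) map[string]any {
    out[path] = value
    return out
}

func main() {
    res := map[string]any{"author": "Max"} // decoded YAML front matter
    out := map[string]any{}

    path := "author.name"         // canonical field path from the template
    aliases := []string{"author"} // field.PathAliases

    // aliases win first, then the canonical path
    resolved := false
    for _, alias := range aliases {
        if v, ok := res[alias]; ok {
            out = setValue(out, path, v)
            resolved = true
            break
        }
    }
    if !resolved {
        if v, ok := res[path]; ok {
            out = setValue(out, path, v)
        }
    }

    fmt.Println(out) // map[author.name:Max]
}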
@@ -41,8 +41,6 @@ func DetectType(markdownContent string) (string, error) {
 }
 
 func MatchBlocks(markdownContent, templateContent string) ([]matcher.Block, error) {
-	markdownContent = strings.TrimSuffix(markdownContent, "\n")
-
 	tpl, err := template.CompileTemplate(templateContent)
 	if err != nil {
 		return nil, fmt.Errorf("failed to compile template -> %w", err)
@@ -66,13 +64,16 @@ func ParseFile(markdownContent string) (any, error) {
 }
 
 func ParseFileWithTemplate(markdownContent string, templateContent string) (any, error) {
-	markdownContent = strings.TrimSuffix(markdownContent, "\n")
-
 	tpl, err := template.CompileTemplate(templateContent)
 	if err != nil {
 		return nil, fmt.Errorf("failed to compile template -> %w", err)
 	}
 
+	for strings.HasSuffix(markdownContent, "\n") {
+		markdownContent = strings.TrimSuffix(markdownContent, "\n")
+	}
+	markdownContent = markdownContent + "\n"
+
 	blocks := matcher.MatchBlocksFuzzy(markdownContent, tpl, 0.3)
 
 	result, err := decoders.Parse(blocks)
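The removed TrimSuffix calls only stripped a single trailing newline; the new loop collapses any run of trailing newlines and then re-appends exactly one before fuzzy matching. A minimal sketch of the equivalent normalization using only the standard library:

package main

import (
    "fmt"
    "strings"
)

// normalize trims every trailing newline and re-appends exactly one,
// which is what the loop in ParseFileWithTemplate now does.
func normalize(s string) string {
    return strings.TrimRight(s, "\n") + "\n"
}

func main() {
    fmt.Printf("%q\n", normalize("# Recipe\n\n\n")) // "# Recipe\n"
    fmt.Printf("%q\n", normalize("# Recipe"))       // "# Recipe\n"
}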
@@ -87,7 +87,8 @@ My favourite baguette recipe
 			return;
 		}
 		try {
-			compileTemplate(templateValue);
+			const compiledTemplate = compileTemplate(templateValue);
+			console.log({ compiledTemplate, templateValue });
 
 			const result = templateValue
 				? parseMarkdownWithTemplate(markdownValue, templateValue)
Binary file not shown.
@@ -3,7 +3,9 @@
   path: .
   codec: yaml
   fields:
-    - path: "_type"
+    - path: _type
+      codec: const
+      value: Anki
 }
 ---
 
@@ -14,23 +14,28 @@
   - path: about
   - path: url
   - path: author.name
+    pathAlias: author
   - path: author._type
     codec: const
     value: Person
     hidden: true
   - path: datePublished
+  - path: url
+    pathAlias: link
   - path: articleSection
   - path: reviewRating.ratingValue
     pathAlias: rating
   - path: reviewRating.bestRating
+    value: 5
     codec: const
     hidden: true
+  - path: keywords
   - path: reviewRating.worstRating
+    value: 1
     codec: const
     hidden: true
 }
 ---
 
 # { headline }
 
 { articleBody }
@@ -10,18 +10,24 @@
   - path: "_type"
     codec: const
     value: Recipe
-  - path: link
+  - path: url
+    pathAlias: link
   - path: image
   - path: author._type
     codec: const
     hidden: true
     value: Person
   - path: author.name
+    pathAlias: author
   - path: author.email
   - path: datePublished
+    pathAlias: date
   - path: prepTime
   - path: cookTime
   - path: recipeYield
+    pathAlias:
+      - yield
+      - portion
   - path: cookingMethod
   - path: nutrition
   - path: recipeCategory
@@ -30,7 +36,6 @@
 ---
 
 # { name | text }
 
 { description | text }
 
 ## Ingredients
@@ -11,15 +11,19 @@
     codec: const
     value: Review
   - path: tmdbId
+  - path: link
+    pathAlias: link
   - path: image
   - path: author.name
+    pathAlias: author
   - path: author._type
     codec: const
     value: Person
     hidden: true
   - path: datePublished
-  - path: itemReviewed.name
+    pathAlias: date
   - path: reviewRating.ratingValue
+    pathAlias: rating
   - path: reviewRating.bestRating
     codec: const
     value: 5
@@ -33,7 +37,4 @@
 ---
 
 # { itemReviewed.name }
-{ keywords | hashtags }
-
-## Review
 { reviewBody }
@@ -37,7 +37,7 @@ func RenderFile(rawJSON []byte) ([]byte, error) {
 	}
 
 	// 5) validate JSON against schema
-	if schemaName, ok := data["@schema"].(string); ok {
+	if schemaName, ok := data["_schema"].(string); ok {
 		if validationErr := validator.ValidateSchema(data, schemaName); validationErr != nil {
 			return nil, fmt.Errorf("failed to validate schema: %w", validationErr)
 		}
@@ -9,11 +9,13 @@ cmd = "go build -o ./tmp/marka-server ./cmd/marka-server"
 bin = "./tmp/marka-server"
 
 # Command to run the application with arguments.
-full_bin = "./tmp/marka-server -root=../examples -addr=:8080"
+full_bin = "MARKA_API_KEY='SECRET' ./tmp/marka-server -root=/home/max/Notes/resources -addr=:8080 -playground-root=./playground"
 
 # Watch these file extensions.
 include_ext = ["go", "http"]
 
+include_dir = ["../registry","../renderer","../template"]
+
 # Ignore these directories.
 exclude_dir = ["tmp"]
 
@@ -1,10 +1,11 @@
 FROM golang:1.24.7 AS build
+ARG GIT_COMMIT=unknown
 WORKDIR /src
 COPY . .
 RUN --mount=type=cache,target=/go/pkg/mod \
     --mount=type=cache,target=/root/.cache/go-build \
     CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
-    go build -trimpath -ldflags="-s -w" \
+    go build -trimpath -ldflags="-s -w -X main.commit=${GIT_COMMIT}" \
     -o /out/server ./server/cmd/marka-server
 
 FROM scratch
@@ -2,6 +2,7 @@ package main
 
 import (
 	"flag"
+	"fmt"
 	"log"
 	"net/http"
 	"os"
@@ -12,6 +13,8 @@ import (
 	"git.max-richter.dev/max/marka/server/internal/handler"
 )
 
+var commit = "unknown"
+
 type multi []string
 
 func (m *multi) String() string { return strings.Join(*m, ",") }
@@ -21,6 +24,7 @@ func (m *multi) Set(v string) error {
 }
 
 func main() {
+	log.Printf("marka-server, commit=%s", commit)
 	var roots multi
 	flag.Var(&roots, "root", "repeatable; specify multiple -root flags")
 	addr := flag.String("addr", ":8080", "listen address")
@@ -58,7 +62,9 @@ func main() {
 	fsAdapter, err := adapters.NewLocalFsAdapter(absRoots)
 	must(err)
 
-	http.Handle("/", handler.NewHandler(fsAdapter))
+	apiKey := os.Getenv("MARKA_API_KEY")
+	fmt.Println(apiKey)
+	http.Handle("/", handler.NewHandler(fsAdapter, apiKey))
 
 	log.Printf("listening on %s, roots=%s", *addr, strings.Join(absRoots, ", "))
 	log.Fatal(http.ListenAndServe(*addr, nil))
@@ -2,6 +2,7 @@ package adapters
 
 import (
 	"errors"
+	"fmt"
 	"os"
 	"path/filepath"
 	"strings"
@@ -118,6 +119,7 @@ func (l *LocalFsAdapter) readFile(path string, root string) (*Entry, error) {
 	if err == nil {
 		content = parsedContent
 	} else {
+		fmt.Println(err)
 		// Fallback to raw content on parsing error
 		content = fileContent
 	}
@@ -255,6 +257,66 @@ func (l *LocalFsAdapter) Read(path string) (*Entry, error) {
 }
 
 func (l *LocalFsAdapter) Write(path string, content []byte) error {
+	pathParts := strings.Split(strings.Trim(path, "/"), "/")
+	if len(pathParts) == 0 || pathParts[0] == "" {
+		return errors.New("invalid path")
+	}
+
+	rootIdentifier := pathParts[0]
+	var targetRoot string
+	for _, r := range l.roots {
+		if filepath.Base(r) == rootIdentifier {
+			targetRoot = r
+			break
+		}
+	}
+
+	if targetRoot == "" {
+		return ErrNotFound
+	}
+
+	subPath := filepath.Join(pathParts[1:]...)
+	target := filepath.Join(targetRoot, subPath)
+
+	absTarget, err := filepath.Abs(target)
+	if err != nil {
+		return err
+	}
+	absRoot, err := filepath.Abs(targetRoot)
+	if err != nil {
+		return err
+	}
+	if !strings.HasPrefix(absTarget, absRoot) {
+		return errors.New("path escapes root")
+	}
+
+	dir := filepath.Dir(absTarget)
+	if err := os.MkdirAll(dir, 0o755); err != nil {
+		return fmt.Errorf("failed to create directory: %w", err)
+	}
+
+	err = os.WriteFile(absTarget, content, 0o644)
+	if err != nil {
+		return err
+	}
+
+	// Invalidate cache
+	l.mu.Lock()
+	defer l.mu.Unlock()
+	currentPath := path
+	for {
+		delete(l.cache, currentPath)
+		if currentPath == "/" {
+			break
+		}
+		lastSlash := strings.LastIndex(currentPath, "/")
+		if lastSlash <= 0 {
+			currentPath = "/"
+		} else {
+			currentPath = currentPath[:lastSlash]
+		}
+	}
+
 	return nil
 }
 
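A note on the escape check above: strings.HasPrefix over absolute paths also accepts sibling directories that merely share a name prefix (for example /data/notes-old next to a root of /data/notes). A stricter containment test could be built on filepath.Rel; this is a sketch of the idea, not the adapter's current behaviour:

package main

import (
    "fmt"
    "path/filepath"
    "strings"
)

// insideRoot reports whether target stays within root, given absolute paths.
func insideRoot(root, target string) (bool, error) {
    rel, err := filepath.Rel(root, target)
    if err != nil {
        return false, err
    }
    // a path that escapes the root starts with ".." after Rel
    return rel != ".." && !strings.HasPrefix(rel, ".."+string(filepath.Separator)), nil
}

func main() {
    ok, _ := insideRoot("/data/notes", "/data/notes-old/secret.md")
    fmt.Println(ok) // false, even though the string prefix matches
}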
@@ -2,6 +2,7 @@ package handler
 
 import (
 	"encoding/json"
+	"log"
 	"net/http"
 )
 
@@ -10,6 +11,7 @@ type ErrorResponse struct {
 }
 
 func writeError(w http.ResponseWriter, code int, err error) {
+	log.Printf("error: %s", err)
 	writeJSON(w, code, ErrorResponse{Error: err.Error()})
 }
 
@@ -2,14 +2,20 @@
 package handler
 
 import (
+	"encoding/json"
 	"errors"
+	"fmt"
+	"io"
 	"net/http"
+	"strings"
 
+	"git.max-richter.dev/max/marka/renderer"
 	"git.max-richter.dev/max/marka/server/internal/adapters"
 )
 
 type Handler struct {
 	adapter adapters.FileAdapter
+	apiKey  string
 }
 
 func (h *Handler) get(w http.ResponseWriter, target string) {
@@ -44,7 +50,55 @@ func (h *Handler) get(w http.ResponseWriter, target string) {
 	writeError(w, http.StatusInternalServerError, errors.New("unknown entry type"))
 }
 
-func (h *Handler) post(w http.ResponseWriter, target string) {
+func (h *Handler) post(w http.ResponseWriter, r *http.Request, target string) {
+	if h.apiKey != "" {
+		if r.Header.Get("Authentication") != h.apiKey {
+			writeError(w, http.StatusUnauthorized, errors.New("invalid api key"))
+			return
+		}
+	} else {
+		writeError(w, http.StatusUnauthorized, errors.New("invalid api key"))
+		return
+	}
+
+	body, err := io.ReadAll(r.Body)
+	if err != nil {
+		writeError(w, http.StatusBadRequest, fmt.Errorf("failed to decode body: %w", err))
+		return
+	}
+	defer r.Body.Close()
+
+	contentType := r.Header.Get("Content-Type")
+	isResource := strings.HasPrefix(contentType, "application/json") && strings.HasSuffix(target, ".md")
+
+	var contentToWrite []byte
+	if isResource {
+		renderedContent, err := renderer.RenderFile(body)
+		if err != nil {
+			writeError(w, http.StatusInternalServerError, fmt.Errorf("failed to render file: %w", err))
+			return
+		}
+		contentToWrite = renderedContent
+	} else {
+		contentToWrite = body
+	}
+
+	if err := h.adapter.Write(target, contentToWrite); err != nil {
+		writeError(w, http.StatusInternalServerError, fmt.Errorf("failed to write file: %w", err))
+		return
+	}
+
+	if isResource {
+		var data map[string]any
+		if err := json.Unmarshal(body, &data); err != nil && strings.HasSuffix(target, ".md") {
+			writeError(w, http.StatusInternalServerError, fmt.Errorf("failed to decode body: %w", err))
+			return
+		}
+		writeJSON(w, http.StatusOK, data)
+		return
+	}
+
+	writeJSON(w, http.StatusOK, map[string]any{"success": true})
 }
 
 func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
@@ -55,18 +109,21 @@ func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
 
 	target := cleanURLLike(reqPath)
 
+	fmt.Printf("[serve] %s %s\n", r.Method, target)
+
 	switch r.Method {
 	case http.MethodGet:
 		h.get(w, target)
 	case http.MethodPost:
-		h.post(w, target)
+		h.post(w, r, target)
 	default:
 		writeError(w, http.StatusMethodNotAllowed, errors.New("method not allowed"))
 	}
 }
 
-func NewHandler(adapter adapters.FileAdapter) http.Handler {
+func NewHandler(adapter adapters.FileAdapter, apiKey string) http.Handler {
 	return &Handler{
 		adapter: adapter,
+		apiKey:  apiKey,
 	}
 }
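For reference, the new POST path expects the value of MARKA_API_KEY in an Authentication header (the code reads Authentication, not the more common Authorization) and, for JSON bodies targeting .md files, renders the payload through renderer.RenderFile before writing. A minimal client sketch; the URL and target path are hypothetical:

package main

import (
    "bytes"
    "fmt"
    "net/http"
    "os"
)

func main() {
    payload := []byte(`{"_type":"Recipe","name":"Baguette"}`) // hypothetical resource

    // hypothetical local endpoint and target path
    req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/resources/baguette.md", bytes.NewReader(payload))
    if err != nil {
        panic(err)
    }
    req.Header.Set("Content-Type", "application/json")
    // the handler compares this header against the MARKA_API_KEY environment variable
    req.Header.Set("Authentication", os.Getenv("MARKA_API_KEY"))

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    fmt.Println(resp.Status)
}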
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
-import{l as o,a as r}from"../chunks/BsPC8ki2.js";export{o as load_css,r as start};
server/playground/_app/immutable/entry/start.DXq0HO7Q.js (new file, 1 line)
import{l as o,a as r}from"../chunks/Dqs1i6nG.js";export{o as load_css,r as start};
@@ -1 +1 @@
-import"../chunks/DsnmJJEf.js";import{i as u}from"../chunks/kadZwC1X.js";import{p as h,o as g,k as l,t as v,l as d,q as x,v as e,x as a,y as _}from"../chunks/bc36GTfJ.js";import{s as o}from"../chunks/B1lAeocp.js";import{s as k,p}from"../chunks/BsPC8ki2.js";const $={get error(){return p.error},get status(){return p.status}};k.updated.check;const m=$;var b=g("<h1> </h1> <p> </p>",1);function z(i,n){h(n,!1),u();var r=b(),t=l(r),c=e(t,!0);a(t);var s=_(t,2),f=e(s,!0);a(s),v(()=>{o(c,m.status),o(f,m.error?.message)}),d(i,r),x()}export{z as component};
+import"../chunks/DsnmJJEf.js";import{i as u}from"../chunks/kadZwC1X.js";import{p as h,o as g,k as l,t as v,l as d,q as x,v as e,x as a,y as _}from"../chunks/bc36GTfJ.js";import{s as o}from"../chunks/B1lAeocp.js";import{s as k,p}from"../chunks/Dqs1i6nG.js";const $={get error(){return p.error},get status(){return p.status}};k.updated.check;const m=$;var b=g("<h1> </h1> <p> </p>",1);function z(i,n){h(n,!1),u();var r=b(),t=l(r),c=e(t,!0);a(t);var s=_(t,2),f=e(s,!0);a(s),v(()=>{o(c,m.status),o(f,m.error?.message)}),d(i,r),x()}export{z as component};
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
-{"version":"1759702763367"}
+{"version":"1761298975653"}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -14,7 +14,7 @@ type yamlBlock struct {
 	Fields       []yamlField `yaml:"fields"`
 	ListTemplate string      `yaml:"listTemplate,omitempty"`
 	Hidden       bool        `yaml:"hidden,omitempty"`
-	PathAlias    []string    `yaml:"pathAlias,omitempty"`
+	PathAlias    StringOrSlice `yaml:"pathAlias,omitempty"`
 }
 
 type yamlField struct {
@@ -22,7 +22,7 @@ type yamlField struct {
 	Value     any    `yaml:"value,omitempty"`
 	Codec     string `yaml:"codec"`
 	Hidden    bool   `yaml:"hidden,omitempty"`
-	PathAlias string `yaml:"pathAlias,omitempty"`
+	PathAlias StringOrSlice `yaml:"pathAlias,omitempty"`
 }
 
 func parseYamlTemplate(input Slice) (block Block, err error) {
@@ -53,6 +53,7 @@ func parseYamlTemplate(input Slice) (block Block, err error) {
 	var fields []BlockField
 
 	for _, field := range blk.Fields {
+
 		if field.Path == "" {
 			return block, fmt.Errorf("failed to parse field: %v", field)
 		}
@@ -66,18 +67,22 @@ func parseYamlTemplate(input Slice) (block Block, err error) {
 			return block, fmt.Errorf("failed to parse codec -> %w", err)
 		}
 
-		fields = append(fields, BlockField{
+		block := BlockField{
 			Path:        field.Path,
+			PathAliases: field.PathAlias,
 			CodecType:   fieldCodec,
 			Value:       field.Value,
 			Hidden:      field.Hidden,
-		})
+		}
+
+		fields = append(fields, block)
 
 	}
 
 	return Block{
 		Type:         DataBlock,
 		Path:         blk.Path,
+		PathAliases:  blk.PathAlias,
 		Codec:        codec,
 		Fields:       fields,
 		ListTemplate: blk.ListTemplate,
@@ -1,6 +1,8 @@
 package template
 
-import "strings"
+import (
+	"strings"
+)
 
 // CompileTemplate scans once, emitting:
 // - data blocks: inner content between a line that's exactly "{" and a line that's exactly "}"
@@ -9,6 +9,7 @@ const (
 
 type BlockField struct {
 	Path        string
+	PathAliases []string
 	CodecType   CodecType
 	Value       any
 	Hidden      bool
@@ -17,6 +18,7 @@ type BlockField struct {
 type Block struct {
 	Type         BlockType
 	Path         string
+	PathAliases  []string
 	Codec        CodecType
 	ListTemplate string
 	Fields       []BlockField
template/yaml_types.go (new file, 31 lines)
package template

import "errors"

type StringOrSlice []string

func (s *StringOrSlice) UnmarshalYAML(unmarshal func(any) error) error {
	var single string
	if err := unmarshal(&single); err == nil {
		if single == "" {
			*s = nil
			return nil
		}
		*s = []string{single}
		return nil
	}

	var multi []string
	if err := unmarshal(&multi); err == nil {
		*s = multi
		return nil
	}

	var nothing *struct{}
	if err := unmarshal(&nothing); err == nil {
		*s = nil
		return nil
	}

	return errors.New("expected string, []string, or null")
}
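StringOrSlice is what lets pathAlias in the templates above be either a single string or a list. A hypothetical test of both shapes, assuming a goyaml-v2-style API (the UnmarshalYAML signature above matches that interface):

// hypothetical test in the same package; the yaml library choice is an assumption
package template

import (
    "reflect"
    "testing"

    yaml "gopkg.in/yaml.v2"
)

// mirrors the relevant part of yamlField
type aliasProbe struct {
    Path      string        `yaml:"path"`
    PathAlias StringOrSlice `yaml:"pathAlias,omitempty"`
}

func TestStringOrSliceShapes(t *testing.T) {
    var single, list aliasProbe

    if err := yaml.Unmarshal([]byte("path: author.name\npathAlias: author\n"), &single); err != nil {
        t.Fatal(err)
    }
    if err := yaml.Unmarshal([]byte("path: recipeYield\npathAlias:\n  - yield\n  - portion\n"), &list); err != nil {
        t.Fatal(err)
    }

    if !reflect.DeepEqual(single.PathAlias, StringOrSlice{"author"}) {
        t.Fatalf("got %v", single.PathAlias)
    }
    if !reflect.DeepEqual(list.PathAlias, StringOrSlice{"yield", "portion"}) {
        t.Fatalf("got %v", list.PathAlias)
    }
}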