feat: renderer
@@ -1,118 +1,33 @@
 package registry
 
 import (
-	"bytes"
 	"embed"
-	"encoding/json"
 	"fmt"
 	"path/filepath"
 	"strings"
-	"sync"
-
-	"github.com/santhosh-tekuri/jsonschema/v6"
 )
 
 //go:embed schema-org/*.json
 var schemasFS embed.FS
 
-var (
-	loadOnce    sync.Once
-	compiler    *jsonschema.Compiler
-	compileErr  error
-	schemaCache sync.Map // map[string]*jsonschema.Schema
-)
-
-// ValidateSchema validates instance against the Schema.org JSON Schema named `schemaName`.
-// Examples: ValidateSchema(inst, "Recipe"), ValidateSchema(inst, "schema:Recipe").
-func ValidateSchema(instance any, schemaName string) error {
-	if err := ensureCompiler(); err != nil {
-		return err
-	}
-
-	ref := normalizeRef(schemaName)
-
-	// Fast-path: reuse compiled schema if we have it.
-	if v, ok := schemaCache.Load(ref); ok {
-		if err := v.(*jsonschema.Schema).Validate(instance); err != nil {
-			return fmt.Errorf("validation failed: %w", err)
-		}
-		return nil
-	}
-
-	// Compile on first use, then cache.
-	sch, err := compiler.Compile(ref)
-	if err != nil {
-		return fmt.Errorf("failed to compile schema %q: %w", ref, err)
-	}
-	schemaCache.Store(ref, sch)
-
-	if err := sch.Validate(instance); err != nil {
-		return fmt.Errorf("validation failed: %w", err)
-	}
-	return nil
-}
-
-// --- internals ---
-
-func ensureCompiler() error {
-	loadOnce.Do(func() {
-		c := jsonschema.NewCompiler()
-
-		// Load all embedded schemas and register them under their $id (e.g., "schema:Recipe").
-		entries, err := schemasFS.ReadDir("schema-org")
-		if err != nil {
-			compileErr = fmt.Errorf("read schema directory: %w", err)
-			return
-		}
-
-		for _, e := range entries {
-			if e.IsDir() || !strings.HasSuffix(e.Name(), ".json") {
-				continue
-			}
-			raw, err := schemasFS.ReadFile(filepath.ToSlash("schema-org/" + e.Name()))
-			if err != nil {
-				compileErr = fmt.Errorf("read %s: %w", e.Name(), err)
-				return
-			}
-
-			// Unmarshal once for the compiler, but use $id from the raw JSON as the resource name.
-			js, err := jsonschema.UnmarshalJSON(bytes.NewReader(raw))
-			if err != nil {
-				compileErr = fmt.Errorf("unmarshal %s: %w", e.Name(), err)
-				return
-			}
-
-			id := extractID(raw)
-			if id == "" {
-				// Fallbacks if $id is missing; Schema.org dumps typically use "schema:<Name>".
-				base := strings.TrimSuffix(e.Name(), ".json")
-				id = "schema:" + base
-			}
-
-			if err := c.AddResource(id, js); err != nil {
-				compileErr = fmt.Errorf("add resource %s: %w", id, err)
-				return
-			}
-		}
-
-		compiler = c
-	})
-	return compileErr
-}
-
-func extractID(raw []byte) string {
-	var tmp struct {
-		ID string `json:"$id"`
-	}
-	_ = json.Unmarshal(raw, &tmp)
-	return strings.TrimSpace(tmp.ID)
-}
-
-func normalizeRef(name string) string {
-	n := strings.TrimSpace(name)
-	// Accept "Recipe" or "schema:Recipe" transparently.
-	if strings.HasPrefix(n, "schema:") || strings.HasPrefix(n, "http://") || strings.HasPrefix(n, "https://") {
-		return n
-	}
-	return "schema:" + n
-}
+func GetSchemas() ([][]byte, error) {
+	entries, err := schemasFS.ReadDir("schema-org")
+	if err != nil {
+		return nil, fmt.Errorf("read schema directory: %w", err)
+	}
+
+	var out [][]byte
+	for _, e := range entries {
+		if e.IsDir() || !strings.HasSuffix(e.Name(), ".json") {
+			continue
+		}
+		raw, err := schemasFS.ReadFile(filepath.ToSlash("schema-org/" + e.Name()))
+		if err != nil {
+			return nil, fmt.Errorf("read %s: %w", e.Name(), err)
+		}
+		out = append(out, raw)
+	}
+	return out, nil
+}
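Usage note (not part of this commit): with ValidateSchema removed, callers must compile the raw documents returned by GetSchemas themselves. The following is a minimal caller-side sketch that reuses the $id registration and the "schema:Recipe" reference from the removed code; the main package, the registry import path, and the sample instance are assumptions for illustration only.

package main

import (
	"bytes"
	"encoding/json"
	"log"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v6"

	"example.com/renderer/registry" // hypothetical module path for this package
)

func main() {
	raws, err := registry.GetSchemas()
	if err != nil {
		log.Fatal(err)
	}

	// Register every embedded document under its $id (e.g. "schema:Recipe"),
	// mirroring what the removed ensureCompiler did inside the package.
	// Documents without a $id are skipped here for brevity.
	c := jsonschema.NewCompiler()
	for _, raw := range raws {
		doc, err := jsonschema.UnmarshalJSON(bytes.NewReader(raw))
		if err != nil {
			log.Fatal(err)
		}
		var meta struct {
			ID string `json:"$id"`
		}
		_ = json.Unmarshal(raw, &meta)
		if id := strings.TrimSpace(meta.ID); id != "" {
			if err := c.AddResource(id, doc); err != nil {
				log.Fatal(err)
			}
		}
	}

	// Compile and validate against one schema; "Recipe" is an assumed example
	// taken from the removed doc comment, and the instance is illustrative only.
	sch, err := c.Compile("schema:Recipe")
	if err != nil {
		log.Fatal(err)
	}
	instance := map[string]any{"@type": "Recipe", "name": "Pancakes"}
	if err := sch.Validate(instance); err != nil {
		log.Fatalf("validation failed: %v", err)
	}
}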