feat: remove ai cmd

2025-11-05 09:44:42 +08:00
parent ad8ca2b1f7
commit bf7cfc8224
24 changed files with 0 additions and 3112 deletions

View File

@@ -1,250 +0,0 @@
package config
import (
"errors"
"fmt"
"io/fs"
"os"
"path/filepath"
"strings"
"sync"
"unicode"
"github.com/joho/godotenv"
)
const (
configDirEnvVar = "RENAMER_CONFIG_DIR"
configFileName = ".renamer"
defaultVendorSlug = "openai"
vendorTokenSuffix = "_TOKEN"
errTokenNotFoundFmt = "model token %q not found in %s or the process environment"
)
// TokenProvider resolves API tokens for AI models.
type TokenProvider interface {
ResolveModelToken(model string) (string, error)
}
// TokenStore loads model authentication tokens from ~/.config/.renamer.
type TokenStore struct {
configDir string
once sync.Once
values map[string]string
err error
}
// NewTokenStore constructs a TokenStore rooted at configDir. When configDir is
// empty, the RENAMER_CONFIG_DIR environment variable is consulted first and,
// failing that, the default path `$HOME/.config/.renamer` is used.
func NewTokenStore(configDir string) (*TokenStore, error) {
root := configDir
if root == "" {
if override := strings.TrimSpace(os.Getenv(configDirEnvVar)); override != "" {
root = override
} else {
home, err := os.UserHomeDir()
if err != nil {
return nil, fmt.Errorf("resolve user home: %w", err)
}
root = filepath.Join(home, ".config", configFileName)
}
}
return &TokenStore{
configDir: root,
values: make(map[string]string),
}, nil
}
// ConfigDir returns the directory the token store reads from.
func (s *TokenStore) ConfigDir() string {
return s.configDir
}
// ResolveModelToken returns the token for the provided model name. Model names
// are normalized to match the `<VENDOR>_TOKEN` convention documented
// for the CLI. Environment variables take precedence over file-based tokens.
func (s *TokenStore) ResolveModelToken(model string) (string, error) {
key := ModelTokenKey(model)
return s.lookup(key)
}
// lookup loads the requested key from either the environment or cached tokens.
func (s *TokenStore) lookup(key string) (string, error) {
if strings.TrimSpace(key) == "" {
return "", errors.New("token key must not be empty")
}
if val, ok := os.LookupEnv(key); ok && strings.TrimSpace(val) != "" {
return strings.TrimSpace(val), nil
}
if err := s.ensureLoaded(); err != nil {
return "", err
}
if val, ok := s.values[key]; ok && strings.TrimSpace(val) != "" {
return strings.TrimSpace(val), nil
}
return "", fmt.Errorf(errTokenNotFoundFmt, key, s.configFilePath())
}
func (s *TokenStore) ensureLoaded() error {
s.once.Do(func() {
s.err = s.loadConfigFile()
})
return s.err
}
func (s *TokenStore) loadConfigFile() error {
path := s.configFilePath()
envMap, err := godotenv.Read(path)
if errors.Is(err, fs.ErrNotExist) {
return nil
}
if err != nil {
return fmt.Errorf("load %s: %w", path, err)
}
for k, v := range envMap {
if strings.TrimSpace(k) == "" || strings.TrimSpace(v) == "" {
continue
}
s.values[k] = strings.TrimSpace(v)
}
return nil
}
func (s *TokenStore) configFilePath() string {
info, err := os.Stat(s.configDir)
if err == nil {
if info.IsDir() {
return filepath.Join(s.configDir, configFileName)
}
return s.configDir
}
if strings.HasSuffix(s.configDir, configFileName) {
return s.configDir
}
return filepath.Join(s.configDir, configFileName)
}
// ModelTokenKey derives the vendor token key for the provided model, following
// the `<VENDOR>_TOKEN` convention. When the vendor cannot be inferred the
// default OpenAI slug is returned.
func ModelTokenKey(model string) string {
slug := vendorSlugFromModel(model)
if slug == "" {
slug = defaultVendorSlug
}
return strings.ToUpper(slug) + vendorTokenSuffix
}
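// vendorSlugFromModel infers the vendor slug by checking, in order: an explicit
// "vendor/model"-style prefix, the vendorHintTable substrings, the leading
// alphanumeric token, and a slugified form of the whole name, falling back to
// the OpenAI slug.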
func vendorSlugFromModel(model string) string {
normalized := strings.ToLower(strings.TrimSpace(model))
if normalized == "" {
return defaultVendorSlug
}
if explicit := explicitVendorPrefix(normalized); explicit != "" {
return explicit
}
for _, mapping := range vendorHintTable {
for _, hint := range mapping.hints {
if strings.Contains(normalized, hint) {
return mapping.vendor
}
}
}
if firstToken := leadingToken(normalized); firstToken != "" {
return slugify(firstToken)
}
if slug := slugify(normalized); slug != "" {
return slug
}
return defaultVendorSlug
}
func explicitVendorPrefix(value string) string {
separators := func(r rune) bool {
switch r {
case '/', ':', '@':
return true
}
return false
}
parts := strings.FieldsFunc(value, separators)
if len(parts) > 1 {
if slug := slugify(parts[0]); slug != "" {
return slug
}
}
return ""
}
func leadingToken(value string) string {
for i, r := range value {
if unicode.IsLetter(r) || unicode.IsDigit(r) {
continue
}
if i == 0 {
return ""
}
return value[:i]
}
return value
}
var vendorHintTable = []struct {
vendor string
hints []string
}{
{vendor: "openai", hints: []string{"openai", "gpt", "o1", "chatgpt"}},
{vendor: "anthropic", hints: []string{"anthropic", "claude"}},
{vendor: "google", hints: []string{"google", "gemini", "learnlm", "palm"}},
{vendor: "mistral", hints: []string{"mistral", "mixtral", "ministral"}},
{vendor: "cohere", hints: []string{"cohere", "command", "r-plus"}},
{vendor: "moonshot", hints: []string{"moonshot"}},
{vendor: "zhipu", hints: []string{"zhipu", "glm"}},
{vendor: "alibaba", hints: []string{"dashscope", "qwen"}},
{vendor: "baidu", hints: []string{"wenxin", "ernie", "qianfan"}},
{vendor: "minimax", hints: []string{"minimax", "abab"}},
{vendor: "bytedance", hints: []string{"doubao", "bytedance"}},
{vendor: "baichuan", hints: []string{"baichuan"}},
{vendor: "deepseek", hints: []string{"deepseek"}},
{vendor: "xai", hints: []string{"grok", "xai"}},
}
func slugify(input string) string {
input = strings.TrimSpace(input)
if input == "" {
return ""
}
var b strings.Builder
b.Grow(len(input))
lastUnderscore := false
for _, r := range input {
switch {
case unicode.IsLetter(r) || unicode.IsDigit(r):
b.WriteRune(unicode.ToLower(r))
lastUnderscore = false
default:
if !lastUnderscore && b.Len() > 0 {
b.WriteByte('_')
lastUnderscore = true
}
}
}
return strings.Trim(b.String(), "_")
}
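
For context on how the deleted config package was consumed, here is a minimal usage sketch; the model names are illustrative and the package is internal to this module:

package main

import (
    "fmt"
    "log"

    aiconfig "github.com/rogeecn/renamer/internal/ai/config"
)

func main() {
    // An empty configDir falls back to RENAMER_CONFIG_DIR, then $HOME/.config/.renamer.
    store, err := aiconfig.NewTokenStore("")
    if err != nil {
        log.Fatal(err)
    }

    // "claude-3-5-sonnet" resolves to ANTHROPIC_TOKEN through the vendor hint table.
    fmt.Println(aiconfig.ModelTokenKey("claude-3-5-sonnet"))

    // Environment variables take precedence over the config file.
    token, err := store.ResolveModelToken("gpt-4o-mini") // looks up OPENAI_TOKEN
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(token != "")
}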

View File

@@ -1,158 +0,0 @@
package genkit
import (
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"strings"
"sync"
genaigo "github.com/firebase/genkit/go/ai"
"github.com/openai/openai-go/option"
aiconfig "github.com/rogeecn/renamer/internal/ai/config"
"github.com/rogeecn/renamer/internal/ai/prompt"
)
// WorkflowRunner executes a Genkit request and returns the structured response.
type WorkflowRunner interface {
Run(ctx context.Context, req Request) (Result, error)
}
// WorkflowFactory constructs workflow runners.
type WorkflowFactory func(ctx context.Context, opts Options) (WorkflowRunner, error)
var (
factoryMu sync.RWMutex
defaultFactory = func(ctx context.Context, opts Options) (WorkflowRunner, error) {
return NewWorkflow(ctx, opts)
}
currentFactory WorkflowFactory = defaultFactory
)
// OverrideWorkflowFactory allows tests to supply custom workflow implementations.
func OverrideWorkflowFactory(factory WorkflowFactory) {
factoryMu.Lock()
defer factoryMu.Unlock()
if factory == nil {
currentFactory = defaultFactory
return
}
currentFactory = factory
}
// ResetWorkflowFactory restores the default workflow constructor.
func ResetWorkflowFactory() {
OverrideWorkflowFactory(nil)
}
func getWorkflowFactory() WorkflowFactory {
factoryMu.RLock()
defer factoryMu.RUnlock()
return currentFactory
}
// ClientOptions configure the Genkit client.
type ClientOptions struct {
Model string
TokenProvider aiconfig.TokenProvider
RequestOptions []option.RequestOption
}
// Client orchestrates prompt execution against the configured workflow.
type Client struct {
model string
tokenProvider aiconfig.TokenProvider
requestOptions []option.RequestOption
}
// NewClient constructs a client with optional overrides.
func NewClient(opts ClientOptions) *Client {
model := strings.TrimSpace(opts.Model)
if model == "" {
model = DefaultModelName
}
return &Client{
model: model,
tokenProvider: opts.TokenProvider,
requestOptions: append([]option.RequestOption(nil), opts.RequestOptions...),
}
}
// Invocation describes a single Genkit call.
type Invocation struct {
Instructions string
Prompt prompt.RenamePrompt
Model string
}
// InvocationResult carries the parsed response alongside telemetry.
type InvocationResult struct {
PromptHash string
Model string
Response prompt.RenameResponse
ModelResponse *genaigo.ModelResponse
PromptJSON []byte
}
// Invoke executes the workflow and returns the structured response.
func (c *Client) Invoke(ctx context.Context, inv Invocation) (InvocationResult, error) {
model := strings.TrimSpace(inv.Model)
if model == "" {
model = c.model
}
if model == "" {
model = DefaultModelName
}
payload, err := json.Marshal(inv.Prompt)
if err != nil {
return InvocationResult{}, fmt.Errorf("marshal prompt payload: %w", err)
}
factory := getWorkflowFactory()
runner, err := factory(ctx, Options{
Model: model,
TokenProvider: c.tokenProvider,
RequestOptions: c.requestOptions,
})
if err != nil {
return InvocationResult{}, err
}
result, err := runner.Run(ctx, Request{
Instructions: inv.Instructions,
Payload: inv.Prompt,
})
if err != nil {
return InvocationResult{}, err
}
if strings.TrimSpace(result.Response.Model) == "" {
result.Response.Model = model
}
promptHash := hashPrompt(inv.Instructions, payload)
if strings.TrimSpace(result.Response.PromptHash) == "" {
result.Response.PromptHash = promptHash
}
return InvocationResult{
PromptHash: promptHash,
Model: result.Response.Model,
Response: result.Response,
ModelResponse: result.ModelResponse,
PromptJSON: payload,
}, nil
}
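// hashPrompt returns the hex-encoded SHA-256 digest of the trimmed instructions
// and the JSON prompt payload, separated by a newline.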
func hashPrompt(instructions string, payload []byte) string {
hasher := sha256.New()
hasher.Write([]byte(strings.TrimSpace(instructions)))
hasher.Write([]byte{'\n'})
hasher.Write(payload)
sum := hasher.Sum(nil)
return hex.EncodeToString(sum)
}
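
A rough sketch of how the removed Client was driven end to end; a real run needs a resolvable token and network access, and the prompt values below are invented:

package main

import (
    "context"
    "fmt"
    "log"

    aiconfig "github.com/rogeecn/renamer/internal/ai/config"
    "github.com/rogeecn/renamer/internal/ai/genkit" // assumed package path
    "github.com/rogeecn/renamer/internal/ai/prompt"
)

func main() {
    ctx := context.Background()

    store, err := aiconfig.NewTokenStore("")
    if err != nil {
        log.Fatal(err)
    }

    client := genkit.NewClient(genkit.ClientOptions{
        Model:         "gpt-4o-mini",
        TokenProvider: store,
    })

    res, err := client.Invoke(ctx, genkit.Invocation{
        Instructions: "Rename the files following the supplied policies.",
        Prompt: prompt.RenamePrompt{
            WorkingDir:   "/tmp/photos",
            TotalCount:   2,
            SequenceRule: prompt.SequenceRuleConfig{Style: "numeric", Width: 3, Start: 1, Separator: "-"},
            Policies:     prompt.NamingPolicyConfig{Casing: "kebab"},
        },
    })
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(res.Model, res.PromptHash, len(res.Response.Items))
}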

View File

@@ -1,3 +0,0 @@
package genkit
// Package genkit integrates the Google Genkit workflow with the CLI.

View File

@@ -1,166 +0,0 @@
package genkit
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
"strings"
"github.com/firebase/genkit/go/ai"
gogenkit "github.com/firebase/genkit/go/genkit"
oai "github.com/firebase/genkit/go/plugins/compat_oai/openai"
"github.com/openai/openai-go/option"
aiconfig "github.com/rogeecn/renamer/internal/ai/config"
"github.com/rogeecn/renamer/internal/ai/prompt"
)
const (
defaultModelName = "gpt-4o-mini"
// DefaultModelName exposes the default model identifier used by the CLI.
DefaultModelName = defaultModelName
)
var (
// ErrMissingToken indicates the workflow could not locate a model token.
ErrMissingToken = errors.New("genkit workflow: model token not available")
// ErrMissingInstructions indicates that no system instructions were provided for a run.
ErrMissingInstructions = errors.New("genkit workflow: instructions are required")
)
// DataGenerator executes the Genkit request and decodes the structured response.
type DataGenerator func(ctx context.Context, g *gogenkit.Genkit, opts ...ai.GenerateOption) (*prompt.RenameResponse, *ai.ModelResponse, error)
// Options configure a Workflow instance.
type Options struct {
Model string
TokenProvider aiconfig.TokenProvider
RequestOptions []option.RequestOption
Generator DataGenerator
}
// Request captures the input necessary to execute the Genkit workflow.
type Request struct {
Instructions string
Payload prompt.RenamePrompt
}
// Result bundles the typed response together with the raw Genkit metadata.
type Result struct {
Response prompt.RenameResponse
ModelResponse *ai.ModelResponse
}
// Workflow orchestrates execution of the Genkit rename pipeline.
type Workflow struct {
modelName string
genkit *gogenkit.Genkit
model ai.Model
generate DataGenerator
}
// NewWorkflow instantiates a Genkit workflow for the preferred model. When no
// model is provided it defaults to gpt-4o-mini. The workflow requires a token
// provider capable of resolving `<VENDOR>_TOKEN` secrets.
func NewWorkflow(ctx context.Context, opts Options) (*Workflow, error) {
modelName := strings.TrimSpace(opts.Model)
if modelName == "" {
modelName = defaultModelName
}
token, err := resolveToken(opts.TokenProvider, modelName)
if err != nil {
return nil, err
}
if strings.TrimSpace(token) == "" {
return nil, fmt.Errorf("%w for %q", ErrMissingToken, modelName)
}
plugin := &oai.OpenAI{
APIKey: token,
Opts: opts.RequestOptions,
}
g := gogenkit.Init(ctx, gogenkit.WithPlugins(plugin))
model := plugin.Model(g, modelName)
generator := opts.Generator
if generator == nil {
generator = func(ctx context.Context, g *gogenkit.Genkit, opts ...ai.GenerateOption) (*prompt.RenameResponse, *ai.ModelResponse, error) {
return gogenkit.GenerateData[prompt.RenameResponse](ctx, g, opts...)
}
}
return &Workflow{
modelName: modelName,
genkit: g,
model: model,
generate: generator,
}, nil
}
// Run executes the workflow with the provided request and decodes the response
// into the shared RenameResponse structure.
func (w *Workflow) Run(ctx context.Context, req Request) (Result, error) {
if w == nil {
return Result{}, errors.New("genkit workflow: nil receiver")
}
if strings.TrimSpace(req.Instructions) == "" {
return Result{}, ErrMissingInstructions
}
payload, err := json.Marshal(req.Payload)
if err != nil {
return Result{}, fmt.Errorf("marshal workflow payload: %w", err)
}
options := []ai.GenerateOption{
ai.WithModel(w.model),
ai.WithSystem(req.Instructions),
ai.WithPrompt(string(payload)),
}
response, raw, err := w.generate(ctx, w.genkit, options...)
if err != nil {
return Result{}, fmt.Errorf("genkit generate: %w", err)
}
return Result{
Response: deref(response),
ModelResponse: raw,
}, nil
}
func resolveToken(provider aiconfig.TokenProvider, model string) (string, error) {
if provider != nil {
if token, err := provider.ResolveModelToken(model); err == nil && strings.TrimSpace(token) != "" {
return token, nil
} else if err != nil {
return "", fmt.Errorf("resolve model token: %w", err)
}
}
if direct := strings.TrimSpace(os.Getenv(aiconfig.ModelTokenKey(model))); direct != "" {
return direct, nil
}
store, err := aiconfig.NewTokenStore("")
if err != nil {
return "", err
}
token, err := store.ResolveModelToken(model)
if err != nil {
return "", err
}
return token, nil
}
func deref(resp *prompt.RenameResponse) prompt.RenameResponse {
if resp == nil {
return prompt.RenameResponse{}
}
return *resp
}
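
Because the client resolves its runner through OverrideWorkflowFactory, tests could bypass this workflow entirely; a sketch with a hypothetical stubRunner:

package genkit_test

import (
    "context"
    "testing"

    "github.com/rogeecn/renamer/internal/ai/genkit" // assumed package path
    "github.com/rogeecn/renamer/internal/ai/prompt"
)

// stubRunner is a hypothetical test double that satisfies WorkflowRunner.
type stubRunner struct{}

func (stubRunner) Run(ctx context.Context, req genkit.Request) (genkit.Result, error) {
    return genkit.Result{Response: prompt.RenameResponse{
        Items: []prompt.RenameItem{{Original: "a.txt", Proposed: "file-001.txt", Sequence: 1}},
    }}, nil
}

func TestInvokeWithStub(t *testing.T) {
    genkit.OverrideWorkflowFactory(func(ctx context.Context, opts genkit.Options) (genkit.WorkflowRunner, error) {
        return stubRunner{}, nil
    })
    defer genkit.ResetWorkflowFactory()

    client := genkit.NewClient(genkit.ClientOptions{Model: "gpt-4o-mini"})
    res, err := client.Invoke(context.Background(), genkit.Invocation{
        Instructions: "rename files",
        Prompt:       prompt.RenamePrompt{WorkingDir: ".", TotalCount: 1},
    })
    if err != nil {
        t.Fatal(err)
    }
    if len(res.Response.Items) != 1 {
        t.Fatalf("expected 1 item, got %d", len(res.Response.Items))
    }
}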

View File

@@ -1,250 +0,0 @@
package plan
import (
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"github.com/rogeecn/renamer/internal/ai/prompt"
"github.com/rogeecn/renamer/internal/history"
)
// ApplyOptions describe the data required to apply an AI rename plan.
type ApplyOptions struct {
WorkingDir string
Candidates []Candidate
Response prompt.RenameResponse
Policies prompt.NamingPolicyConfig
PromptHash string
}
// Apply executes the AI rename plan and records the outcome in the ledger.
func Apply(ctx context.Context, opts ApplyOptions) (history.Entry, error) {
entry := history.Entry{Command: "ai"}
if len(opts.Response.Items) == 0 {
return entry, errors.New("ai apply: no items to apply")
}
candidateMap := make(map[string]Candidate, len(opts.Candidates))
for _, cand := range opts.Candidates {
key := strings.ToLower(strings.TrimSpace(cand.OriginalPath))
candidateMap[key] = cand
}
type operation struct {
sourceRel string
targetRel string
sourceAbs string
targetAbs string
depth int
}
ops := make([]operation, 0, len(opts.Response.Items))
seenTargets := make(map[string]string)
conflicts := make([]Conflict, 0)
for _, item := range opts.Response.Items {
key := strings.ToLower(strings.TrimSpace(item.Original))
cand, ok := candidateMap[key]
if !ok {
conflicts = append(conflicts, Conflict{
OriginalPath: item.Original,
Issue: "missing_candidate",
Details: "original file not found in current scope",
})
continue
}
target := strings.TrimSpace(item.Proposed)
if target == "" {
conflicts = append(conflicts, Conflict{
OriginalPath: item.Original,
Issue: "empty_target",
Details: "proposed name cannot be empty",
})
continue
}
normalizedTarget := filepath.ToSlash(filepath.Clean(target))
if strings.HasPrefix(normalizedTarget, "../") {
conflicts = append(conflicts, Conflict{
OriginalPath: item.Original,
Issue: "unsafe_target",
Details: "proposed path escapes the working directory",
})
continue
}
targetKey := strings.ToLower(normalizedTarget)
if existing, exists := seenTargets[targetKey]; exists && existing != item.Original {
conflicts = append(conflicts, Conflict{
OriginalPath: item.Original,
Issue: "duplicate_target",
Details: fmt.Sprintf("target %q reused", normalizedTarget),
})
continue
}
seenTargets[targetKey] = item.Original
sourceRel := filepath.ToSlash(cand.OriginalPath)
sourceAbs := filepath.Join(opts.WorkingDir, filepath.FromSlash(sourceRel))
targetAbs := filepath.Join(opts.WorkingDir, filepath.FromSlash(normalizedTarget))
if sameFile, err := isSameFile(sourceAbs, targetAbs); err != nil {
return history.Entry{}, err
} else if sameFile {
continue
}
if _, err := os.Stat(targetAbs); err == nil {
conflicts = append(conflicts, Conflict{
OriginalPath: item.Original,
Issue: "target_exists",
Details: fmt.Sprintf("target %q already exists", normalizedTarget),
})
continue
} else if !errors.Is(err, os.ErrNotExist) {
return history.Entry{}, err
}
op := operation{
sourceRel: sourceRel,
targetRel: normalizedTarget,
sourceAbs: sourceAbs,
targetAbs: targetAbs,
depth: cand.Depth,
}
ops = append(ops, op)
}
if len(conflicts) > 0 {
return history.Entry{}, ApplyConflictError{Conflicts: conflicts}
}
if len(ops) == 0 {
return entry, nil
}
sort.SliceStable(ops, func(i, j int) bool {
return ops[i].depth > ops[j].depth
})
done := make([]history.Operation, 0, len(ops))
revert := func() error {
for i := len(done) - 1; i >= 0; i-- {
op := done[i]
src := filepath.Join(opts.WorkingDir, filepath.FromSlash(op.To))
dst := filepath.Join(opts.WorkingDir, filepath.FromSlash(op.From))
if err := os.Rename(src, dst); err != nil && !errors.Is(err, os.ErrNotExist) {
return err
}
}
return nil
}
for _, op := range ops {
if err := ctx.Err(); err != nil {
_ = revert()
return history.Entry{}, err
}
if dir := filepath.Dir(op.targetAbs); dir != "" {
if err := os.MkdirAll(dir, 0o755); err != nil {
_ = revert()
return history.Entry{}, err
}
}
if err := os.Rename(op.sourceAbs, op.targetAbs); err != nil {
_ = revert()
return history.Entry{}, err
}
done = append(done, history.Operation{
From: op.sourceRel,
To: op.targetRel,
})
}
if len(done) == 0 {
return entry, nil
}
entry.Operations = done
aiMetadata := history.AIMetadata{
PromptHash: opts.PromptHash,
Model: opts.Response.Model,
Policies: prompt.NamingPolicyConfig{
Prefix: opts.Policies.Prefix,
Casing: opts.Policies.Casing,
AllowSpaces: opts.Policies.AllowSpaces,
KeepOriginalOrder: opts.Policies.KeepOriginalOrder,
ForbiddenTokens: append([]string(nil), opts.Policies.ForbiddenTokens...),
},
BatchSize: len(done),
}
if hash, err := ResponseDigest(opts.Response); err == nil {
aiMetadata.ResponseHash = hash
}
entry.AttachAIMetadata(aiMetadata)
if err := history.Append(opts.WorkingDir, entry); err != nil {
_ = revert()
return history.Entry{}, err
}
return entry, nil
}
// ApplyConflictError signals that the plan contained conflicts that block apply.
type ApplyConflictError struct {
Conflicts []Conflict
}
func (e ApplyConflictError) Error() string {
if len(e.Conflicts) == 0 {
return "ai apply: conflicts detected"
}
return fmt.Sprintf("ai apply: %d conflicts detected", len(e.Conflicts))
}
// ResponseDigest returns a hash of the AI response payload for ledger metadata.
func ResponseDigest(resp prompt.RenameResponse) (string, error) {
data, err := json.Marshal(resp)
if err != nil {
return "", err
}
return hashBytes(data), nil
}
func hashBytes(data []byte) string {
sum := sha256.Sum256(data)
return hex.EncodeToString(sum[:])
}
func isSameFile(a, b string) (bool, error) {
infoA, err := os.Stat(a)
if err != nil {
return false, err
}
infoB, err := os.Stat(b)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
return false, nil
}
return false, err
}
return os.SameFile(infoA, infoB), nil
}
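
The apply flow, as the code above reads, was meant to be called roughly like this; the working directory, candidates, and response are invented:

package main

import (
    "context"
    "errors"
    "fmt"
    "log"

    "github.com/rogeecn/renamer/internal/ai/plan" // assumed package path
    "github.com/rogeecn/renamer/internal/ai/prompt"
)

func main() {
    entry, err := plan.Apply(context.Background(), plan.ApplyOptions{
        WorkingDir: "/tmp/photos",
        Candidates: []plan.Candidate{{OriginalPath: "IMG_001.jpg", Extension: ".jpg"}},
        Response: prompt.RenameResponse{
            Items: []prompt.RenameItem{{Original: "IMG_001.jpg", Proposed: "trip-001.jpg", Sequence: 1}},
        },
        Policies:   prompt.NamingPolicyConfig{Casing: "kebab"},
        PromptHash: "sha256-of-the-prompt", // normally taken from InvocationResult.PromptHash
    })

    var conflicts plan.ApplyConflictError
    switch {
    case errors.As(err, &conflicts):
        for _, c := range conflicts.Conflicts {
            fmt.Printf("%s: %s (%s)\n", c.OriginalPath, c.Issue, c.Details)
        }
    case err != nil:
        log.Fatal(err)
    default:
        fmt.Printf("renamed %d files\n", len(entry.Operations))
    }
}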

View File

@@ -1,67 +0,0 @@
package plan
import (
"fmt"
"sort"
"strings"
"github.com/rogeecn/renamer/internal/ai/prompt"
)
// Conflict describes an issue detected in an AI rename plan.
type Conflict struct {
OriginalPath string
Issue string
Details string
}
func detectConflicts(items []prompt.RenameItem) []Conflict {
conflicts := make([]Conflict, 0)
if len(items) == 0 {
return conflicts
}
targets := make(map[string][]prompt.RenameItem)
sequences := make([]int, 0, len(items))
for _, item := range items {
key := strings.ToLower(strings.TrimSpace(item.Proposed))
if key != "" {
targets[key] = append(targets[key], item)
}
if item.Sequence > 0 {
sequences = append(sequences, item.Sequence)
}
}
for _, entries := range targets {
if len(entries) <= 1 {
continue
}
for _, entry := range entries {
conflicts = append(conflicts, Conflict{
OriginalPath: entry.Original,
Issue: "duplicate_target",
Details: fmt.Sprintf("target %q is used by multiple entries", entries[0].Proposed),
})
}
}
if len(sequences) > 0 {
sort.Ints(sequences)
expected := 1
for _, seq := range sequences {
if seq != expected {
conflicts = append(conflicts, Conflict{
Issue: "sequence_gap",
Details: fmt.Sprintf("expected sequence %d but found %d", expected, seq),
})
expected = seq
}
expected++
}
}
return conflicts
}

View File

@@ -1,3 +0,0 @@
package plan
// Package plan handles AI rename plan validation, mapping, and persistence helpers.

View File

@@ -1,39 +0,0 @@
package plan
import (
"encoding/json"
"errors"
"fmt"
"io/fs"
"os"
"github.com/rogeecn/renamer/internal/ai/prompt"
)
// SaveResponse writes the AI rename response to disk for later editing.
func SaveResponse(path string, resp prompt.RenameResponse) error {
data, err := json.MarshalIndent(resp, "", " ")
if err != nil {
return fmt.Errorf("marshal ai plan: %w", err)
}
if err := os.WriteFile(path, append(data, '\n'), 0o644); err != nil {
return fmt.Errorf("write ai plan %s: %w", path, err)
}
return nil
}
// LoadResponse reads an edited AI rename response from disk.
func LoadResponse(path string) (prompt.RenameResponse, error) {
data, err := os.ReadFile(path)
if err != nil {
if errors.Is(err, fs.ErrNotExist) {
return prompt.RenameResponse{}, fmt.Errorf("plan file %s not found", path)
}
return prompt.RenameResponse{}, fmt.Errorf("read plan file %s: %w", path, err)
}
var resp prompt.RenameResponse
if err := json.Unmarshal(data, &resp); err != nil {
return prompt.RenameResponse{}, fmt.Errorf("parse plan file %s: %w", path, err)
}
return resp, nil
}
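
These two helpers support a save, hand-edit, reload loop; a short sketch:

package main

import (
    "fmt"
    "log"

    "github.com/rogeecn/renamer/internal/ai/plan" // assumed package path
    "github.com/rogeecn/renamer/internal/ai/prompt"
)

func main() {
    resp := prompt.RenameResponse{
        Items: []prompt.RenameItem{{Original: "a.txt", Proposed: "notes-001.txt", Sequence: 1}},
        Model: "gpt-4o-mini",
    }
    if err := plan.SaveResponse("rename-plan.json", resp); err != nil {
        log.Fatal(err)
    }

    // ... the user reviews and edits rename-plan.json by hand ...

    edited, err := plan.LoadResponse("rename-plan.json")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(len(edited.Items), "items after editing")
}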

View File

@@ -1,190 +0,0 @@
package plan
import (
"fmt"
"path/filepath"
"sort"
"strings"
)
// Candidate represents a file considered for AI renaming.
type Candidate struct {
OriginalPath string
SizeBytes int64
Depth int
Extension string
}
// MapInput configures the mapping behaviour.
type MapInput struct {
Candidates []Candidate
SequenceWidth int
}
// PreviewPlan aggregates entries ready for preview rendering.
type PreviewPlan struct {
Entries []PreviewEntry
Warnings []string
PromptHash string
Model string
Conflicts []Conflict
}
// PreviewEntry is a single row in the preview table.
type PreviewEntry struct {
Sequence int
SequenceLabel string
OriginalPath string
ProposedPath string
SanitizedSegments []string
Notes string
}
// MapResponse converts a validated response into a preview plan.
func MapResponse(input MapInput, validation ValidationResult) (PreviewPlan, error) {
if input.SequenceWidth <= 0 {
input.SequenceWidth = 3
}
itemByOriginal := make(map[string]promptRenameItem, len(validation.Items))
for _, item := range validation.Items {
key := normalizePath(item.Original)
itemByOriginal[key] = promptRenameItem{
Original: item.Original,
Proposed: item.Proposed,
Sequence: item.Sequence,
Notes: item.Notes,
}
}
entries := make([]PreviewEntry, 0, len(input.Candidates))
for _, candidate := range input.Candidates {
key := normalizePath(candidate.OriginalPath)
item, ok := itemByOriginal[key]
if !ok {
return PreviewPlan{}, fmt.Errorf("ai plan: missing response for %s", candidate.OriginalPath)
}
label := formatSequence(item.Sequence, input.SequenceWidth)
sanitized := computeSanitizedSegments(candidate.OriginalPath, item.Proposed)
entries = append(entries, PreviewEntry{
Sequence: item.Sequence,
SequenceLabel: label,
OriginalPath: candidate.OriginalPath,
ProposedPath: item.Proposed,
SanitizedSegments: sanitized,
Notes: item.Notes,
})
}
return PreviewPlan{
Entries: entries,
Warnings: append([]string(nil), validation.Warnings...),
PromptHash: validation.PromptHash,
Model: validation.Model,
Conflicts: detectConflicts(validation.Items),
}, nil
}
type promptRenameItem struct {
Original string
Proposed string
Sequence int
Notes string
}
func formatSequence(seq, width int) string {
if seq <= 0 {
return ""
}
label := fmt.Sprintf("%0*d", width, seq)
if len(label) < len(fmt.Sprintf("%d", seq)) {
return fmt.Sprintf("%d", seq)
}
return label
}
func normalizePath(path string) string {
return strings.TrimSpace(strings.ReplaceAll(path, "\\", "/"))
}
func computeSanitizedSegments(original, proposed string) []string {
origStem := stem(original)
propStem := stem(proposed)
origTokens := tokenize(origStem)
propTokens := make(map[string]struct{}, len(origTokens))
for _, token := range tokenize(propStem) {
propTokens[token] = struct{}{}
}
var sanitized []string
seen := make(map[string]struct{})
for _, token := range origTokens {
if _, ok := propTokens[token]; ok {
continue
}
if _, already := seen[token]; already {
continue
}
if isNumericToken(token) {
continue
}
seen[token] = struct{}{}
sanitized = append(sanitized, token)
}
if len(sanitized) == 0 {
return nil
}
sort.Strings(sanitized)
return sanitized
}
func stem(path string) string {
base := filepath.Base(path)
ext := filepath.Ext(base)
if ext != "" {
return base[:len(base)-len(ext)]
}
return base
}
func tokenize(value string) []string {
fields := strings.FieldsFunc(value, func(r rune) bool {
if r >= '0' && r <= '9' {
return false
}
if r >= 'a' && r <= 'z' {
return false
}
if r >= 'A' && r <= 'Z' {
return false
}
return true
})
tokens := make([]string, 0, len(fields))
for _, field := range fields {
normalized := strings.ToLower(field)
if normalized == "" {
continue
}
tokens = append(tokens, normalized)
}
return tokens
}
func isNumericToken(token string) bool {
if token == "" {
return false
}
for _, r := range token {
if r < '0' || r > '9' {
return false
}
}
return true
}
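
MapResponse glues validated model output back onto the traversal candidates; a sketch with invented data (ValidationResult normally comes from the validator shown further below):

package main

import (
    "fmt"
    "log"

    "github.com/rogeecn/renamer/internal/ai/plan" // assumed package path
    "github.com/rogeecn/renamer/internal/ai/prompt"
)

func main() {
    input := plan.MapInput{
        Candidates: []plan.Candidate{
            {OriginalPath: "IMG_001.jpg", Extension: ".jpg"},
            {OriginalPath: "IMG_002.jpg", Extension: ".jpg"},
        },
        SequenceWidth: 3,
    }
    validation := plan.ValidationResult{
        Items: []prompt.RenameItem{
            {Original: "IMG_001.jpg", Proposed: "trip-001.jpg", Sequence: 1},
            {Original: "IMG_002.jpg", Proposed: "trip-002.jpg", Sequence: 2},
        },
        Model: "gpt-4o-mini",
    }
    preview, err := plan.MapResponse(input, validation)
    if err != nil {
        log.Fatal(err)
    }
    for _, e := range preview.Entries {
        fmt.Printf("%s  %s -> %s\n", e.SequenceLabel, e.OriginalPath, e.ProposedPath)
    }
}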

View File

@@ -1,76 +0,0 @@
package plan
import (
"context"
"errors"
"io/fs"
"path/filepath"
"strings"
"github.com/rogeecn/renamer/internal/listing"
"github.com/rogeecn/renamer/internal/traversal"
)
// CollectCandidates walks the scope described by req and returns eligible file candidates.
func CollectCandidates(ctx context.Context, req *listing.ListingRequest) ([]Candidate, error) {
if req == nil {
return nil, errors.New("collect candidates: request cannot be nil")
}
if err := req.Validate(); err != nil {
return nil, err
}
w := traversal.NewWalker()
extensions := make(map[string]struct{}, len(req.Extensions))
for _, ext := range req.Extensions {
extensions[ext] = struct{}{}
}
candidates := make([]Candidate, 0)
err := w.Walk(
req.WorkingDir,
req.Recursive,
false, // directories are not considered candidates
req.IncludeHidden,
req.MaxDepth,
func(relPath string, entry fs.DirEntry, depth int) error {
select {
case <-ctx.Done():
return ctx.Err()
default:
}
if entry.IsDir() {
return nil
}
relSlash := filepath.ToSlash(relPath)
ext := strings.ToLower(filepath.Ext(entry.Name()))
if len(extensions) > 0 {
if _, match := extensions[ext]; !match {
return nil
}
}
info, err := entry.Info()
if err != nil {
return err
}
candidates = append(candidates, Candidate{
OriginalPath: relSlash,
SizeBytes: info.Size(),
Depth: depth,
Extension: filepath.Ext(entry.Name()),
})
return nil
},
)
if err != nil {
return nil, err
}
return candidates, nil
}
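
A sketch of calling CollectCandidates; the listing.ListingRequest field names are inferred from how this function reads the request and may not match the real struct exactly:

package main

import (
    "context"
    "fmt"
    "log"

    "github.com/rogeecn/renamer/internal/ai/plan" // assumed package path
    "github.com/rogeecn/renamer/internal/listing"
)

func main() {
    req := &listing.ListingRequest{
        WorkingDir: ".",
        Recursive:  true,
        Extensions: []string{".jpg", ".png"}, // lowercase, dot-prefixed, matching the lookup above
    }
    cands, err := plan.CollectCandidates(context.Background(), req)
    if err != nil {
        log.Fatal(err)
    }
    for _, c := range cands {
        fmt.Println(c.OriginalPath, c.SizeBytes, c.Depth)
    }
}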

View File

@@ -1,423 +0,0 @@
package plan
import (
"fmt"
"path/filepath"
"sort"
"strings"
"unicode"
"github.com/rogeecn/renamer/internal/ai/prompt"
)
// Validator checks the AI response for completeness and uniqueness rules.
type Validator struct {
expected []string
expectedSet map[string]struct{}
policies prompt.NamingPolicyConfig
bannedSet map[string]struct{}
}
// ValidationResult captures the successfully decoded response data.
type ValidationResult struct {
Items []prompt.RenameItem
Warnings []string
PromptHash string
Model string
}
// InvalidItem describes a single response entry that failed validation.
type InvalidItem struct {
Index int
Original string
Proposed string
Reason string
}
// ValidationError aggregates the issues discovered during validation.
type ValidationError struct {
Result ValidationResult
MissingOriginals []string
UnexpectedOriginals []string
DuplicateOriginals map[string]int
DuplicateProposed map[string][]string
InvalidItems []InvalidItem
PolicyViolations []PolicyViolation
}
// PolicyViolation captures a single naming-policy breach.
type PolicyViolation struct {
Original string
Proposed string
Rule string
Message string
}
func (e *ValidationError) Error() string {
if e == nil {
return ""
}
parts := make([]string, 0, 5)
if len(e.MissingOriginals) > 0 {
parts = append(parts, fmt.Sprintf("missing %d originals", len(e.MissingOriginals)))
}
if len(e.UnexpectedOriginals) > 0 {
parts = append(parts, fmt.Sprintf("unexpected %d originals", len(e.UnexpectedOriginals)))
}
if len(e.DuplicateOriginals) > 0 {
parts = append(parts, fmt.Sprintf("%d duplicate originals", len(e.DuplicateOriginals)))
}
if len(e.DuplicateProposed) > 0 {
parts = append(parts, fmt.Sprintf("%d duplicate proposed names", len(e.DuplicateProposed)))
}
if len(e.InvalidItems) > 0 {
parts = append(parts, fmt.Sprintf("%d invalid items", len(e.InvalidItems)))
}
if len(e.PolicyViolations) > 0 {
parts = append(parts, fmt.Sprintf("%d policy violations", len(e.PolicyViolations)))
}
summary := strings.Join(parts, ", ")
if summary == "" {
summary = "response validation failed"
}
return fmt.Sprintf("ai response validation failed: %s", summary)
}
// HasIssues indicates whether the validation error captured any rule breaks.
func (e *ValidationError) HasIssues() bool {
if e == nil {
return false
}
return len(e.MissingOriginals) > 0 ||
len(e.UnexpectedOriginals) > 0 ||
len(e.DuplicateOriginals) > 0 ||
len(e.DuplicateProposed) > 0 ||
len(e.InvalidItems) > 0 ||
len(e.PolicyViolations) > 0
}
// NewValidator constructs a validator for the supplied original filenames. Any
// whitespace-only entries are discarded. Duplicate originals are collapsed to
// ensure consistent coverage checks.
func NewValidator(originals []string, policies prompt.NamingPolicyConfig, bannedTerms []string) Validator {
expectedSet := make(map[string]struct{}, len(originals))
deduped := make([]string, 0, len(originals))
for _, original := range originals {
trimmed := strings.TrimSpace(original)
if trimmed == "" {
continue
}
if _, exists := expectedSet[trimmed]; exists {
continue
}
expectedSet[trimmed] = struct{}{}
deduped = append(deduped, trimmed)
}
bannedSet := make(map[string]struct{})
for _, token := range bannedTerms {
lower := strings.ToLower(strings.TrimSpace(token))
if lower == "" {
continue
}
bannedSet[lower] = struct{}{}
}
policies.Casing = strings.ToLower(strings.TrimSpace(policies.Casing))
policies.Prefix = strings.TrimSpace(policies.Prefix)
policies.ForbiddenTokens = append([]string(nil), policies.ForbiddenTokens...)
return Validator{
expected: deduped,
expectedSet: expectedSet,
policies: policies,
bannedSet: bannedSet,
}
}
// Validate ensures the AI response covers each expected original exactly once
// and that the proposed filenames are unique.
func (v Validator) Validate(resp prompt.RenameResponse) (ValidationResult, error) {
result := ValidationResult{
Items: cloneItems(resp.Items),
Warnings: append([]string(nil), resp.Warnings...),
PromptHash: resp.PromptHash,
Model: resp.Model,
}
if len(resp.Items) == 0 {
err := &ValidationError{
Result: result,
MissingOriginals: append([]string(nil), v.expected...),
}
return result, err
}
seenOriginals := make(map[string]int, len(resp.Items))
seenProposed := make(map[string][]string, len(resp.Items))
unexpectedSet := map[string]struct{}{}
invalidItems := make([]InvalidItem, 0)
policyViolations := make([]PolicyViolation, 0)
for idx, item := range resp.Items {
original := strings.TrimSpace(item.Original)
proposed := strings.TrimSpace(item.Proposed)
if original == "" {
invalidItems = append(invalidItems, InvalidItem{
Index: idx,
Original: item.Original,
Proposed: item.Proposed,
Reason: "original is empty",
})
} else {
seenOriginals[original]++
if _, ok := v.expectedSet[original]; !ok {
unexpectedSet[original] = struct{}{}
}
}
if proposed == "" {
invalidItems = append(invalidItems, InvalidItem{
Index: idx,
Original: item.Original,
Proposed: item.Proposed,
Reason: "proposed is empty",
})
} else {
seenProposed[proposed] = append(seenProposed[proposed], original)
}
policyViolations = append(policyViolations, v.evaluatePolicies(item)...)
}
missing := make([]string, 0)
for _, original := range v.expected {
if seenOriginals[original] == 0 {
missing = append(missing, original)
}
}
duplicateOriginals := make(map[string]int)
for original, count := range seenOriginals {
if count > 1 {
duplicateOriginals[original] = count
}
}
duplicateProposed := make(map[string][]string)
for proposed, sources := range seenProposed {
if len(sources) > 1 {
filtered := make([]string, 0, len(sources))
for _, src := range sources {
if strings.TrimSpace(src) != "" {
filtered = append(filtered, src)
}
}
if len(filtered) > 1 {
duplicateProposed[proposed] = filtered
}
}
}
unexpected := orderedKeys(unexpectedSet)
if len(missing) == 0 &&
len(unexpected) == 0 &&
len(duplicateOriginals) == 0 &&
len(duplicateProposed) == 0 &&
len(invalidItems) == 0 &&
len(policyViolations) == 0 {
return result, nil
}
err := &ValidationError{
Result: result,
MissingOriginals: missing,
UnexpectedOriginals: unexpected,
DuplicateOriginals: duplicateOriginals,
DuplicateProposed: duplicateProposed,
InvalidItems: invalidItems,
PolicyViolations: policyViolations,
}
return result, err
}
// Expectation returns a copy of the expected originals tracked by the validator.
func (v Validator) Expectation() []string {
return append([]string(nil), v.expected...)
}
func cloneItems(items []prompt.RenameItem) []prompt.RenameItem {
if len(items) == 0 {
return nil
}
cp := make([]prompt.RenameItem, len(items))
copy(cp, items)
return cp
}
func orderedKeys(set map[string]struct{}) []string {
if len(set) == 0 {
return nil
}
out := make([]string, 0, len(set))
for k := range set {
out = append(out, k)
}
sort.Strings(out)
return out
}
func (v Validator) evaluatePolicies(item prompt.RenameItem) []PolicyViolation {
violations := make([]PolicyViolation, 0)
proposed := strings.TrimSpace(item.Proposed)
if proposed == "" {
return violations
}
base := filepath.Base(proposed)
stem := base
if ext := filepath.Ext(base); ext != "" {
stem = base[:len(base)-len(ext)]
}
stemLower := strings.ToLower(stem)
if v.policies.Prefix != "" {
prefixLower := strings.ToLower(v.policies.Prefix)
if !strings.HasPrefix(stemLower, prefixLower) {
violations = append(violations, PolicyViolation{
Original: item.Original,
Proposed: item.Proposed,
Rule: "prefix",
Message: fmt.Sprintf("expected prefix %q", v.policies.Prefix),
})
}
}
if !v.policies.AllowSpaces && strings.Contains(stem, " ") {
violations = append(violations, PolicyViolation{
Original: item.Original,
Proposed: item.Proposed,
Rule: "spaces",
Message: "spaces are not allowed",
})
}
if v.policies.Casing != "" {
if ok, message := matchesCasing(stem, v.policies); !ok {
violations = append(violations, PolicyViolation{
Original: item.Original,
Proposed: item.Proposed,
Rule: "casing",
Message: message,
})
}
}
if len(v.bannedSet) > 0 {
tokens := tokenize(stemLower)
for _, token := range tokens {
if _, ok := v.bannedSet[token]; ok {
violations = append(violations, PolicyViolation{
Original: item.Original,
Proposed: item.Proposed,
Rule: "banned",
Message: fmt.Sprintf("contains banned token %q", token),
})
break
}
}
}
return violations
}
func matchesCasing(stem string, policies prompt.NamingPolicyConfig) (bool, string) {
core := coreStem(stem, policies.Prefix)
switch policies.Casing {
case "kebab":
if strings.Contains(core, " ") {
return false, "expected kebab-case (no spaces)"
}
if strings.ContainsAny(core, "ABCDEFGHIJKLMNOPQRSTUVWXYZ") {
return false, "expected kebab-case (use lowercase letters)"
}
return true, ""
case "snake":
if strings.Contains(core, " ") {
return false, "expected snake_case (no spaces)"
}
if strings.ContainsAny(core, "ABCDEFGHIJKLMNOPQRSTUVWXYZ-") {
return false, "expected snake_case (lowercase letters with underscores)"
}
return true, ""
case "camel":
if strings.ContainsAny(core, " -_") {
return false, "expected camelCase (no separators)"
}
runes := []rune(core)
if len(runes) == 0 {
return false, "expected camelCase descriptive text"
}
if !unicode.IsLower(runes[0]) {
return false, "expected camelCase (first letter lowercase)"
}
return true, ""
case "pascal":
if strings.ContainsAny(core, " -_") {
return false, "expected PascalCase (no separators)"
}
runes := []rune(core)
if len(runes) == 0 {
return false, "expected PascalCase descriptive text"
}
if !unicode.IsUpper(runes[0]) {
return false, "expected PascalCase (first letter uppercase)"
}
return true, ""
case "title":
words := strings.Fields(strings.ReplaceAll(core, "-", " "))
if len(words) == 0 {
return false, "expected Title Case words"
}
for _, word := range words {
runes := []rune(word)
if len(runes) == 0 {
continue
}
if !unicode.IsUpper(runes[0]) {
return false, "expected Title Case (capitalize each word)"
}
}
return true, ""
default:
return true, ""
}
}
func coreStem(stem, prefix string) string {
trimmed := stem
if prefix != "" {
lowerStem := strings.ToLower(trimmed)
lowerPrefix := strings.ToLower(prefix)
if strings.HasPrefix(lowerStem, lowerPrefix) {
trimmed = trimmed[len(prefix):]
trimmed = strings.TrimLeft(trimmed, "-_ ")
}
}
i := 0
runes := []rune(trimmed)
for i < len(runes) {
r := runes[i]
if unicode.IsDigit(r) || r == '-' || r == '_' || r == ' ' {
i++
continue
}
break
}
return string(runes[i:])
}
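
A sketch of the validation step; the response value is invented and the error handling mirrors the *ValidationError type above:

package main

import (
    "errors"
    "fmt"

    "github.com/rogeecn/renamer/internal/ai/plan" // assumed package path
    "github.com/rogeecn/renamer/internal/ai/prompt"
)

func main() {
    originals := []string{"IMG_001.jpg", "IMG_002.jpg"}
    policies := prompt.NamingPolicyConfig{Prefix: "trip", Casing: "kebab"}
    v := plan.NewValidator(originals, policies, []string{"tmp"})

    resp := prompt.RenameResponse{Items: []prompt.RenameItem{
        {Original: "IMG_001.jpg", Proposed: "trip-001.jpg", Sequence: 1},
        {Original: "IMG_002.jpg", Proposed: "trip-002.jpg", Sequence: 2},
    }}

    result, err := v.Validate(resp)
    var verr *plan.ValidationError
    if errors.As(err, &verr) {
        for _, pv := range verr.PolicyViolations {
            fmt.Printf("%s -> %s breaks %s: %s\n", pv.Original, pv.Proposed, pv.Rule, pv.Message)
        }
        return
    }
    fmt.Println("validated", len(result.Items), "items")
}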

View File

@@ -1,201 +0,0 @@
package prompt
import (
"errors"
"path/filepath"
"sort"
"strings"
"time"
)
const defaultMaxSamples = 10
// SequenceRule captures the numbering instructions forwarded to the AI.
type SequenceRule struct {
Style string
Width int
Start int
Separator string
}
// PolicyConfig enumerates naming policy directives for the AI prompt.
type PolicyConfig struct {
Prefix string
Casing string
AllowSpaces bool
KeepOriginalOrder bool
ForbiddenTokens []string
}
// SampleCandidate represents a traversal sample considered for inclusion in the prompt.
type SampleCandidate struct {
RelativePath string
SizeBytes int64
Depth int
}
// BuildInput aggregates the contextual data required to assemble the AI prompt payload.
type BuildInput struct {
WorkingDir string
Samples []SampleCandidate
TotalCount int
Sequence SequenceRule
Policies PolicyConfig
BannedTerms []string
Metadata map[string]string
}
// Builder constructs RenamePrompt payloads from traversal context.
type Builder struct {
maxSamples int
clock func() time.Time
}
// Option mutates builder configuration.
type Option func(*Builder)
// WithMaxSamples overrides the number of sampled files emitted in the prompt (default 10).
func WithMaxSamples(n int) Option {
return func(b *Builder) {
if n > 0 {
b.maxSamples = n
}
}
}
// WithClock injects a deterministic clock for metadata generation.
func WithClock(clock func() time.Time) Option {
return func(b *Builder) {
if clock != nil {
b.clock = clock
}
}
}
// NewBuilder instantiates a Builder with default configuration.
func NewBuilder(opts ...Option) *Builder {
builder := &Builder{
maxSamples: defaultMaxSamples,
// Evaluate the clock at call time; the method value time.Now().UTC would
// freeze the timestamp taken at construction.
clock: func() time.Time { return time.Now().UTC() },
}
for _, opt := range opts {
opt(builder)
}
return builder
}
// Build produces a RenamePrompt populated with traversal context.
func (b *Builder) Build(input BuildInput) (RenamePrompt, error) {
if strings.TrimSpace(input.WorkingDir) == "" {
return RenamePrompt{}, errors.New("prompt builder: working directory required")
}
if input.TotalCount <= 0 {
return RenamePrompt{}, errors.New("prompt builder: total count must be positive")
}
if strings.TrimSpace(input.Sequence.Style) == "" {
return RenamePrompt{}, errors.New("prompt builder: sequence style required")
}
if input.Sequence.Width <= 0 {
return RenamePrompt{}, errors.New("prompt builder: sequence width must be positive")
}
if input.Sequence.Start <= 0 {
return RenamePrompt{}, errors.New("prompt builder: sequence start must be positive")
}
if strings.TrimSpace(input.Policies.Casing) == "" {
return RenamePrompt{}, errors.New("prompt builder: naming casing required")
}
samples := make([]SampleCandidate, 0, len(input.Samples))
for _, sample := range input.Samples {
if strings.TrimSpace(sample.RelativePath) == "" {
continue
}
samples = append(samples, sample)
}
sort.Slice(samples, func(i, j int) bool {
a := strings.ToLower(samples[i].RelativePath)
b := strings.ToLower(samples[j].RelativePath)
if a == b {
return samples[i].RelativePath < samples[j].RelativePath
}
return a < b
})
max := b.maxSamples
if max <= 0 || max > len(samples) {
max = len(samples)
}
promptSamples := make([]PromptSample, 0, max)
for i := 0; i < max; i++ {
sample := samples[i]
ext := filepath.Ext(sample.RelativePath)
promptSamples = append(promptSamples, PromptSample{
OriginalName: sample.RelativePath,
Extension: ext,
SizeBytes: sample.SizeBytes,
PathDepth: sample.Depth,
})
}
banned := normalizeBannedTerms(input.BannedTerms)
metadata := make(map[string]string, len(input.Metadata)+1)
for k, v := range input.Metadata {
if strings.TrimSpace(k) == "" || strings.TrimSpace(v) == "" {
continue
}
metadata[k] = v
}
metadata["generatedAt"] = b.clock().Format(time.RFC3339)
return RenamePrompt{
WorkingDir: promptAbs(input.WorkingDir),
Samples: promptSamples,
TotalCount: input.TotalCount,
SequenceRule: SequenceRuleConfig{
Style: input.Sequence.Style,
Width: input.Sequence.Width,
Start: input.Sequence.Start,
Separator: input.Sequence.Separator,
},
Policies: NamingPolicyConfig{
Prefix: input.Policies.Prefix,
Casing: input.Policies.Casing,
AllowSpaces: input.Policies.AllowSpaces,
KeepOriginalOrder: input.Policies.KeepOriginalOrder,
ForbiddenTokens: append([]string(nil), input.Policies.ForbiddenTokens...),
},
BannedTerms: banned,
Metadata: metadata,
}, nil
}
func promptAbs(dir string) string {
return strings.TrimSpace(dir)
}
func normalizeBannedTerms(values []string) []string {
unique := make(map[string]struct{})
for _, value := range values {
trimmed := strings.TrimSpace(value)
if trimmed == "" {
continue
}
lower := strings.ToLower(trimmed)
if lower == "" {
continue
}
unique[lower] = struct{}{}
}
if len(unique) == 0 {
return nil
}
terms := make([]string, 0, len(unique))
for term := range unique {
terms = append(terms, term)
}
sort.Strings(terms)
return terms
}
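
A sketch of building a prompt payload; the sequence style string and paths are illustrative:

package main

import (
    "fmt"
    "log"

    "github.com/rogeecn/renamer/internal/ai/prompt"
)

func main() {
    builder := prompt.NewBuilder(prompt.WithMaxSamples(5))
    p, err := builder.Build(prompt.BuildInput{
        WorkingDir: "/tmp/photos",
        TotalCount: 42,
        Samples: []prompt.SampleCandidate{
            {RelativePath: "IMG_001.jpg", SizeBytes: 1024, Depth: 0},
        },
        Sequence:    prompt.SequenceRule{Style: "numeric", Width: 3, Start: 1, Separator: "-"},
        Policies:    prompt.PolicyConfig{Casing: "kebab"},
        BannedTerms: []string{"Temp", "temp"},
    })
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(p.TotalCount, len(p.Samples), p.BannedTerms) // 42 1 [temp]
}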

View File

@@ -1,3 +0,0 @@
package prompt
// Package prompt contains helpers for building AI prompt payloads.

View File

@@ -1,53 +0,0 @@
package prompt
// RenamePrompt captures the structured payload sent to the Genkit workflow.
type RenamePrompt struct {
WorkingDir string `json:"workingDir"`
Samples []PromptSample `json:"samples"`
TotalCount int `json:"totalCount"`
SequenceRule SequenceRuleConfig `json:"sequenceRule"`
Policies NamingPolicyConfig `json:"policies"`
BannedTerms []string `json:"bannedTerms,omitempty"`
Metadata map[string]string `json:"metadata,omitempty"`
}
// PromptSample represents a sampled file from the traversal scope.
type PromptSample struct {
OriginalName string `json:"originalName"`
Extension string `json:"extension"`
SizeBytes int64 `json:"sizeBytes"`
PathDepth int `json:"pathDepth"`
}
// SequenceRuleConfig captures numbering directives for the AI prompt.
type SequenceRuleConfig struct {
Style string `json:"style"`
Width int `json:"width"`
Start int `json:"start"`
Separator string `json:"separator"`
}
// NamingPolicyConfig enumerates naming policies forwarded to the AI.
type NamingPolicyConfig struct {
Prefix string `json:"prefix,omitempty"`
Casing string `json:"casing"`
AllowSpaces bool `json:"allowSpaces,omitempty"`
KeepOriginalOrder bool `json:"keepOriginalOrder,omitempty"`
ForbiddenTokens []string `json:"forbiddenTokens,omitempty"`
}
// RenameResponse is the structured payload expected from the AI model.
type RenameResponse struct {
Items []RenameItem `json:"items"`
Warnings []string `json:"warnings,omitempty"`
PromptHash string `json:"promptHash,omitempty"`
Model string `json:"model,omitempty"`
}
// RenameItem maps an original path to the AI-proposed rename.
type RenameItem struct {
Original string `json:"original"`
Proposed string `json:"proposed"`
Sequence int `json:"sequence"`
Notes string `json:"notes,omitempty"`
}
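
Given the struct tags above, a response payload the CLI expected to decode would have looked roughly like this (values invented):

{
  "items": [
    {"original": "IMG_001.jpg", "proposed": "trip-001.jpg", "sequence": 1, "notes": "kept extension"},
    {"original": "IMG_002.jpg", "proposed": "trip-002.jpg", "sequence": 2}
  ],
  "warnings": ["two files shared a capture timestamp"],
  "model": "gpt-4o-mini"
}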

View File

@@ -7,8 +7,6 @@ import (
"os"
"path/filepath"
"time"
"github.com/rogeecn/renamer/internal/ai/prompt"
)
const ledgerFileName = ".renamer"
@@ -28,57 +26,6 @@ type Entry struct {
Metadata map[string]any `json:"metadata,omitempty"`
}
const aiMetadataKey = "ai"
// AIMetadata captures AI-specific ledger metadata for rename batches.
type AIMetadata struct {
PromptHash string `json:"promptHash"`
ResponseHash string `json:"responseHash"`
Model string `json:"model"`
Policies prompt.NamingPolicyConfig `json:"policies"`
BatchSize int `json:"batchSize"`
AppliedAt time.Time `json:"appliedAt"`
}
// AttachAIMetadata records AI metadata alongside the ledger entry.
func (e *Entry) AttachAIMetadata(meta AIMetadata) {
if e.Metadata == nil {
e.Metadata = make(map[string]any)
}
if meta.AppliedAt.IsZero() {
meta.AppliedAt = time.Now().UTC()
}
e.Metadata[aiMetadataKey] = meta
}
// AIMetadata extracts AI metadata from the ledger entry if present.
func (e Entry) AIMetadata() (AIMetadata, bool) {
if e.Metadata == nil {
return AIMetadata{}, false
}
raw, ok := e.Metadata[aiMetadataKey]
if !ok {
return AIMetadata{}, false
}
switch value := raw.(type) {
case AIMetadata:
return value, true
case map[string]any:
var meta AIMetadata
if err := remarshal(value, &meta); err != nil {
return AIMetadata{}, false
}
return meta, true
default:
var meta AIMetadata
if err := remarshal(value, &meta); err != nil {
return AIMetadata{}, false
}
return meta, true
}
}
func remarshal(value any, target any) error {
data, err := json.Marshal(value)
if err != nil {