feat: implement AI-assisted rename prompting feature
- Added data model for AI-assisted renaming including structures for prompts, responses, and policies. - Created implementation plan detailing the integration of Google Genkit into the CLI for renaming tasks. - Developed quickstart guide for setting up and using the new AI rename functionality. - Documented research decisions regarding Genkit orchestration and prompt composition. - Established tasks for phased implementation, including setup, foundational work, and user stories. - Implemented contract tests to ensure AI rename policies and ledger metadata are correctly applied. - Developed integration tests for validating AI rename flows, including preview, apply, and undo functionalities. - Added tooling to pin Genkit dependency for consistent builds.
This commit is contained in:
250
internal/ai/plan/apply.go
Normal file
250
internal/ai/plan/apply.go
Normal file
@@ -0,0 +1,250 @@
|
||||
package plan
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/rogeecn/renamer/internal/ai/prompt"
|
||||
"github.com/rogeecn/renamer/internal/history"
|
||||
)
|
||||
|
||||
// ApplyOptions describe the data required to apply an AI rename plan.
type ApplyOptions struct {
	// WorkingDir is the root directory all candidate paths are relative to.
	WorkingDir string
	// Candidates are the files currently in scope, matched against Response items.
	Candidates []Candidate
	// Response holds the AI-proposed renames to execute.
	Response prompt.RenameResponse
	// Policies are copied into the ledger metadata alongside the operations.
	Policies prompt.NamingPolicyConfig
	// PromptHash identifies the prompt that produced Response.
	PromptHash string
}
|
||||
|
||||
// Apply executes the AI rename plan and records the outcome in the ledger.
|
||||
func Apply(ctx context.Context, opts ApplyOptions) (history.Entry, error) {
|
||||
entry := history.Entry{Command: "ai"}
|
||||
|
||||
if len(opts.Response.Items) == 0 {
|
||||
return entry, errors.New("ai apply: no items to apply")
|
||||
}
|
||||
|
||||
candidateMap := make(map[string]Candidate, len(opts.Candidates))
|
||||
for _, cand := range opts.Candidates {
|
||||
key := strings.ToLower(strings.TrimSpace(cand.OriginalPath))
|
||||
candidateMap[key] = cand
|
||||
}
|
||||
|
||||
type operation struct {
|
||||
sourceRel string
|
||||
targetRel string
|
||||
sourceAbs string
|
||||
targetAbs string
|
||||
depth int
|
||||
}
|
||||
|
||||
ops := make([]operation, 0, len(opts.Response.Items))
|
||||
seenTargets := make(map[string]string)
|
||||
|
||||
conflicts := make([]Conflict, 0)
|
||||
|
||||
for _, item := range opts.Response.Items {
|
||||
key := strings.ToLower(strings.TrimSpace(item.Original))
|
||||
cand, ok := candidateMap[key]
|
||||
if !ok {
|
||||
conflicts = append(conflicts, Conflict{
|
||||
OriginalPath: item.Original,
|
||||
Issue: "missing_candidate",
|
||||
Details: "original file not found in current scope",
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
target := strings.TrimSpace(item.Proposed)
|
||||
if target == "" {
|
||||
conflicts = append(conflicts, Conflict{
|
||||
OriginalPath: item.Original,
|
||||
Issue: "empty_target",
|
||||
Details: "proposed name cannot be empty",
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
normalizedTarget := filepath.ToSlash(filepath.Clean(target))
|
||||
if strings.HasPrefix(normalizedTarget, "../") {
|
||||
conflicts = append(conflicts, Conflict{
|
||||
OriginalPath: item.Original,
|
||||
Issue: "unsafe_target",
|
||||
Details: "proposed path escapes the working directory",
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
targetKey := strings.ToLower(normalizedTarget)
|
||||
if existing, exists := seenTargets[targetKey]; exists && existing != item.Original {
|
||||
conflicts = append(conflicts, Conflict{
|
||||
OriginalPath: item.Original,
|
||||
Issue: "duplicate_target",
|
||||
Details: fmt.Sprintf("target %q reused", normalizedTarget),
|
||||
})
|
||||
continue
|
||||
}
|
||||
seenTargets[targetKey] = item.Original
|
||||
|
||||
sourceRel := filepath.ToSlash(cand.OriginalPath)
|
||||
sourceAbs := filepath.Join(opts.WorkingDir, filepath.FromSlash(sourceRel))
|
||||
targetAbs := filepath.Join(opts.WorkingDir, filepath.FromSlash(normalizedTarget))
|
||||
|
||||
if sameFile, err := isSameFile(sourceAbs, targetAbs); err != nil {
|
||||
return history.Entry{}, err
|
||||
} else if sameFile {
|
||||
continue
|
||||
}
|
||||
|
||||
if _, err := os.Stat(targetAbs); err == nil {
|
||||
conflicts = append(conflicts, Conflict{
|
||||
OriginalPath: item.Original,
|
||||
Issue: "target_exists",
|
||||
Details: fmt.Sprintf("target %q already exists", normalizedTarget),
|
||||
})
|
||||
continue
|
||||
} else if !errors.Is(err, os.ErrNotExist) {
|
||||
return history.Entry{}, err
|
||||
}
|
||||
|
||||
op := operation{
|
||||
sourceRel: sourceRel,
|
||||
targetRel: normalizedTarget,
|
||||
sourceAbs: sourceAbs,
|
||||
targetAbs: targetAbs,
|
||||
depth: cand.Depth,
|
||||
}
|
||||
ops = append(ops, op)
|
||||
}
|
||||
|
||||
if len(conflicts) > 0 {
|
||||
return history.Entry{}, ApplyConflictError{Conflicts: conflicts}
|
||||
}
|
||||
|
||||
if len(ops) == 0 {
|
||||
return entry, nil
|
||||
}
|
||||
|
||||
sort.SliceStable(ops, func(i, j int) bool {
|
||||
return ops[i].depth > ops[j].depth
|
||||
})
|
||||
|
||||
done := make([]history.Operation, 0, len(ops))
|
||||
|
||||
revert := func() error {
|
||||
for i := len(done) - 1; i >= 0; i-- {
|
||||
op := done[i]
|
||||
src := filepath.Join(opts.WorkingDir, filepath.FromSlash(op.To))
|
||||
dst := filepath.Join(opts.WorkingDir, filepath.FromSlash(op.From))
|
||||
if err := os.Rename(src, dst); err != nil && !errors.Is(err, os.ErrNotExist) {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, op := range ops {
|
||||
if err := ctx.Err(); err != nil {
|
||||
_ = revert()
|
||||
return history.Entry{}, err
|
||||
}
|
||||
|
||||
if dir := filepath.Dir(op.targetAbs); dir != "" {
|
||||
if err := os.MkdirAll(dir, 0o755); err != nil {
|
||||
_ = revert()
|
||||
return history.Entry{}, err
|
||||
}
|
||||
}
|
||||
if err := os.Rename(op.sourceAbs, op.targetAbs); err != nil {
|
||||
_ = revert()
|
||||
return history.Entry{}, err
|
||||
}
|
||||
|
||||
done = append(done, history.Operation{
|
||||
From: op.sourceRel,
|
||||
To: op.targetRel,
|
||||
})
|
||||
}
|
||||
|
||||
if len(done) == 0 {
|
||||
return entry, nil
|
||||
}
|
||||
|
||||
entry.Operations = done
|
||||
|
||||
aiMetadata := history.AIMetadata{
|
||||
PromptHash: opts.PromptHash,
|
||||
Model: opts.Response.Model,
|
||||
Policies: prompt.NamingPolicyConfig{
|
||||
Prefix: opts.Policies.Prefix,
|
||||
Casing: opts.Policies.Casing,
|
||||
AllowSpaces: opts.Policies.AllowSpaces,
|
||||
KeepOriginalOrder: opts.Policies.KeepOriginalOrder,
|
||||
ForbiddenTokens: append([]string(nil), opts.Policies.ForbiddenTokens...),
|
||||
},
|
||||
BatchSize: len(done),
|
||||
}
|
||||
|
||||
if hash, err := ResponseDigest(opts.Response); err == nil {
|
||||
aiMetadata.ResponseHash = hash
|
||||
}
|
||||
|
||||
entry.AttachAIMetadata(aiMetadata)
|
||||
|
||||
if err := history.Append(opts.WorkingDir, entry); err != nil {
|
||||
_ = revert()
|
||||
return history.Entry{}, err
|
||||
}
|
||||
|
||||
return entry, nil
|
||||
}
|
||||
|
||||
// ApplyConflictError signals that the plan contained conflicts that block apply.
|
||||
type ApplyConflictError struct {
|
||||
Conflicts []Conflict
|
||||
}
|
||||
|
||||
func (e ApplyConflictError) Error() string {
|
||||
if len(e.Conflicts) == 0 {
|
||||
return "ai apply: conflicts detected"
|
||||
}
|
||||
return fmt.Sprintf("ai apply: %d conflicts detected", len(e.Conflicts))
|
||||
}
|
||||
|
||||
// ResponseDigest returns a hash of the AI response payload for ledger metadata.
|
||||
func ResponseDigest(resp prompt.RenameResponse) (string, error) {
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return hashBytes(data), nil
|
||||
}
|
||||
|
||||
// hashBytes returns the hex-encoded SHA-256 digest of data.
func hashBytes(data []byte) string {
	h := sha256.New()
	h.Write(data)
	return hex.EncodeToString(h.Sum(nil))
}
|
||||
|
||||
// isSameFile reports whether a and b resolve to the same underlying file.
// A missing a is an error; a missing b simply means "not the same file".
func isSameFile(a, b string) (bool, error) {
	left, err := os.Stat(a)
	if err != nil {
		return false, err
	}
	right, err := os.Stat(b)
	switch {
	case err == nil:
		return os.SameFile(left, right), nil
	case errors.Is(err, os.ErrNotExist):
		return false, nil
	default:
		return false, err
	}
}
|
||||
67
internal/ai/plan/conflicts.go
Normal file
67
internal/ai/plan/conflicts.go
Normal file
@@ -0,0 +1,67 @@
|
||||
package plan
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/rogeecn/renamer/internal/ai/prompt"
|
||||
)
|
||||
|
||||
// Conflict describes an issue detected in an AI rename plan.
type Conflict struct {
	// OriginalPath is the source path of the offending item. It may be empty
	// for plan-level issues such as sequence gaps.
	OriginalPath string
	// Issue is a machine-readable code, e.g. "duplicate_target" or "sequence_gap".
	Issue string
	// Details is a human-readable explanation.
	Details string
}
|
||||
|
||||
func detectConflicts(items []prompt.RenameItem) []Conflict {
|
||||
conflicts := make([]Conflict, 0)
|
||||
|
||||
if len(items) == 0 {
|
||||
return conflicts
|
||||
}
|
||||
|
||||
targets := make(map[string][]prompt.RenameItem)
|
||||
sequences := make([]int, 0, len(items))
|
||||
|
||||
for _, item := range items {
|
||||
key := strings.ToLower(strings.TrimSpace(item.Proposed))
|
||||
if key != "" {
|
||||
targets[key] = append(targets[key], item)
|
||||
}
|
||||
if item.Sequence > 0 {
|
||||
sequences = append(sequences, item.Sequence)
|
||||
}
|
||||
}
|
||||
|
||||
for _, entries := range targets {
|
||||
if len(entries) <= 1 {
|
||||
continue
|
||||
}
|
||||
for _, entry := range entries {
|
||||
conflicts = append(conflicts, Conflict{
|
||||
OriginalPath: entry.Original,
|
||||
Issue: "duplicate_target",
|
||||
Details: fmt.Sprintf("target %q is used by multiple entries", entries[0].Proposed),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if len(sequences) > 0 {
|
||||
sort.Ints(sequences)
|
||||
expected := 1
|
||||
for _, seq := range sequences {
|
||||
if seq != expected {
|
||||
conflicts = append(conflicts, Conflict{
|
||||
Issue: "sequence_gap",
|
||||
Details: fmt.Sprintf("expected sequence %d but found %d", expected, seq),
|
||||
})
|
||||
expected = seq
|
||||
}
|
||||
expected++
|
||||
}
|
||||
}
|
||||
|
||||
return conflicts
|
||||
}
|
||||
3
internal/ai/plan/doc.go
Normal file
3
internal/ai/plan/doc.go
Normal file
@@ -0,0 +1,3 @@
|
||||
// Package plan handles AI rename plan validation, mapping, and persistence helpers.
package plan
||||
39
internal/ai/plan/editor.go
Normal file
39
internal/ai/plan/editor.go
Normal file
@@ -0,0 +1,39 @@
|
||||
package plan
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
|
||||
"github.com/rogeecn/renamer/internal/ai/prompt"
|
||||
)
|
||||
|
||||
// SaveResponse writes the AI rename response to disk for later editing.
|
||||
func SaveResponse(path string, resp prompt.RenameResponse) error {
|
||||
data, err := json.MarshalIndent(resp, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshal ai plan: %w", err)
|
||||
}
|
||||
if err := os.WriteFile(path, append(data, '\n'), 0o644); err != nil {
|
||||
return fmt.Errorf("write ai plan %s: %w", path, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadResponse reads an edited AI rename response from disk.
|
||||
func LoadResponse(path string) (prompt.RenameResponse, error) {
|
||||
data, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
if errors.Is(err, fs.ErrNotExist) {
|
||||
return prompt.RenameResponse{}, fmt.Errorf("plan file %s not found", path)
|
||||
}
|
||||
return prompt.RenameResponse{}, fmt.Errorf("read plan file %s: %w", path, err)
|
||||
}
|
||||
var resp prompt.RenameResponse
|
||||
if err := json.Unmarshal(data, &resp); err != nil {
|
||||
return prompt.RenameResponse{}, fmt.Errorf("parse plan file %s: %w", path, err)
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
190
internal/ai/plan/mapper.go
Normal file
190
internal/ai/plan/mapper.go
Normal file
@@ -0,0 +1,190 @@
|
||||
package plan
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Candidate represents a file considered for AI renaming.
type Candidate struct {
	// OriginalPath is the slash-separated path relative to the working directory.
	OriginalPath string
	// SizeBytes is the file size reported when the candidate was collected.
	SizeBytes int64
	// Depth is the directory depth relative to the walk root.
	Depth int
	// Extension includes the leading dot and preserves the original case.
	Extension string
}

// MapInput configures the mapping behaviour.
type MapInput struct {
	Candidates []Candidate
	// SequenceWidth is the zero-pad width for sequence labels; values <= 0
	// fall back to a default of 3.
	SequenceWidth int
}

// PreviewPlan aggregates entries ready for preview rendering.
type PreviewPlan struct {
	Entries []PreviewEntry
	// Warnings are carried over from validation.
	Warnings []string
	// PromptHash identifies the prompt that produced the plan.
	PromptHash string
	// Model is the model name reported by the AI response.
	Model string
	// Conflicts are plan-level issues detected while mapping.
	Conflicts []Conflict
}

// PreviewEntry is a single row in the preview table.
type PreviewEntry struct {
	Sequence int
	// SequenceLabel is the zero-padded sequence; empty when Sequence <= 0.
	SequenceLabel string
	OriginalPath  string
	ProposedPath  string
	// SanitizedSegments are original-name tokens dropped from the proposed name.
	SanitizedSegments []string
	Notes             string
}
|
||||
|
||||
// MapResponse converts a validated response into a preview plan.
|
||||
func MapResponse(input MapInput, validation ValidationResult) (PreviewPlan, error) {
|
||||
if input.SequenceWidth <= 0 {
|
||||
input.SequenceWidth = 3
|
||||
}
|
||||
|
||||
itemByOriginal := make(map[string]struct {
|
||||
item promptRenameItem
|
||||
}, len(validation.Items))
|
||||
for _, item := range validation.Items {
|
||||
key := normalizePath(item.Original)
|
||||
itemByOriginal[key] = struct{ item promptRenameItem }{item: promptRenameItem{
|
||||
Original: item.Original,
|
||||
Proposed: item.Proposed,
|
||||
Sequence: item.Sequence,
|
||||
Notes: item.Notes,
|
||||
}}
|
||||
}
|
||||
|
||||
entries := make([]PreviewEntry, 0, len(input.Candidates))
|
||||
for _, candidate := range input.Candidates {
|
||||
key := normalizePath(candidate.OriginalPath)
|
||||
entryData, ok := itemByOriginal[key]
|
||||
if !ok {
|
||||
return PreviewPlan{}, fmt.Errorf("ai plan: missing response for %s", candidate.OriginalPath)
|
||||
}
|
||||
|
||||
item := entryData.item
|
||||
label := formatSequence(item.Sequence, input.SequenceWidth)
|
||||
sanitized := computeSanitizedSegments(candidate.OriginalPath, item.Proposed)
|
||||
|
||||
entries = append(entries, PreviewEntry{
|
||||
Sequence: item.Sequence,
|
||||
SequenceLabel: label,
|
||||
OriginalPath: candidate.OriginalPath,
|
||||
ProposedPath: item.Proposed,
|
||||
SanitizedSegments: sanitized,
|
||||
Notes: item.Notes,
|
||||
})
|
||||
}
|
||||
|
||||
return PreviewPlan{
|
||||
Entries: entries,
|
||||
Warnings: append([]string(nil), validation.Warnings...),
|
||||
PromptHash: validation.PromptHash,
|
||||
Model: validation.Model,
|
||||
Conflicts: detectConflicts(validation.Items),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// promptRenameItem is a local copy of the response item fields consumed
// while building preview entries.
type promptRenameItem struct {
	Original string
	Proposed string
	Sequence int
	Notes    string
}
|
||||
|
||||
// formatSequence renders seq zero-padded to width characters. Sequences wider
// than width are printed unpadded, and non-positive sequences yield an empty
// label.
func formatSequence(seq, width int) string {
	if seq <= 0 {
		return ""
	}
	// %0*d pads to at least width and never truncates, so the result can be
	// returned directly; a length-comparison fallback would be dead code.
	return fmt.Sprintf("%0*d", width, seq)
}
|
||||
|
||||
// normalizePath converts backslashes to forward slashes and trims surrounding
// whitespace so paths compare consistently across platforms.
func normalizePath(path string) string {
	slashed := strings.ReplaceAll(path, "\\", "/")
	return strings.TrimSpace(slashed)
}
|
||||
|
||||
func computeSanitizedSegments(original, proposed string) []string {
|
||||
origStem := stem(original)
|
||||
propStem := stem(proposed)
|
||||
|
||||
origTokens := tokenize(origStem)
|
||||
propTokens := make(map[string]struct{}, len(origTokens))
|
||||
for _, token := range tokenize(propStem) {
|
||||
propTokens[token] = struct{}{}
|
||||
}
|
||||
|
||||
var sanitized []string
|
||||
seen := make(map[string]struct{})
|
||||
for _, token := range origTokens {
|
||||
if _, ok := propTokens[token]; ok {
|
||||
continue
|
||||
}
|
||||
if _, already := seen[token]; already {
|
||||
continue
|
||||
}
|
||||
if isNumericToken(token) {
|
||||
continue
|
||||
}
|
||||
seen[token] = struct{}{}
|
||||
sanitized = append(sanitized, token)
|
||||
}
|
||||
if len(sanitized) == 0 {
|
||||
return nil
|
||||
}
|
||||
sort.Strings(sanitized)
|
||||
return sanitized
|
||||
}
|
||||
|
||||
// stem returns the base name of path with its extension removed.
func stem(path string) string {
	base := filepath.Base(path)
	return strings.TrimSuffix(base, filepath.Ext(base))
}
|
||||
|
||||
// tokenize splits value on every rune that is not an ASCII letter or digit
// and lowercases the resulting fragments.
func tokenize(value string) []string {
	isAlnum := func(r rune) bool {
		switch {
		case r >= '0' && r <= '9', r >= 'a' && r <= 'z', r >= 'A' && r <= 'Z':
			return true
		}
		return false
	}
	parts := strings.FieldsFunc(value, func(r rune) bool { return !isAlnum(r) })
	out := make([]string, 0, len(parts))
	for _, part := range parts {
		// FieldsFunc never yields empty fields, but keep the guard for parity.
		if lowered := strings.ToLower(part); lowered != "" {
			out = append(out, lowered)
		}
	}
	return out
}
|
||||
|
||||
// isNumericToken reports whether token consists of one or more ASCII digits.
func isNumericToken(token string) bool {
	if token == "" {
		return false
	}
	nonDigit := strings.IndexFunc(token, func(r rune) bool {
		return r < '0' || r > '9'
	})
	return nonDigit == -1
}
|
||||
76
internal/ai/plan/scope.go
Normal file
76
internal/ai/plan/scope.go
Normal file
@@ -0,0 +1,76 @@
|
||||
package plan
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"io/fs"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/rogeecn/renamer/internal/listing"
|
||||
"github.com/rogeecn/renamer/internal/traversal"
|
||||
)
|
||||
|
||||
// CollectCandidates walks the scope described by req and returns eligible file candidates.
//
// Directories are never candidates. When req.Extensions is non-empty, a file
// is kept only if its lowercased extension matches an entry exactly —
// assumes req.Extensions entries are already lowercased and dot-prefixed;
// TODO confirm against listing.ListingRequest. Paths are returned
// slash-separated and relative to req.WorkingDir. Cancelling ctx aborts the walk.
func CollectCandidates(ctx context.Context, req *listing.ListingRequest) ([]Candidate, error) {
	if req == nil {
		return nil, errors.New("collect candidates: request cannot be nil")
	}
	if err := req.Validate(); err != nil {
		return nil, err
	}

	w := traversal.NewWalker()
	extensions := make(map[string]struct{}, len(req.Extensions))
	for _, ext := range req.Extensions {
		extensions[ext] = struct{}{}
	}

	candidates := make([]Candidate, 0)

	err := w.Walk(
		req.WorkingDir,
		req.Recursive,
		false, // directories are not considered candidates
		req.IncludeHidden,
		req.MaxDepth,
		func(relPath string, entry fs.DirEntry, depth int) error {
			// Bail out promptly if the caller cancelled.
			select {
			case <-ctx.Done():
				return ctx.Err()
			default:
			}

			if entry.IsDir() {
				return nil
			}

			relSlash := filepath.ToSlash(relPath)
			// Lowercased extension is used only as the filter key; the
			// Candidate keeps the original-case extension below.
			ext := strings.ToLower(filepath.Ext(entry.Name()))
			if len(extensions) > 0 {
				if _, match := extensions[ext]; !match {
					return nil
				}
			}

			info, err := entry.Info()
			if err != nil {
				return err
			}

			candidates = append(candidates, Candidate{
				OriginalPath: relSlash,
				SizeBytes:    info.Size(),
				Depth:        depth,
				Extension:    filepath.Ext(entry.Name()),
			})

			return nil
		},
	)
	if err != nil {
		return nil, err
	}

	return candidates, nil
}
|
||||
423
internal/ai/plan/validator.go
Normal file
423
internal/ai/plan/validator.go
Normal file
@@ -0,0 +1,423 @@
|
||||
package plan
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/rogeecn/renamer/internal/ai/prompt"
|
||||
)
|
||||
|
||||
// Validator checks the AI response for completeness and uniqueness rules.
type Validator struct {
	// expected holds the deduplicated originals in first-seen order.
	expected []string
	// expectedSet is a membership index over expected.
	expectedSet map[string]struct{}
	// policies is the normalized naming-policy configuration.
	policies prompt.NamingPolicyConfig
	// bannedSet contains lowercased banned tokens.
	bannedSet map[string]struct{}
}

// ValidationResult captures the successfully decoded response data.
type ValidationResult struct {
	Items      []prompt.RenameItem
	Warnings   []string
	PromptHash string
	Model      string
}

// InvalidItem describes a single response entry that failed validation.
type InvalidItem struct {
	// Index is the entry's position in the response.
	Index int
	// Original is the raw original value, possibly empty.
	Original string
	// Proposed is the raw proposed value, possibly empty.
	Proposed string
	// Reason is a human-readable failure description.
	Reason string
}

// ValidationError aggregates the issues discovered during validation.
type ValidationError struct {
	// Result carries the decoded data even when validation fails.
	Result ValidationResult
	// MissingOriginals are expected originals absent from the response.
	MissingOriginals []string
	// UnexpectedOriginals appeared in the response but not in the expected set.
	UnexpectedOriginals []string
	// DuplicateOriginals maps an original to its occurrence count (> 1).
	DuplicateOriginals map[string]int
	// DuplicateProposed maps a proposed name to the originals claiming it.
	DuplicateProposed map[string][]string
	// InvalidItems are entries with empty original or proposed values.
	InvalidItems []InvalidItem
	// PolicyViolations are naming-policy breaches.
	PolicyViolations []PolicyViolation
}

// PolicyViolation captures a single naming-policy breach.
type PolicyViolation struct {
	Original string
	Proposed string
	// Rule is one of "prefix", "spaces", "casing", or "banned".
	Rule string
	// Message is a human-readable explanation.
	Message string
}
|
||||
|
||||
func (e *ValidationError) Error() string {
|
||||
if e == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
parts := make([]string, 0, 5)
|
||||
if len(e.MissingOriginals) > 0 {
|
||||
parts = append(parts, fmt.Sprintf("missing %d originals", len(e.MissingOriginals)))
|
||||
}
|
||||
if len(e.UnexpectedOriginals) > 0 {
|
||||
parts = append(parts, fmt.Sprintf("unexpected %d originals", len(e.UnexpectedOriginals)))
|
||||
}
|
||||
if len(e.DuplicateOriginals) > 0 {
|
||||
parts = append(parts, fmt.Sprintf("%d duplicate originals", len(e.DuplicateOriginals)))
|
||||
}
|
||||
if len(e.DuplicateProposed) > 0 {
|
||||
parts = append(parts, fmt.Sprintf("%d duplicate proposed names", len(e.DuplicateProposed)))
|
||||
}
|
||||
if len(e.InvalidItems) > 0 {
|
||||
parts = append(parts, fmt.Sprintf("%d invalid items", len(e.InvalidItems)))
|
||||
}
|
||||
if len(e.PolicyViolations) > 0 {
|
||||
parts = append(parts, fmt.Sprintf("%d policy violations", len(e.PolicyViolations)))
|
||||
}
|
||||
|
||||
summary := strings.Join(parts, ", ")
|
||||
if summary == "" {
|
||||
summary = "response validation failed"
|
||||
}
|
||||
return fmt.Sprintf("ai response validation failed: %s", summary)
|
||||
}
|
||||
|
||||
// HasIssues indicates whether the validation error captured any rule breaks.
|
||||
func (e *ValidationError) HasIssues() bool {
|
||||
if e == nil {
|
||||
return false
|
||||
}
|
||||
return len(e.MissingOriginals) > 0 ||
|
||||
len(e.UnexpectedOriginals) > 0 ||
|
||||
len(e.DuplicateOriginals) > 0 ||
|
||||
len(e.DuplicateProposed) > 0 ||
|
||||
len(e.InvalidItems) > 0 ||
|
||||
len(e.PolicyViolations) > 0
|
||||
}
|
||||
|
||||
// NewValidator constructs a validator for the supplied original filenames. Any
|
||||
// whitespace-only entries are discarded. Duplicate originals are collapsed to
|
||||
// ensure consistent coverage checks.
|
||||
func NewValidator(originals []string, policies prompt.NamingPolicyConfig, bannedTerms []string) Validator {
|
||||
expectedSet := make(map[string]struct{}, len(originals))
|
||||
deduped := make([]string, 0, len(originals))
|
||||
for _, original := range originals {
|
||||
trimmed := strings.TrimSpace(original)
|
||||
if trimmed == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := expectedSet[trimmed]; exists {
|
||||
continue
|
||||
}
|
||||
expectedSet[trimmed] = struct{}{}
|
||||
deduped = append(deduped, trimmed)
|
||||
}
|
||||
|
||||
bannedSet := make(map[string]struct{})
|
||||
for _, token := range bannedTerms {
|
||||
lower := strings.ToLower(strings.TrimSpace(token))
|
||||
if lower == "" {
|
||||
continue
|
||||
}
|
||||
bannedSet[lower] = struct{}{}
|
||||
}
|
||||
|
||||
policies.Casing = strings.ToLower(strings.TrimSpace(policies.Casing))
|
||||
policies.Prefix = strings.TrimSpace(policies.Prefix)
|
||||
policies.ForbiddenTokens = append([]string(nil), policies.ForbiddenTokens...)
|
||||
|
||||
return Validator{
|
||||
expected: deduped,
|
||||
expectedSet: expectedSet,
|
||||
policies: policies,
|
||||
bannedSet: bannedSet,
|
||||
}
|
||||
}
|
||||
|
||||
// Validate ensures the AI response covers each expected original exactly once
// and that the proposed filenames are unique.
//
// On success the decoded items are returned in a ValidationResult with a nil
// error. On any rule break the same result is returned together with a
// *ValidationError aggregating every issue (missing/unexpected/duplicate
// originals, duplicate proposed names, empty fields, and policy violations).
func (v Validator) Validate(resp prompt.RenameResponse) (ValidationResult, error) {
	// Copy response data so callers can mutate the result freely.
	result := ValidationResult{
		Items:      cloneItems(resp.Items),
		Warnings:   append([]string(nil), resp.Warnings...),
		PromptHash: resp.PromptHash,
		Model:      resp.Model,
	}

	// An empty response misses every expected original by definition.
	if len(resp.Items) == 0 {
		err := &ValidationError{
			Result:           result,
			MissingOriginals: append([]string(nil), v.expected...),
		}
		return result, err
	}

	seenOriginals := make(map[string]int, len(resp.Items))
	seenProposed := make(map[string][]string, len(resp.Items))
	unexpectedSet := map[string]struct{}{}

	invalidItems := make([]InvalidItem, 0)
	policyViolations := make([]PolicyViolation, 0)

	// Single pass: classify each item's original and proposed values and
	// collect policy violations as we go.
	for idx, item := range resp.Items {
		original := strings.TrimSpace(item.Original)
		proposed := strings.TrimSpace(item.Proposed)

		if original == "" {
			invalidItems = append(invalidItems, InvalidItem{
				Index:    idx,
				Original: item.Original,
				Proposed: item.Proposed,
				Reason:   "original is empty",
			})
		} else {
			seenOriginals[original]++
			if _, ok := v.expectedSet[original]; !ok {
				unexpectedSet[original] = struct{}{}
			}
		}

		if proposed == "" {
			invalidItems = append(invalidItems, InvalidItem{
				Index:    idx,
				Original: item.Original,
				Proposed: item.Proposed,
				Reason:   "proposed is empty",
			})
		} else {
			seenProposed[proposed] = append(seenProposed[proposed], original)
		}

		policyViolations = append(policyViolations, v.evaluatePolicies(item)...)
	}

	// Coverage: every expected original must appear at least once.
	missing := make([]string, 0)
	for _, original := range v.expected {
		if seenOriginals[original] == 0 {
			missing = append(missing, original)
		}
	}

	duplicateOriginals := make(map[string]int)
	for original, count := range seenOriginals {
		if count > 1 {
			duplicateOriginals[original] = count
		}
	}

	// A proposed name only counts as duplicated when two or more non-empty
	// originals claim it.
	duplicateProposed := make(map[string][]string)
	for proposed, sources := range seenProposed {
		if len(sources) > 1 {
			filtered := make([]string, 0, len(sources))
			for _, src := range sources {
				if strings.TrimSpace(src) != "" {
					filtered = append(filtered, src)
				}
			}
			if len(filtered) > 1 {
				duplicateProposed[proposed] = filtered
			}
		}
	}

	// Sorted for deterministic error reporting.
	unexpected := orderedKeys(unexpectedSet)

	if len(missing) == 0 &&
		len(unexpected) == 0 &&
		len(duplicateOriginals) == 0 &&
		len(duplicateProposed) == 0 &&
		len(invalidItems) == 0 &&
		len(policyViolations) == 0 {
		return result, nil
	}

	err := &ValidationError{
		Result:              result,
		MissingOriginals:    missing,
		UnexpectedOriginals: unexpected,
		DuplicateOriginals:  duplicateOriginals,
		DuplicateProposed:   duplicateProposed,
		InvalidItems:        invalidItems,
		PolicyViolations:    policyViolations,
	}

	return result, err
}
|
||||
|
||||
// Expectation returns a copy of the expected originals tracked by the validator.
|
||||
func (v Validator) Expectation() []string {
|
||||
return append([]string(nil), v.expected...)
|
||||
}
|
||||
|
||||
func cloneItems(items []prompt.RenameItem) []prompt.RenameItem {
|
||||
if len(items) == 0 {
|
||||
return nil
|
||||
}
|
||||
cp := make([]prompt.RenameItem, len(items))
|
||||
copy(cp, items)
|
||||
return cp
|
||||
}
|
||||
|
||||
// orderedKeys returns the keys of set sorted ascending, or nil for an empty set.
func orderedKeys(set map[string]struct{}) []string {
	if len(set) == 0 {
		return nil
	}
	keys := make([]string, 0, len(set))
	for key := range set {
		keys = append(keys, key)
	}
	sort.Strings(keys)
	return keys
}
|
||||
|
||||
// evaluatePolicies checks a single response item against the configured
// naming policies (prefix, spaces, casing, banned tokens) and returns one
// PolicyViolation per broken rule. Items with an empty proposed name are
// skipped here; they are reported as invalid items elsewhere.
func (v Validator) evaluatePolicies(item prompt.RenameItem) []PolicyViolation {
	violations := make([]PolicyViolation, 0)
	proposed := strings.TrimSpace(item.Proposed)
	if proposed == "" {
		return violations
	}
	// Policies apply only to the filename stem, never the directory part or
	// the extension.
	base := filepath.Base(proposed)
	stem := base
	if ext := filepath.Ext(base); ext != "" {
		stem = base[:len(base)-len(ext)]
	}
	stemLower := strings.ToLower(stem)

	// Prefix matching is case-insensitive.
	if v.policies.Prefix != "" {
		prefixLower := strings.ToLower(v.policies.Prefix)
		if !strings.HasPrefix(stemLower, prefixLower) {
			violations = append(violations, PolicyViolation{
				Original: item.Original,
				Proposed: item.Proposed,
				Rule:     "prefix",
				Message:  fmt.Sprintf("expected prefix %q", v.policies.Prefix),
			})
		}
	}

	if !v.policies.AllowSpaces && strings.Contains(stem, " ") {
		violations = append(violations, PolicyViolation{
			Original: item.Original,
			Proposed: item.Proposed,
			Rule:     "spaces",
			Message:  "spaces are not allowed",
		})
	}

	if v.policies.Casing != "" {
		if ok, message := matchesCasing(stem, v.policies); !ok {
			violations = append(violations, PolicyViolation{
				Original: item.Original,
				Proposed: item.Proposed,
				Rule:     "casing",
				Message:  message,
			})
		}
	}

	// Banned tokens are matched against the lowercased stem tokens; only the
	// first match is reported per item.
	if len(v.bannedSet) > 0 {
		tokens := tokenize(stemLower)
		for _, token := range tokens {
			if _, ok := v.bannedSet[token]; ok {
				violations = append(violations, PolicyViolation{
					Original: item.Original,
					Proposed: item.Proposed,
					Rule:     "banned",
					Message:  fmt.Sprintf("contains banned token %q", token),
				})
				break
			}
		}
	}

	return violations
}
|
||||
|
||||
// matchesCasing reports whether stem satisfies the configured casing policy,
// returning false plus an explanatory message on a violation. The configured
// prefix and any leading digits/separators are stripped first (via coreStem)
// so only the descriptive part of the name is judged. Unrecognized casing
// values always pass.
func matchesCasing(stem string, policies prompt.NamingPolicyConfig) (bool, string) {
	core := coreStem(stem, policies.Prefix)
	switch policies.Casing {
	case "kebab":
		if strings.Contains(core, " ") {
			return false, "expected kebab-case (no spaces)"
		}
		// Only ASCII uppercase letters are rejected; non-ASCII letters are
		// not checked here.
		if strings.ContainsAny(core, "ABCDEFGHIJKLMNOPQRSTUVWXYZ") {
			return false, "expected kebab-case (use lowercase letters)"
		}
		return true, ""
	case "snake":
		if strings.Contains(core, " ") {
			return false, "expected snake_case (no spaces)"
		}
		// Hyphens are also forbidden for snake_case.
		if strings.ContainsAny(core, "ABCDEFGHIJKLMNOPQRSTUVWXYZ-") {
			return false, "expected snake_case (lowercase letters with underscores)"
		}
		return true, ""
	case "camel":
		if strings.ContainsAny(core, " -_") {
			return false, "expected camelCase (no separators)"
		}
		runes := []rune(core)
		if len(runes) == 0 {
			return false, "expected camelCase descriptive text"
		}
		if !unicode.IsLower(runes[0]) {
			return false, "expected camelCase (first letter lowercase)"
		}
		return true, ""
	case "pascal":
		if strings.ContainsAny(core, " -_") {
			return false, "expected PascalCase (no separators)"
		}
		runes := []rune(core)
		if len(runes) == 0 {
			return false, "expected PascalCase descriptive text"
		}
		if !unicode.IsUpper(runes[0]) {
			return false, "expected PascalCase (first letter uppercase)"
		}
		return true, ""
	case "title":
		// Hyphens are treated as word separators here; underscores are not.
		words := strings.Fields(strings.ReplaceAll(core, "-", " "))
		if len(words) == 0 {
			return false, "expected Title Case words"
		}
		for _, word := range words {
			runes := []rune(word)
			if len(runes) == 0 {
				continue
			}
			if !unicode.IsUpper(runes[0]) {
				return false, "expected Title Case (capitalize each word)"
			}
		}
		return true, ""
	default:
		return true, ""
	}
}
|
||||
|
||||
// coreStem strips a case-insensitive prefix (plus trailing separator padding)
// and then any leading digits, hyphens, underscores, or spaces from stem,
// returning the descriptive remainder.
func coreStem(stem, prefix string) string {
	rest := stem
	if prefix != "" && strings.HasPrefix(strings.ToLower(rest), strings.ToLower(prefix)) {
		rest = strings.TrimLeft(rest[len(prefix):], "-_ ")
	}
	for idx, r := range rest {
		if unicode.IsDigit(r) || r == '-' || r == '_' || r == ' ' {
			continue
		}
		return rest[idx:]
	}
	return ""
}
|
||||
Reference in New Issue
Block a user