feat: update AI command to streamline token management and remove unnecessary flags
.renamer.example (new file, 42 lines)
@@ -0,0 +1,42 @@
# Example AI vendor credentials for renamer.
# Copy this file to ~/.config/.renamer/.renamer (or the path specified by
# RENAMER_CONFIG_DIR) and replace the placeholder values with real tokens.

# OpenAI (gpt-4o, o1, ChatGPT)
OPENAI_TOKEN=sk-openai-xxxxxxxxxxxxxxxxxxxxxxxx

# Anthropic (Claude models)
ANTHROPIC_TOKEN=sk-anthropic-xxxxxxxxxxxxxxxx

# Google (Gemini, LearnLM, PaLM)
GOOGLE_TOKEN=ya29.xxxxxxxxxxxxxxxxxxxxxxxx

# Mistral AI (Mistral, Mixtral, Ministral)
MISTRAL_TOKEN=sk-mistral-xxxxxxxxxxxxxxxx

# Cohere (Command family)
COHERE_TOKEN=sk-cohere-xxxxxxxxxxxxxxxx

# Moonshot AI (Moonshot models)
MOONSHOT_TOKEN=sk-moonshot-xxxxxxxxxxxxxxxx

# Zhipu AI (GLM series)
ZHIPU_TOKEN=sk-zhipu-xxxxxxxxxxxxxxxx

# Alibaba DashScope (Qwen)
ALIBABA_TOKEN=sk-dashscope-xxxxxxxxxxxxxxxx

# Baidu Wenxin/ERNIE
BAIDU_TOKEN=sk-baidu-xxxxxxxxxxxxxxxx

# MiniMax (ABAB)
MINIMAX_TOKEN=sk-minimax-xxxxxxxxxxxxxxxx

# ByteDance Doubao
BYTEDANCE_TOKEN=sk-bytedance-xxxxxxxxxxxxxxxx

# DeepSeek
DEEPSEEK_TOKEN=sk-deepseek-xxxxxxxxxxxxxxxx

# xAI Grok
XAI_TOKEN=sk-xai-xxxxxxxxxxxxxxxx
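A quick aside for reviewers: the template above is a plain env-style file, so it can be sanity-checked with the godotenv library this project already depends on. The snippet below is only an illustrative sketch (the relative path is an assumption), not part of the commit:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/joho/godotenv"
)

func main() {
	// godotenv.Read parses KEY=value lines into a map[string]string.
	values, err := godotenv.Read(".renamer.example")
	if err != nil {
		fmt.Println("read failed:", err)
		return
	}
	// Every key in the template is expected to follow the <VENDOR>_TOKEN convention.
	for key := range values {
		if !strings.HasSuffix(key, "_TOKEN") {
			fmt.Println("unexpected key:", key)
		}
	}
	fmt.Printf("parsed %d vendor tokens\n", len(values))
}
```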
cmd/ai.go (278 lines changed)
@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strings"
@@ -19,17 +20,13 @@ import (
)

type aiCommandOptions struct {
Model string
Debug bool
ExportPath string
ImportPath string
Casing string
Prefix string
AllowSpaces bool
KeepOriginalOrder bool
BannedTokens []string
Model string
Debug bool
}

const aiPlanFilename = "renamer.plan.json"

// newAICommand builds the `renamer ai` subcommand; only the model-selection and debug flags are kept, while all other naming policies are left for the AI to generate on its own.
func newAICommand() *cobra.Command {
ops := &aiCommandOptions{}

@@ -37,14 +34,11 @@ func newAICommand() *cobra.Command {
Use: "ai",
Short: "Generate rename plans using the AI workflow",
Long: "Invoke the embedded AI workflow to generate, validate, and optionally apply rename plans.",
Example: strings.TrimSpace(` # Preview an AI plan and export the raw response for edits
renamer ai --path ./photos --dry-run --export-plan plan.json
Example: strings.TrimSpace(` # Generate a plan for review in renamer.plan.json
renamer ai --path ./photos --dry-run

# Import an edited plan and validate it without applying changes
renamer ai --path ./photos --dry-run --import-plan plan.json

# Apply an edited plan after validation passes
renamer ai --path ./photos --import-plan plan.json --yes`),
# Apply the reviewed plan after confirming the preview
renamer ai --path ./photos --yes`),
RunE: func(cmd *cobra.Command, args []string) error {
options := collectAIOptions(cmd, ops)
return runAICommand(cmd.Context(), cmd, options)
@@ -57,24 +51,15 @@ func newAICommand() *cobra.Command {
}

func bindAIFlags(cmd *cobra.Command, opts *aiCommandOptions) {
cmd.Flags().StringVar(&opts.Model, "genkit-model", genkit.DefaultModelName, fmt.Sprintf("OpenAI-compatible model identifier (default %s)", genkit.DefaultModelName))
cmd.Flags().
StringVar(&opts.Model, "genkit-model", genkit.DefaultModelName, fmt.Sprintf("OpenAI-compatible model identifier (default %s)", genkit.DefaultModelName))
cmd.Flags().BoolVar(&opts.Debug, "debug-genkit", false, "Write Genkit prompt/response traces to the debug log")
cmd.Flags().StringVar(&opts.ExportPath, "export-plan", "", "Export the raw AI plan JSON to the provided file path")
cmd.Flags().StringVar(&opts.ImportPath, "import-plan", "", "Import an edited AI plan JSON for validation or apply")
cmd.Flags().StringVar(&opts.Casing, "naming-casing", "kebab", "Casing style for AI-generated filenames (kebab, snake, camel, pascal, title)")
cmd.Flags().StringVar(&opts.Prefix, "naming-prefix", "", "Static prefix AI proposals must include (alias: --prefix)")
cmd.Flags().StringVar(&opts.Prefix, "prefix", "", "Alias for --naming-prefix")
cmd.Flags().BoolVar(&opts.AllowSpaces, "naming-allow-spaces", false, "Permit spaces in AI-generated filenames")
cmd.Flags().BoolVar(&opts.KeepOriginalOrder, "naming-keep-order", false, "Instruct AI to preserve original ordering of descriptive terms")
cmd.Flags().StringSliceVar(&opts.BannedTokens, "banned", nil, "Comma-separated list of additional banned tokens (repeat flag to add more)")
}

func collectAIOptions(cmd *cobra.Command, defaults *aiCommandOptions) aiCommandOptions {
result := aiCommandOptions{
Model: genkit.DefaultModelName,
Debug: false,
ExportPath: "",
Casing: "kebab",
Model: genkit.DefaultModelName,
Debug: false,
}

if defaults != nil {
@@ -82,16 +67,6 @@ func collectAIOptions(cmd *cobra.Command, defaults *aiCommandOptions) aiCommandO
result.Model = defaults.Model
}
result.Debug = defaults.Debug
result.ExportPath = defaults.ExportPath
if defaults.Casing != "" {
result.Casing = defaults.Casing
}
result.Prefix = defaults.Prefix
result.AllowSpaces = defaults.AllowSpaces
result.KeepOriginalOrder = defaults.KeepOriginalOrder
if len(defaults.BannedTokens) > 0 {
result.BannedTokens = append([]string(nil), defaults.BannedTokens...)
}
}

if flag := cmd.Flags().Lookup("genkit-model"); flag != nil {
@@ -106,56 +81,16 @@ func collectAIOptions(cmd *cobra.Command, defaults *aiCommandOptions) aiCommandO
}
}

if flag := cmd.Flags().Lookup("export-plan"); flag != nil {
if value, err := cmd.Flags().GetString("export-plan"); err == nil && value != "" {
result.ExportPath = value
}
}

if flag := cmd.Flags().Lookup("import-plan"); flag != nil {
if value, err := cmd.Flags().GetString("import-plan"); err == nil && value != "" {
result.ImportPath = value
}
}

if flag := cmd.Flags().Lookup("naming-casing"); flag != nil {
if value, err := cmd.Flags().GetString("naming-casing"); err == nil && value != "" {
result.Casing = value
}
}

if flag := cmd.Flags().Lookup("naming-prefix"); flag != nil {
if value, err := cmd.Flags().GetString("naming-prefix"); err == nil {
result.Prefix = value
}
}
if flag := cmd.Flags().Lookup("prefix"); flag != nil && flag.Changed {
if value, err := cmd.Flags().GetString("prefix"); err == nil {
result.Prefix = value
}
}

if flag := cmd.Flags().Lookup("naming-allow-spaces"); flag != nil {
if value, err := cmd.Flags().GetBool("naming-allow-spaces"); err == nil {
result.AllowSpaces = value
}
}

if flag := cmd.Flags().Lookup("naming-keep-order"); flag != nil {
if value, err := cmd.Flags().GetBool("naming-keep-order"); err == nil {
result.KeepOriginalOrder = value
}
}

if flag := cmd.Flags().Lookup("banned"); flag != nil {
if value, err := cmd.Flags().GetStringSlice("banned"); err == nil && len(value) > 0 {
result.BannedTokens = append([]string(nil), value...)
}
}

return result
}

// runAICommand runs the AI rename flow in the following order:
// 1. Resolve the scope and whether changes should be applied immediately;
// 2. Auto-detect renamer.plan.json in the working directory to decide between loading manual adjustments and generating a new plan;
// 3. Collect candidate files and filter out auxiliary files produced during generation;
// 4. Invoke the model through the Genkit workflow to generate a plan, or read the existing one;
// 5. Save/update the local plan file, then validate it, render the preview, and report conflicts and warnings;
// 6. After user confirmation, perform the renames and record them in the ledger.
func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOptions) error {
scope, err := listing.ScopeFromCmd(cmd)
if err != nil {
@@ -167,22 +102,28 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
return err
}

options.ImportPath = strings.TrimSpace(options.ImportPath)

casing, err := normalizeCasing(options.Casing)
if err != nil {
return err
// Detect a plan file in the current directory so a manually prepared plan can be picked up on re-run.
planPath := filepath.Join(scope.WorkingDir, aiPlanFilename)
planExists := false
if info, err := os.Stat(planPath); err == nil {
if info.IsDir() {
return fmt.Errorf("plan file %s is a directory", planPath)
}
planExists = true
} else if !errors.Is(err, os.ErrNotExist) {
return fmt.Errorf("plan file %s: %w", planPath, err)
}
options.Casing = casing
prefix := strings.TrimSpace(options.Prefix)
userBanned := sanitizeTokenSlice(options.BannedTokens)
bannedTerms := mergeBannedTerms(defaultBannedTerms(), userBanned)

// Default policies are delegated entirely to the prompt template; only the base banned terms are kept.
casing := "kebab"
bannedTerms := defaultBannedTerms()

// Collect all candidate files, excluding the plan file itself so it is never renamed.
candidates, err := plan.CollectCandidates(ctx, scope)
if err != nil {
return err
}
ignoreSet := buildIgnoreSet(scope.WorkingDir, options.ExportPath, options.ImportPath)
ignoreSet := buildIgnoreSet(scope.WorkingDir, planPath)
if len(ignoreSet) > 0 {
candidates = filterIgnoredCandidates(candidates, ignoreSet)
}
@@ -208,11 +149,11 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
}

policies := prompt.PolicyConfig{
Prefix: prefix,
Casing: options.Casing,
AllowSpaces: options.AllowSpaces,
KeepOriginalOrder: options.KeepOriginalOrder,
ForbiddenTokens: append([]string(nil), userBanned...),
Prefix: "",
Casing: casing,
AllowSpaces: false,
KeepOriginalOrder: false,
ForbiddenTokens: append([]string(nil), bannedTerms...),
}
validatorPolicy := prompt.NamingPolicyConfig{
Prefix: policies.Prefix,
@@ -226,8 +167,9 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
var promptHash string
var model string

if options.ImportPath != "" {
resp, err := plan.LoadResponse(options.ImportPath)
if planExists {
// If an existing plan is detected, prefer loading the manually edited plan and continue with validation/apply.
resp, err := plan.LoadResponse(planPath)
if err != nil {
return err
}
@@ -238,6 +180,7 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
model = options.Model
}
} else {
// When no plan file exists, call the Genkit workflow to generate a brand-new plan.
builder := prompt.NewBuilder()
promptPayload, err := builder.Build(prompt.BuildInput{
WorkingDir: scope.WorkingDir,
@@ -267,13 +210,6 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
response = invocationResult.Response
promptHash = invocationResult.PromptHash
model = invocationResult.Response.Model

if options.ExportPath != "" {
if err := plan.SaveResponse(options.ExportPath, response); err != nil {
return err
}
fmt.Fprintf(cmd.ErrOrStderr(), "AI plan exported to %s\n", options.ExportPath)
}
}

if promptHash == "" {
@@ -287,6 +223,16 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
response.PromptHash = promptHash
response.Model = model

// Write the generated or loaded plan back to disk for later manual review or reuse.
if err := plan.SaveResponse(planPath, response); err != nil {
return err
}
message := "AI plan saved to %s\n"
if planExists {
message = "AI plan updated at %s\n"
}
fmt.Fprintf(cmd.ErrOrStderr(), message, planPath)

originals := make([]string, 0, len(candidates))
for _, candidate := range candidates {
originals = append(originals, candidate.OriginalPath)
@@ -326,6 +272,7 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
previewPlan.Model = model
}

// Print the preview table and warnings to help the user confirm the rename proposals.
if err := renderAIPlan(cmd.OutOrStdout(), previewPlan); err != nil {
return err
}
@@ -343,15 +290,6 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
output.WriteAIPlanDebug(errorWriter, "", previewPlan.Warnings)
}

if options.ImportPath == "" && options.ExportPath != "" {
// Plan already exported earlier.
} else if options.ImportPath != "" && options.ExportPath != "" {
if err := plan.SaveResponse(options.ExportPath, response); err != nil {
return err
}
fmt.Fprintf(errorWriter, "AI plan exported to %s\n", options.ExportPath)
}

if !applyRequested {
return nil
}
@@ -360,6 +298,7 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
return fmt.Errorf("cannot apply AI plan while conflicts remain")
}

// With no conflicts and explicit user confirmation, execute the plan and record it in the ledger.
applyEntry, err := plan.Apply(ctx, plan.ApplyOptions{
WorkingDir: scope.WorkingDir,
Candidates: candidates,
@@ -371,7 +310,13 @@ func runAICommand(ctx context.Context, cmd *cobra.Command, options aiCommandOpti
var conflictErr plan.ApplyConflictError
if errors.As(err, &conflictErr) {
for _, conflict := range conflictErr.Conflicts {
fmt.Fprintf(errorWriter, "Apply conflict (%s): %s %s\n", conflict.Issue, conflict.OriginalPath, conflict.Details)
fmt.Fprintf(
errorWriter,
"Apply conflict (%s): %s %s\n",
conflict.Issue,
conflict.OriginalPath,
conflict.Details,
)
}
}
return err
@@ -416,13 +361,25 @@ func composeInstructions(sequence prompt.SequenceRule, policies prompt.PolicyCon
lines := []string{
"You are an AI assistant that proposes safe file rename plans.",
"Return JSON matching this schema: {\"items\":[{\"original\":string,\"proposed\":string,\"sequence\":number,\"notes\"?:string}],\"warnings\"?:[string]}.",
fmt.Sprintf("Use %s numbering with width %d starting at %d and separator %q.", sequence.Style, sequence.Width, sequence.Start, sequence.Separator),
fmt.Sprintf(
"Use %s numbering with width %d starting at %d and separator %q.",
sequence.Style,
sequence.Width,
sequence.Start,
sequence.Separator,
),
"Preserve original file extensions exactly as provided.",
fmt.Sprintf("Apply %s casing to filename stems and avoid promotional or banned terms.", policies.Casing),
"Ensure proposed names are unique and sequences remain contiguous.",
}
if policies.Prefix != "" {
lines = append(lines, fmt.Sprintf("Every proposed filename must begin with the prefix %q immediately before descriptive text.", policies.Prefix))
lines = append(
lines,
fmt.Sprintf(
"Every proposed filename must begin with the prefix %q immediately before descriptive text.",
policies.Prefix,
),
)
}
if policies.AllowSpaces {
lines = append(lines, "Spaces in filenames are permitted when they improve clarity.")
@@ -433,80 +390,17 @@ func composeInstructions(sequence prompt.SequenceRule, policies prompt.PolicyCon
lines = append(lines, "Preserve the original ordering of meaningful words when generating new stems.")
}
if len(bannedTerms) > 0 {
lines = append(lines, fmt.Sprintf("Never include these banned tokens (case-insensitive) in any proposed filename: %s.", strings.Join(bannedTerms, ", ")))
lines = append(
lines,
fmt.Sprintf(
"Never include these banned tokens (case-insensitive) in any proposed filename: %s.",
strings.Join(bannedTerms, ", "),
),
)
}
return strings.Join(lines, "\n")
}

func normalizeCasing(value string) (string, error) {
trimmed := strings.TrimSpace(value)
if trimmed == "" {
return "kebab", nil
}
lower := strings.ToLower(trimmed)
supported := map[string]string{
"kebab": "kebab",
"snake": "snake",
"camel": "camel",
"pascal": "pascal",
"title": "title",
}
if normalized, ok := supported[lower]; ok {
return normalized, nil
}
return "", fmt.Errorf("unsupported naming casing %q (allowed: kebab, snake, camel, pascal, title)", value)
}

func sanitizeTokenSlice(values []string) []string {
unique := make(map[string]struct{})
for _, raw := range values {
for _, part := range strings.Split(raw, ",") {
trimmed := strings.TrimSpace(part)
if trimmed == "" {
continue
}
lower := strings.ToLower(trimmed)
if lower == "" {
continue
}
unique[lower] = struct{}{}
}
}
if len(unique) == 0 {
return nil
}
tokens := make([]string, 0, len(unique))
for token := range unique {
tokens = append(tokens, token)
}
sort.Strings(tokens)
return tokens
}

func mergeBannedTerms(base, extra []string) []string {
unique := make(map[string]struct{})
for _, token := range base {
lower := strings.ToLower(strings.TrimSpace(token))
if lower == "" {
continue
}
unique[lower] = struct{}{}
}
for _, token := range extra {
lower := strings.ToLower(strings.TrimSpace(token))
if lower == "" {
continue
}
unique[lower] = struct{}{}
}
result := make([]string, 0, len(unique))
for token := range unique {
result = append(result, token)
}
sort.Strings(result)
return result
}

func buildIgnoreSet(workingDir string, paths ...string) map[string]struct{} {
ignore := make(map[string]struct{})
for _, path := range paths {

@@ -123,17 +123,15 @@ renamer extension <source-ext...> <target-ext> [flags]

## AI Command Secrets

- AI model authentication tokens are loaded from `$HOME/.config/.renamer/<MODEL>_MODEL_AUTH_TOKEN`. The default model token file is `default_MODEL_AUTH_TOKEN`, but any `--genkit-model` override maps to the same naming scheme.
- Token files must contain only the raw API key with no extra whitespace; restrictive permissions (owner read/write) are recommended to keep credentials private.
- AI vendor authentication tokens are read from the `.renamer` environment file located at `$HOME/.config/.renamer` by default (override with `RENAMER_CONFIG_DIR`). Each entry should follow the uppercase `<VENDOR>_TOKEN=...` naming convention; whitespace is trimmed automatically.
- See `.renamer.example` for a pre-populated template covering OpenAI, Anthropic, Google Gemini, Mistral, Cohere, Moonshot, Zhipu, Alibaba DashScope, Baidu Wenxin, MiniMax, ByteDance Doubao, DeepSeek, and xAI Grok tokens.
- Direct environment variables still take precedence over the config file, enabling CI/CD pipelines to inject secrets without touching the filesystem (see the sketch below).
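A minimal, self-contained sketch of that lookup order (environment variable first, then the `.renamer` file); the helper name `lookupVendorToken` is hypothetical and only mirrors the behaviour described above:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/joho/godotenv"
)

// lookupVendorToken is a hypothetical helper that follows the documented order:
// a non-empty process environment variable wins, otherwise the .renamer env
// file under RENAMER_CONFIG_DIR (default ~/.config/.renamer) is consulted.
func lookupVendorToken(key string) (string, bool) {
	if v := strings.TrimSpace(os.Getenv(key)); v != "" {
		return v, true // e.g. a secret injected by a CI/CD pipeline
	}
	dir := os.Getenv("RENAMER_CONFIG_DIR")
	if dir == "" {
		home, err := os.UserHomeDir()
		if err != nil {
			return "", false
		}
		dir = filepath.Join(home, ".config", ".renamer")
	}
	values, err := godotenv.Read(filepath.Join(dir, ".renamer"))
	if err != nil {
		return "", false
	}
	v := strings.TrimSpace(values[key])
	return v, v != ""
}

func main() {
	if token, ok := lookupVendorToken("OPENAI_TOKEN"); ok {
		fmt.Println("resolved OPENAI_TOKEN with length", len(token))
	} else {
		fmt.Println("OPENAI_TOKEN not configured")
	}
}
```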

### AI Command Flags

- `--genkit-model <id>` overrides the default OpenAI-compatible model used by the embedded Genkit workflow. When omitted, `gpt-4o-mini` is used.
- `--debug-genkit` streams prompt/response telemetry (including prompt hashes and warnings) to stderr so you can archive the exchange for auditing.
- `--export-plan <path>` writes the exact AI response (prompt hash, model, warnings, and proposed items) to a JSON file. The same file can be edited and re-imported to tweak filenames before applying.
- `--import-plan <path>` loads a previously exported or manually curated JSON plan. The CLI re-validates all entries before previewing or applying changes.
- `--naming-casing <style>` enforces a casing policy (`kebab`, `snake`, `camel`, `pascal`, `title`). Banned tokens, prefix rules, and spacing requirements are evaluated against the imported or generated plan.
- `--naming-prefix`, `--naming-allow-spaces`, `--naming-keep-order`, and `--banned` extend the policy envelope that both the prompt and validator obey.
- `--yes` applies the currently loaded plan. Without `--yes`, the command remains in preview mode even when you import a plan.
- Naming policies and sanitization are handled directly inside the AI workflow; no additional CLI flags are required.
- `--yes` applies the currently loaded plan. Without `--yes`, the command remains in preview mode even when a plan already exists.

> Tip: Run `renamer ai --path ./fixtures --dry-run --export-plan plan.json` to capture the initial draft, edit the JSON file, then `renamer ai --path ./fixtures --import-plan plan.json --yes` to apply the curated result.
> Tip: Running `renamer ai` writes or refreshes `renamer.plan.json` in the working directory. Edit that file as needed, then re-run `renamer ai --yes` to apply the reviewed plan once the preview looks good.

@@ -15,12 +15,11 @@ import (

const (
configDirEnvVar = "RENAMER_CONFIG_DIR"
defaultConfigRoot = ".renamer"
configFileName = ".renamer"
defaultVendorSlug = "openai"

modelTokenSuffix = "_MODEL_AUTH_TOKEN"
vendorTokenSuffix = "_TOKEN"

defaultEnvFile = ".env"
secondaryEnvFile = "tokens.env"
errTokenNotFoundFmt = "model token %q not found in %s or the process environment"
)

@@ -51,7 +50,7 @@ func NewTokenStore(configDir string) (*TokenStore, error) {
if err != nil {
return nil, fmt.Errorf("resolve user home: %w", err)
}
root = filepath.Join(home, ".config", defaultConfigRoot)
root = filepath.Join(home, ".config", configFileName)
}
}

@@ -67,7 +66,7 @@ func (s *TokenStore) ConfigDir() string {
}

// ResolveModelToken returns the token for the provided model name. Model names
// are normalized to match the `<slug>_MODEL_AUTH_TOKEN` convention documented
// are normalized to match the `<VENDOR>_TOKEN` convention documented
// for the CLI. Environment variables take precedence over file-based tokens.
func (s *TokenStore) ResolveModelToken(model string) (string, error) {
key := ModelTokenKey(model)
@@ -92,107 +91,136 @@ func (s *TokenStore) lookup(key string) (string, error) {
return strings.TrimSpace(val), nil
}

path := filepath.Join(s.configDir, key)
raw, err := os.ReadFile(path)
if err == nil {
value := strings.TrimSpace(string(raw))
if value != "" {
s.values[key] = value
return value, nil
}
} else if !errors.Is(err, fs.ErrNotExist) {
return "", fmt.Errorf("read token file %s: %w", path, err)
}

return "", fmt.Errorf(errTokenNotFoundFmt, key, s.configDir)
return "", fmt.Errorf(errTokenNotFoundFmt, key, s.configFilePath())
}

func (s *TokenStore) ensureLoaded() error {
s.once.Do(func() {
s.err = s.loadEnvFiles()
if s.err != nil {
return
}
s.err = s.scanTokenFiles()
s.err = s.loadConfigFile()
})
return s.err
}

func (s *TokenStore) loadEnvFiles() error {
candidates := []string{
filepath.Join(s.configDir, defaultEnvFile),
filepath.Join(s.configDir, secondaryEnvFile),
}

for _, path := range candidates {
envMap, err := godotenv.Read(path)
if errors.Is(err, fs.ErrNotExist) {
continue
}
if err != nil {
return fmt.Errorf("load %s: %w", path, err)
}
for k, v := range envMap {
if strings.TrimSpace(k) == "" || strings.TrimSpace(v) == "" {
continue
}
s.values[k] = strings.TrimSpace(v)
}
}
return nil
}

func (s *TokenStore) scanTokenFiles() error {
entries, err := os.ReadDir(s.configDir)
func (s *TokenStore) loadConfigFile() error {
path := s.configFilePath()
envMap, err := godotenv.Read(path)
if errors.Is(err, fs.ErrNotExist) {
return nil
}
if err != nil {
return fmt.Errorf("scan %s: %w", s.configDir, err)
return fmt.Errorf("load %s: %w", path, err)
}

for _, entry := range entries {
if entry.IsDir() {
for k, v := range envMap {
if strings.TrimSpace(k) == "" || strings.TrimSpace(v) == "" {
continue
}
name := entry.Name()
path := filepath.Join(s.configDir, name)

content, err := os.ReadFile(path)
if err != nil {
return fmt.Errorf("read %s: %w", path, err)
}

data := strings.TrimSpace(string(content))
if data == "" {
continue
}

if parsed, perr := godotenv.Unmarshal(data); perr == nil && len(parsed) > 0 {
for k, v := range parsed {
if strings.TrimSpace(k) == "" || strings.TrimSpace(v) == "" {
continue
}
s.values[k] = strings.TrimSpace(v)
}
continue
}

s.values[name] = data
s.values[k] = strings.TrimSpace(v)
}

return nil
}

// ModelTokenKey derives the token filename/environment variable for the given
// model name following the `<slug>_MODEL_AUTH_TOKEN` convention. When model is
// empty the default slug `default` is used.
func ModelTokenKey(model string) string {
slug := slugify(model)
if slug == "" {
slug = "default"
func (s *TokenStore) configFilePath() string {
info, err := os.Stat(s.configDir)
if err == nil {
if info.IsDir() {
return filepath.Join(s.configDir, configFileName)
}
return s.configDir
}
return slug + modelTokenSuffix
if strings.HasSuffix(s.configDir, configFileName) {
return s.configDir
}
return filepath.Join(s.configDir, configFileName)
}

// ModelTokenKey derives the vendor token key for the provided model, following
// the `<VENDOR>_TOKEN` convention. When the vendor cannot be inferred the
// default OpenAI slug is returned.
func ModelTokenKey(model string) string {
slug := vendorSlugFromModel(model)
if slug == "" {
slug = defaultVendorSlug
}
return strings.ToUpper(slug) + vendorTokenSuffix
}

func vendorSlugFromModel(model string) string {
normalized := strings.ToLower(strings.TrimSpace(model))
if normalized == "" {
return defaultVendorSlug
}

if explicit := explicitVendorPrefix(normalized); explicit != "" {
return explicit
}

for _, mapping := range vendorHintTable {
for _, hint := range mapping.hints {
if strings.Contains(normalized, hint) {
return mapping.vendor
}
}
}

if firstToken := leadingToken(normalized); firstToken != "" {
return slugify(firstToken)
}

if slug := slugify(normalized); slug != "" {
return slug
}

return defaultVendorSlug
}

func explicitVendorPrefix(value string) string {
separators := func(r rune) bool {
switch r {
case '/', ':', '@':
return true
}
return false
}
parts := strings.FieldsFunc(value, separators)
if len(parts) > 1 {
if slug := slugify(parts[0]); slug != "" {
return slug
}
}
return ""
}

func leadingToken(value string) string {
for i, r := range value {
if unicode.IsLetter(r) || unicode.IsDigit(r) {
continue
}
if i == 0 {
return ""
}
return value[:i]
}
return value
}

var vendorHintTable = []struct {
vendor string
hints []string
}{
{vendor: "openai", hints: []string{"openai", "gpt", "o1", "chatgpt"}},
{vendor: "anthropic", hints: []string{"anthropic", "claude"}},
{vendor: "google", hints: []string{"google", "gemini", "learnlm", "palm"}},
{vendor: "mistral", hints: []string{"mistral", "mixtral", "ministral"}},
{vendor: "cohere", hints: []string{"cohere", "command", "r-plus"}},
{vendor: "moonshot", hints: []string{"moonshot"}},
{vendor: "zhipu", hints: []string{"zhipu", "glm"}},
{vendor: "alibaba", hints: []string{"dashscope", "qwen"}},
{vendor: "baidu", hints: []string{"wenxin", "ernie", "qianfan"}},
{vendor: "minimax", hints: []string{"minimax", "abab"}},
{vendor: "bytedance", hints: []string{"doubao", "bytedance"}},
{vendor: "baichuan", hints: []string{"baichuan"}},
{vendor: "deepseek", hints: []string{"deepseek"}},
{vendor: "xai", hints: []string{"grok", "xai"}},
}

func slugify(input string) string {

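To make the vendor inference above easier to review, here is a table-driven test sketch of the mappings implied by `vendorHintTable` and the `<VENDOR>_TOKEN` convention. It is not part of the commit, and the package clause is an assumption about where `ModelTokenKey` lives:

```go
package token // assumed package name for the TokenStore/ModelTokenKey code above

import "testing"

// Model substrings such as "gpt" or "claude" select the vendor via the hint
// table, and the key is the upper-cased vendor slug plus "_TOKEN".
func TestModelTokenKeyVendorMapping(t *testing.T) {
	cases := map[string]string{
		"gpt-4o-mini":       "OPENAI_TOKEN",
		"claude-3-5-sonnet": "ANTHROPIC_TOKEN",
		"gemini-1.5-pro":    "GOOGLE_TOKEN",
		"qwen-max":          "ALIBABA_TOKEN",
		"deepseek-chat":     "DEEPSEEK_TOKEN",
		"":                  "OPENAI_TOKEN", // empty model falls back to the default vendor
	}
	for model, want := range cases {
		if got := ModelTokenKey(model); got != want {
			t.Errorf("ModelTokenKey(%q) = %q, want %q", model, got, want)
		}
	}
}
```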
@@ -63,7 +63,7 @@ type Workflow struct {

// NewWorkflow instantiates a Genkit workflow for the preferred model. When no
// model is provided it defaults to gpt-4o-mini. The workflow requires a token
// provider capable of resolving `<model>_MODEL_AUTH_TOKEN` secrets.
// provider capable of resolving `<VENDOR>_TOKEN` secrets.
func NewWorkflow(ctx context.Context, opts Options) (*Workflow, error) {
modelName := strings.TrimSpace(opts.Model)
if modelName == "" {

@@ -32,7 +32,7 @@ func (c *captureWorkflow) Run(ctx context.Context, req genkit.Request) (genkit.R
}, nil
}

func TestAICommandAppliesNamingPoliciesToPrompt(t *testing.T) {
func TestAICommandUsesDefaultPoliciesInPrompt(t *testing.T) {
genkit.ResetWorkflowFactory()
stub := &captureWorkflow{}
genkit.OverrideWorkflowFactory(func(ctx context.Context, opts genkit.Options) (genkit.WorkflowRunner, error) {
@@ -51,11 +51,6 @@ func TestAICommandAppliesNamingPoliciesToPrompt(t *testing.T) {
"ai",
"--path", rootDir,
"--dry-run",
"--naming-casing", "snake",
"--naming-prefix", "proj",
"--naming-allow-spaces",
"--naming-keep-order",
"--banned", "alpha",
})

if err := rootCmd.Execute(); err != nil {
@@ -70,38 +65,29 @@ func TestAICommandAppliesNamingPoliciesToPrompt(t *testing.T) {

req := stub.request
policies := req.Payload.Policies
if policies.Prefix != "proj" {
t.Fatalf("expected prefix proj, got %q", policies.Prefix)
if policies.Prefix != "" {
t.Fatalf("expected empty prefix, got %q", policies.Prefix)
}
if policies.Casing != "snake" {
t.Fatalf("expected casing snake, got %q", policies.Casing)
if policies.Casing != "kebab" {
t.Fatalf("expected default casing kebab, got %q", policies.Casing)
}
if !policies.AllowSpaces {
t.Fatalf("expected allow spaces flag to propagate")
if policies.AllowSpaces {
t.Fatalf("expected allow spaces default false")
}
if !policies.KeepOriginalOrder {
t.Fatalf("expected keep original order flag to propagate")
}
if len(policies.ForbiddenTokens) != 1 || policies.ForbiddenTokens[0] != "alpha" {
t.Fatalf("expected forbidden tokens to capture user list, got %#v", policies.ForbiddenTokens)
if policies.KeepOriginalOrder {
t.Fatalf("expected keep original order default false")
}

banned := req.Payload.BannedTerms
containsDefault := false
containsUser := false
for _, term := range banned {
switch term {
case "alpha":
containsUser = true
case "clickbait":
if term == "clickbait" {
containsDefault = true
break
}
}
if !containsUser {
t.Fatalf("expected banned terms to include user-provided token")
}
if !containsDefault {
t.Fatalf("expected banned terms to retain default tokens")
t.Fatalf("expected default banned terms propagated, got %#v", banned)
}
}

@@ -44,7 +44,7 @@ func TestAIApplyAndUndoFlow(t *testing.T) {
writeFile(t, filepath.Join(root, "draft_one.txt"))
writeFile(t, filepath.Join(root, "draft_two.txt"))

planPath := filepath.Join(root, "ai-plan.json")
planPath := filepath.Join(root, "renamer.plan.json")

preview := renamercmd.NewRootCommand()
var previewOut, previewErr bytes.Buffer
@@ -54,7 +54,6 @@ func TestAIApplyAndUndoFlow(t *testing.T) {
"ai",
"--path", root,
"--dry-run",
"--export-plan", planPath,
})

if err := preview.Execute(); err != nil {
@@ -124,7 +123,6 @@ func TestAIApplyAndUndoFlow(t *testing.T) {
"ai",
"--path", root,
"--dry-run",
"--import-plan", planPath,
})

if err := previewEdited.Execute(); err != nil {
@@ -148,7 +146,6 @@ func TestAIApplyAndUndoFlow(t *testing.T) {
applyCmd.SetArgs([]string{
"ai",
"--path", root,
"--import-plan", planPath,
"--yes",
})

@@ -47,9 +47,6 @@ func TestAIPolicyValidationFailsWithActionableMessage(t *testing.T) {
"ai",
"--path", rootDir,
"--dry-run",
"--naming-casing", "kebab",
"--naming-prefix", "proj",
"--banned", "offer",
})

err := rootCmd.Execute()
@@ -58,9 +55,6 @@ func TestAIPolicyValidationFailsWithActionableMessage(t *testing.T) {
}

lines := stderr.String()
if !strings.Contains(lines, "Policy violation (prefix)") {
t.Fatalf("expected prefix violation message in stderr, got: %s", lines)
}
if !strings.Contains(lines, "Policy violation (banned)") {
t.Fatalf("expected banned token message in stderr, got: %s", lines)
}

@@ -53,19 +53,18 @@ func TestAIPreviewFlowRendersSequenceTable(t *testing.T) {
createAIPreviewFile(t, filepath.Join(root, "promo SALE 01.JPG"))
createAIPreviewFile(t, filepath.Join(root, "family_photo.png"))

t.Setenv("default_MODEL_AUTH_TOKEN", "test-token")
t.Setenv("OPENAI_TOKEN", "test-token")

rootCmd := renamercmd.NewRootCommand()
var stdout, stderr bytes.Buffer
rootCmd.SetOut(&stdout)
rootCmd.SetErr(&stderr)
exportPath := filepath.Join(root, "plan.json")
exportPath := filepath.Join(root, "renamer.plan.json")
rootCmd.SetArgs([]string{
"ai",
"--path", root,
"--dry-run",
"--debug-genkit",
"--export-plan", exportPath,
})

if err := rootCmd.Execute(); err != nil {