feat: switch upload hash to md5
This commit is contained in:
189
backend/app/commands/storage_migrate/migrate.go
Normal file
189
backend/app/commands/storage_migrate/migrate.go
Normal file
@@ -0,0 +1,189 @@
|
||||
package storage_migrate
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"quyun/v2/app/commands"
|
||||
"quyun/v2/database"
|
||||
"quyun/v2/database/models"
|
||||
"quyun/v2/providers/postgres"
|
||||
"quyun/v2/providers/storage"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"go.ipao.vip/atom"
|
||||
"go.ipao.vip/atom/container"
|
||||
"go.uber.org/dig"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func defaultProviders() container.Providers {
|
||||
return commands.Default(container.Providers{
|
||||
postgres.DefaultProvider(),
|
||||
storage.DefaultProvider(),
|
||||
database.DefaultProvider(),
|
||||
}...)
|
||||
}
|
||||
|
||||
func Command() atom.Option {
|
||||
return atom.Command(
|
||||
atom.Name("storage-migrate"),
|
||||
atom.Short("migrate media assets to md5 object keys"),
|
||||
atom.Arguments(func(cmd *cobra.Command) {
|
||||
cmd.Flags().Bool("dry-run", false, "preview changes without writing")
|
||||
cmd.Flags().Int("batch", 200, "batch size per scan")
|
||||
}),
|
||||
atom.RunE(Serve),
|
||||
atom.Providers(defaultProviders()),
|
||||
atom.Example("storage-migrate --dry-run"),
|
||||
)
|
||||
}
|
||||
|
||||
// Service bundles the dependencies injected into Serve via dig.
type Service struct {
	dig.In

	// DB is the gorm connection used for media-asset and tenant queries.
	DB *gorm.DB
	// Storage provides the storage configuration (notably LocalPath).
	Storage *storage.Storage
}
|
||||
|
||||
// Serve scans all media assets in ID order and migrates local-storage
// files to MD5-based object keys (quyun/<tenant>/<md5><ext>): it hashes
// each file on disk, renames it to the new key, and updates the row's
// ObjectKey/Hash. With --dry-run it only prints what would change.
func Serve(cmd *cobra.Command, args []string) error {
	return container.Container.Invoke(func(ctx context.Context, svc Service) error {
		models.SetDefault(svc.DB)

		dryRun, _ := cmd.Flags().GetBool("dry-run")
		batchSize, _ := cmd.Flags().GetInt("batch")
		if batchSize <= 0 {
			batchSize = 200
		}

		// Root directory for local object storage; falls back to ./storage.
		localPath := svc.Storage.Config.LocalPath
		if localPath == "" {
			localPath = "./storage"
		}

		fmt.Printf("storage migrate: dry-run=%v batch=%d local-path=%s\n", dryRun, batchSize, localPath)

		// Cache tenants so each tenant row is fetched at most once.
		tenantCache := make(map[int64]*models.Tenant)
		offset := 0
		for {
			// NOTE(review): offset pagination is safe here only because the
			// query has no WHERE filter, so updated rows stay in the result
			// set; keyset pagination on ID would be more robust — confirm.
			tbl, q := models.MediaAssetQuery.QueryContext(ctx)
			list, err := q.Order(tbl.ID.Asc()).Offset(offset).Limit(batchSize).Find()
			if err != nil {
				return err
			}
			if len(list) == 0 {
				break
			}

			for _, asset := range list {
				// Only process local-storage assets that have a real file path.
				if strings.ToLower(asset.Provider) != "local" {
					continue
				}
				if asset.ObjectKey == "" {
					continue
				}
				// Keys that are full URLs point at remote objects; skip them.
				if strings.HasPrefix(asset.ObjectKey, "http://") || strings.HasPrefix(asset.ObjectKey, "https://") {
					continue
				}

				// Resolve the on-disk path; relative keys live under localPath.
				srcPath := asset.ObjectKey
				if !filepath.IsAbs(srcPath) {
					srcPath = filepath.Join(localPath, filepath.FromSlash(srcPath))
				}

				// Unreadable/missing files are skipped, not fatal (best-effort).
				hash, size, err := fileMD5(srcPath)
				if err != nil {
					fmt.Printf("skip asset=%d err=%v\n", asset.ID, err)
					continue
				}

				// Prefer the original filename from metadata so the new key
				// keeps the correct extension.
				filename := asset.Meta.Data().Filename
				if filename == "" {
					filename = path.Base(asset.ObjectKey)
				}

				var tenant *models.Tenant
				if asset.TenantID > 0 {
					if cached, ok := tenantCache[asset.TenantID]; ok {
						tenant = cached
					} else if t, err := models.TenantQuery.WithContext(ctx).Where(models.TenantQuery.ID.Eq(asset.TenantID)).First(); err == nil {
						tenantCache[asset.TenantID] = t
						tenant = t
					}
				}

				newKey := buildObjectKey(tenant, hash, filename)
				dstPath := filepath.Join(localPath, filepath.FromSlash(newKey))
				// Already migrated: nothing to do.
				if asset.ObjectKey == newKey && asset.Hash == hash {
					continue
				}

				if !dryRun {
					if asset.ObjectKey != newKey {
						if err := os.MkdirAll(filepath.Dir(dstPath), 0o755); err != nil {
							return err
						}
						// If an identical-hash file already sits at the
						// destination, drop the duplicate source instead of
						// renaming over it.
						if _, err := os.Stat(dstPath); err == nil {
							if srcPath != dstPath {
								_ = os.Remove(srcPath)
							}
						} else if err := os.Rename(srcPath, dstPath); err != nil {
							return err
						}
					}

					_, err := models.MediaAssetQuery.WithContext(ctx).
						Where(models.MediaAssetQuery.ID.Eq(asset.ID)).
						UpdateSimple(
							models.MediaAssetQuery.ObjectKey.Value(newKey),
							models.MediaAssetQuery.Hash.Value(hash),
							models.MediaAssetQuery.UpdatedAt.Value(time.Now()),
						)
					if err != nil {
						return err
					}
				}

				// Printed in dry-run mode too, as a preview of the change.
				fmt.Printf("migrated asset=%d key=%s hash=%s size=%d\n", asset.ID, newKey, hash, size)
			}

			offset += len(list)
		}

		return nil
	})
}
|
||||
|
||||
func buildObjectKey(tenant *models.Tenant, hash, filename string) string {
|
||||
// 按租户维度组织对象路径:quyun/<tenant_uuid>/<md5>.<ext>
|
||||
tenantUUID := "public"
|
||||
if tenant != nil && tenant.UUID.String() != "" {
|
||||
tenantUUID = tenant.UUID.String()
|
||||
}
|
||||
ext := strings.ToLower(filepath.Ext(filename))
|
||||
return path.Join("quyun", tenantUUID, hash+ext)
|
||||
}
|
||||
|
||||
func fileMD5(filename string) (string, int64, error) {
|
||||
f, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return "", 0, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
h := md5.New()
|
||||
size, err := io.Copy(h, f)
|
||||
if err != nil {
|
||||
return "", size, err
|
||||
}
|
||||
return hex.EncodeToString(h.Sum(nil)), size, nil
|
||||
}
|
||||
@@ -59,7 +59,7 @@ func (c *Common) GetOptions(ctx fiber.Ctx) (*dto.OptionsResponse, error) {
|
||||
// @Tags Common
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param hash query string true "File Hash"
|
||||
// @Param hash query string true "File MD5 Hash"
|
||||
// @Success 200 {object} dto.UploadResult
|
||||
// @Bind user local key(__ctx_user)
|
||||
// @Bind hash query
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
package dto
|
||||
|
||||
type UploadInitForm struct {
|
||||
// Hash 文件哈希(用于秒传校验)。
|
||||
// Hash 文件 MD5 哈希(用于秒传校验)。
|
||||
Hash string `json:"hash"`
|
||||
// Size 文件大小(字节)。
|
||||
Size int64 `json:"size"`
|
||||
|
||||
@@ -2,7 +2,7 @@ package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"io"
|
||||
@@ -196,7 +196,7 @@ func (s *common) CompleteUpload(ctx context.Context, userID int64, form *common_
|
||||
}
|
||||
defer dst.Close()
|
||||
|
||||
hasher := sha256.New()
|
||||
hasher := md5.New()
|
||||
var totalSize int64
|
||||
|
||||
for _, partNum := range parts {
|
||||
@@ -346,7 +346,7 @@ func (s *common) Upload(
|
||||
}
|
||||
|
||||
// Hash calculation while copying
|
||||
hasher := sha256.New()
|
||||
hasher := md5.New()
|
||||
size, err := io.Copy(io.MultiWriter(dst, hasher), src)
|
||||
dst.Close() // Close immediately to allow removal if needed
|
||||
if err != nil {
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
-- +goose Up
|
||||
COMMENT ON COLUMN media_assets.hash IS '文件 MD5 哈希';
|
||||
|
||||
-- +goose Down
|
||||
COMMENT ON COLUMN media_assets.hash IS '文件 SHA-256 哈希';
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"quyun/v2/app/commands/http"
|
||||
"quyun/v2/app/commands/migrate"
|
||||
"quyun/v2/app/commands/seed"
|
||||
"quyun/v2/app/commands/storage_migrate"
|
||||
"quyun/v2/pkg/utils"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
@@ -34,6 +35,7 @@ func main() {
|
||||
http.Command(),
|
||||
migrate.Command(),
|
||||
seed.Command(),
|
||||
storage_migrate.Command(),
|
||||
}
|
||||
|
||||
if err := atom.Serve(opts...); err != nil {
|
||||
|
||||
144
frontend/portal/src/utils/md5.js
Normal file
144
frontend/portal/src/utils/md5.js
Normal file
@@ -0,0 +1,144 @@
|
||||
// Nibble-to-hex lookup table used by toHex().
const hexChars = '0123456789abcdef';
|
||||
|
||||
// 32-bit modular addition; the result is coerced to an unsigned 32-bit value.
function add(x, y) {
  const sum = x + y;
  return sum >>> 0;
}
|
||||
|
||||
// Rotate the 32-bit value x left by n bits.
function rol(x, n) {
  const high = x << n;
  const low = x >>> (32 - n);
  return high | low;
}
|
||||
|
||||
// Shared MD5 round step: a = b + rol(a + q + x + t, s).
function cmn(q, a, b, x, s, t) {
  const acc = add(add(a, q), add(x, t));
  return add(rol(acc, s), b);
}
|
||||
|
||||
// Round-1 step with the F mixing function: (b AND c) OR (NOT b AND d).
function ff(a, b, c, d, x, s, t) {
  const f = (b & c) | (~b & d);
  return cmn(f, a, b, x, s, t);
}
|
||||
|
||||
// Round-2 step with the G mixing function: (b AND d) OR (c AND NOT d).
function gg(a, b, c, d, x, s, t) {
  const g = (b & d) | (c & ~d);
  return cmn(g, a, b, x, s, t);
}
|
||||
|
||||
// Round-3 step with the H mixing function: b XOR c XOR d.
function hh(a, b, c, d, x, s, t) {
  const h = b ^ c ^ d;
  return cmn(h, a, b, x, s, t);
}
|
||||
|
||||
// Round-4 step with the I mixing function: c XOR (b OR NOT d).
function ii(a, b, c, d, x, s, t) {
  const v = c ^ (b | ~d);
  return cmn(v, a, b, x, s, t);
}
|
||||
|
||||
// Render a 32-bit word as 8 hex digits in little-endian byte order
// (least-significant byte first), as MD5 output requires.
function toHex(n) {
  let out = '';
  for (let shift = 0; shift < 32; shift += 8) {
    const byte = (n >>> shift) & 0xff;
    out += byte.toString(16).padStart(2, '0');
  }
  return out;
}
|
||||
|
||||
// md5ArrayBuffer computes the MD5 digest (RFC 1321) of an ArrayBuffer and
// returns it as a 32-character lowercase hex string.
//
// Fix: the message length is a 64-bit quantity in MD5. The previous code
// stored only the low 32 bits and computed the padding index with 32-bit
// shifts on bitLen (`bitLen >> 5`, `bitLen % 32`), silently producing
// wrong digests for inputs of 512 MiB or more. Index math is now done on
// byteLen and with Math.floor, and both length words are written.
//
// NOTE(review): this hashes the whole buffer in memory; callers pass
// entire files, so very large uploads cost proportional RAM — confirm
// acceptable, or chunk the hashing.
export function md5ArrayBuffer(buffer) {
  const bytes = new Uint8Array(buffer);
  const byteLen = bytes.length;

  // Pack bytes into little-endian 32-bit words.
  const words = [];
  for (let i = 0; i < byteLen; i++) {
    words[i >> 2] |= bytes[i] << ((i % 4) * 8);
  }

  // Append the mandatory 0x80 terminator byte.
  words[byteLen >> 2] |= 0x80 << ((byteLen % 4) * 8);

  // Pad to a multiple of 16 words, reserving the last two words of the
  // final 512-bit block for the 64-bit bit length.
  const totalLen = (Math.floor((byteLen + 8) / 64) + 1) * 16;
  for (let i = 0; i < totalLen; i++) {
    if (words[i] === undefined) {
      words[i] = 0;
    }
  }
  const bitLen = byteLen * 8; // exact as a double up to 2^53
  words[totalLen - 2] = bitLen >>> 0; // low 32 bits (ToUint32 is mod 2^32)
  words[totalLen - 1] = Math.floor(bitLen / 0x100000000); // high 32 bits

  // Standard MD5 initialization vector.
  let a = 0x67452301;
  let b = 0xefcdab89;
  let c = 0x98badcfe;
  let d = 0x10325476;

  // Process each 512-bit (16-word) block through the four rounds.
  for (let i = 0; i < words.length; i += 16) {
    const oa = a;
    const ob = b;
    const oc = c;
    const od = d;

    a = ff(a, b, c, d, words[i + 0], 7, 0xd76aa478);
    d = ff(d, a, b, c, words[i + 1], 12, 0xe8c7b756);
    c = ff(c, d, a, b, words[i + 2], 17, 0x242070db);
    b = ff(b, c, d, a, words[i + 3], 22, 0xc1bdceee);
    a = ff(a, b, c, d, words[i + 4], 7, 0xf57c0faf);
    d = ff(d, a, b, c, words[i + 5], 12, 0x4787c62a);
    c = ff(c, d, a, b, words[i + 6], 17, 0xa8304613);
    b = ff(b, c, d, a, words[i + 7], 22, 0xfd469501);
    a = ff(a, b, c, d, words[i + 8], 7, 0x698098d8);
    d = ff(d, a, b, c, words[i + 9], 12, 0x8b44f7af);
    c = ff(c, d, a, b, words[i + 10], 17, 0xffff5bb1);
    b = ff(b, c, d, a, words[i + 11], 22, 0x895cd7be);
    a = ff(a, b, c, d, words[i + 12], 7, 0x6b901122);
    d = ff(d, a, b, c, words[i + 13], 12, 0xfd987193);
    c = ff(c, d, a, b, words[i + 14], 17, 0xa679438e);
    b = ff(b, c, d, a, words[i + 15], 22, 0x49b40821);

    a = gg(a, b, c, d, words[i + 1], 5, 0xf61e2562);
    d = gg(d, a, b, c, words[i + 6], 9, 0xc040b340);
    c = gg(c, d, a, b, words[i + 11], 14, 0x265e5a51);
    b = gg(b, c, d, a, words[i + 0], 20, 0xe9b6c7aa);
    a = gg(a, b, c, d, words[i + 5], 5, 0xd62f105d);
    d = gg(d, a, b, c, words[i + 10], 9, 0x02441453);
    c = gg(c, d, a, b, words[i + 15], 14, 0xd8a1e681);
    b = gg(b, c, d, a, words[i + 4], 20, 0xe7d3fbc8);
    a = gg(a, b, c, d, words[i + 9], 5, 0x21e1cde6);
    d = gg(d, a, b, c, words[i + 14], 9, 0xc33707d6);
    c = gg(c, d, a, b, words[i + 3], 14, 0xf4d50d87);
    b = gg(b, c, d, a, words[i + 8], 20, 0x455a14ed);
    a = gg(a, b, c, d, words[i + 13], 5, 0xa9e3e905);
    d = gg(d, a, b, c, words[i + 2], 9, 0xfcefa3f8);
    c = gg(c, d, a, b, words[i + 7], 14, 0x676f02d9);
    b = gg(b, c, d, a, words[i + 12], 20, 0x8d2a4c8a);

    a = hh(a, b, c, d, words[i + 5], 4, 0xfffa3942);
    d = hh(d, a, b, c, words[i + 8], 11, 0x8771f681);
    c = hh(c, d, a, b, words[i + 11], 16, 0x6d9d6122);
    b = hh(b, c, d, a, words[i + 14], 23, 0xfde5380c);
    a = hh(a, b, c, d, words[i + 1], 4, 0xa4beea44);
    d = hh(d, a, b, c, words[i + 4], 11, 0x4bdecfa9);
    c = hh(c, d, a, b, words[i + 7], 16, 0xf6bb4b60);
    b = hh(b, c, d, a, words[i + 10], 23, 0xbebfbc70);
    a = hh(a, b, c, d, words[i + 13], 4, 0x289b7ec6);
    d = hh(d, a, b, c, words[i + 0], 11, 0xeaa127fa);
    c = hh(c, d, a, b, words[i + 3], 16, 0xd4ef3085);
    b = hh(b, c, d, a, words[i + 6], 23, 0x04881d05);
    a = hh(a, b, c, d, words[i + 9], 4, 0xd9d4d039);
    d = hh(d, a, b, c, words[i + 12], 11, 0xe6db99e5);
    c = hh(c, d, a, b, words[i + 15], 16, 0x1fa27cf8);
    b = hh(b, c, d, a, words[i + 2], 23, 0xc4ac5665);

    a = ii(a, b, c, d, words[i + 0], 6, 0xf4292244);
    d = ii(d, a, b, c, words[i + 7], 10, 0x432aff97);
    c = ii(c, d, a, b, words[i + 14], 15, 0xab9423a7);
    b = ii(b, c, d, a, words[i + 5], 21, 0xfc93a039);
    a = ii(a, b, c, d, words[i + 12], 6, 0x655b59c3);
    d = ii(d, a, b, c, words[i + 3], 10, 0x8f0ccc92);
    c = ii(c, d, a, b, words[i + 10], 15, 0xffeff47d);
    b = ii(b, c, d, a, words[i + 1], 21, 0x85845dd1);
    a = ii(a, b, c, d, words[i + 8], 6, 0x6fa87e4f);
    d = ii(d, a, b, c, words[i + 15], 10, 0xfe2ce6e0);
    c = ii(c, d, a, b, words[i + 6], 15, 0xa3014314);
    b = ii(b, c, d, a, words[i + 13], 21, 0x4e0811a1);
    a = ii(a, b, c, d, words[i + 4], 6, 0xf7537e82);
    d = ii(d, a, b, c, words[i + 11], 10, 0xbd3af235);
    c = ii(c, d, a, b, words[i + 2], 15, 0x2ad7d2bb);
    b = ii(b, c, d, a, words[i + 9], 21, 0xeb86d391);

    a = add(a, oa);
    b = add(b, ob);
    c = add(c, oc);
    d = add(d, od);
  }

  return toHex(a) + toHex(b) + toHex(c) + toHex(d);
}
|
||||
@@ -222,7 +222,7 @@ import ProgressBar from 'primevue/progressbar';
|
||||
import Image from 'primevue/image';
|
||||
import Toast from 'primevue/toast';
|
||||
import draggable from 'vuedraggable';
|
||||
import { sha256 } from 'js-sha256';
|
||||
import { md5ArrayBuffer } from '../../utils/md5';
|
||||
import { useToast } from 'primevue/usetoast';
|
||||
import { computed, reactive, ref, onMounted } from 'vue';
|
||||
import { useRouter, useRoute } from 'vue-router';
|
||||
@@ -339,7 +339,7 @@ const triggerUpload = (type) => {
|
||||
|
||||
const calculateHash = async (file) => {
|
||||
const buffer = await file.arrayBuffer();
|
||||
return sha256(buffer);
|
||||
return md5ArrayBuffer(buffer);
|
||||
};
|
||||
|
||||
const handleFileChange = async (event) => {
|
||||
|
||||
Reference in New Issue
Block a user