diff --git a/backend/app/jobs/download_from_alioss.go b/backend/app/jobs/download_from_alioss.go index a3d1b6d..d3d7157 100644 --- a/backend/app/jobs/download_from_alioss.go +++ b/backend/app/jobs/download_from_alioss.go @@ -93,5 +93,15 @@ func (w *DownloadFromAliOSSWorker) Work(ctx context.Context, job *Job[DownloadFr log.Infof("Successfully downloaded file: %s", media.Path) + if err := w.job.Add(&VideoCut{MediaID: job.Args.MediaID}); err != nil { + log.Errorf("Error adding job: %v", err) + return err + } + + if err := w.job.Add(&VideoExtractHeadImage{MediaID: job.Args.MediaID}); err != nil { + log.Errorf("Error adding job: %v", err) + return err + } + return nil } diff --git a/backend/app/jobs/extract_head_image_from_video.go b/backend/app/jobs/extract_head_image_from_video.go deleted file mode 100644 index dfdbf80..0000000 --- a/backend/app/jobs/extract_head_image_from_video.go +++ /dev/null @@ -1,79 +0,0 @@ -package jobs - -import ( - "context" - "time" - - "quyun/app/models" - "quyun/providers/ali" - "quyun/providers/app" - "quyun/providers/job" - - . "github.com/riverqueue/river" - log "github.com/sirupsen/logrus" - _ "go.ipao.vip/atom" - "go.ipao.vip/atom/contracts" -) - -var _ contracts.JobArgs = (*WechatCallback)(nil) - -type ExtractHeadImageFromVideo struct { - MediaID int64 `json:"media_id"` -} - -func (s ExtractHeadImageFromVideo) InsertOpts() InsertOpts { - return InsertOpts{ - Queue: QueueDefault, - Priority: PriorityDefault, - } -} - -func (s ExtractHeadImageFromVideo) Kind() string { return "extract_head_image_from_video" } -func (a ExtractHeadImageFromVideo) UniqueID() string { return a.Kind() } - -var _ Worker[ExtractHeadImageFromVideo] = (*ExtractHeadImageFromVideoWorker)(nil) - -// @provider(job) -type ExtractHeadImageFromVideoWorker struct { - WorkerDefaults[ExtractHeadImageFromVideo] - - oss *ali.OSSClient - job *job.Job - app *app.Config -} - -func (w *ExtractHeadImageFromVideoWorker) NextRetry(job *Job[ExtractHeadImageFromVideo]) time.Time { - return time.Now().Add(30 * time.Second) -} - -func (w *ExtractHeadImageFromVideoWorker) Work(ctx context.Context, job *Job[ExtractHeadImageFromVideo]) error { - log := log.WithField("job", job.Args.Kind()) - - log.Infof("[Start] Working on job with strings: %+v", job.Args) - defer log.Infof("[End] Finished %s", job.Args.Kind()) - - media, err := models.Medias.GetByID(ctx, job.Args.MediaID) - if err != nil { - log.Errorf("Error getting media by ID: %v", err) - return JobCancel(err) - } - _ = media - - // TODO - - // path := "/Users/rogee/Projects/self/quyun/backend/fixtures/oss/" - // dst := filepath.Join(path, media.Path) - - // // use ffmpeg to extract audio from video - // audioPath := filepath.Join(path, media.Hash+".mp3") - - // cmd := exec.Command("ffmpeg", "-i", dst, audioPath) - // if err := cmd.Run(); err != nil { - // log.Errorf("Error extracting audio: %v", err) - // return err - // } - - // log.Infof("Successfully extracted audio to: %s", audioPath) - - return nil -} diff --git a/backend/app/jobs/provider.gen.go b/backend/app/jobs/provider.gen.go index ce2eb79..cac3332 100755 --- a/backend/app/jobs/provider.gen.go +++ b/backend/app/jobs/provider.gen.go @@ -60,10 +60,12 @@ func Provide(opts ...opt.Option) error { } if err := container.Container.Provide(func( __job *job.Job, + app *app.Config, job *job.Job, oss *ali.OSSClient, ) (contracts.Initial, error) { - obj := &ExtractAudioFromVideoWorker{ + obj := &VideoCutWorker{ + app: app, job: job, oss: oss, } @@ -81,7 +83,26 @@ func Provide(opts ...opt.Option) error { job 
*job.Job, oss *ali.OSSClient, ) (contracts.Initial, error) { - obj := &ExtractHeadImageFromVideoWorker{ + obj := &VideoExtractHeadImageWorker{ + app: app, + job: job, + oss: oss, + } + if err := river.AddWorkerSafely(__job.Workers, obj); err != nil { + return nil, err + } + + return obj, nil + }, atom.GroupInitial); err != nil { + return err + } + if err := container.Container.Provide(func( + __job *job.Job, + app *app.Config, + job *job.Job, + oss *ali.OSSClient, + ) (contracts.Initial, error) { + obj := &VideoStoreShortWorker{ app: app, job: job, oss: oss, diff --git a/backend/app/jobs/video_cut.go b/backend/app/jobs/video_cut.go index 3c18fff..1770a63 100644 --- a/backend/app/jobs/video_cut.go +++ b/backend/app/jobs/video_cut.go @@ -1,13 +1,13 @@ package jobs import ( - "bufio" "context" - "os/exec" "path/filepath" "time" "quyun/app/models" + "quyun/database/fields" + "quyun/pkg/utils" "quyun/providers/ali" "quyun/providers/app" "quyun/providers/job" @@ -16,7 +16,6 @@ import ( log "github.com/sirupsen/logrus" _ "go.ipao.vip/atom" "go.ipao.vip/atom/contracts" - "golang.org/x/sync/errgroup" ) var _ contracts.JobArgs = (*VideoCut)(nil) @@ -63,53 +62,34 @@ func (w *VideoCutWorker) Work(ctx context.Context, job *Job[VideoCut]) error { } input := filepath.Join(w.app.StoragePath, media.Path) - output := input[:len(input)-len(filepath.Ext(input))] + "-output" + filepath.Ext(input) + output := input[:len(input)-len(filepath.Ext(input))] + "-short" + filepath.Ext(input) log.Infof("cut video process %s to %s", input, output) - cmd := exec.Command("ffmpeg", "-ss", "00:00:00", "-i", input, "-to", "00:01:00", "-c", "copy", output) - stdout, err := cmd.StdoutPipe() + if err := utils.CutMedia(input, output, 0, 60); err != nil { + log.Errorf("Error cutting media: %v", err) + return JobCancel(err) + } + + duration, err := utils.GetMediaDuration(input) if err != nil { - log.Errorf("Error creating stdout pipe: %v", err) - return err + log.Errorf("Error getting media duration: %v", err) + return JobCancel(err) + } + // update media metas + metas := fields.MediaMetas{ + ParentHash: "", + Short: false, + Duration: duration, + } + if err := models.Medias.UpdateMetas(ctx, media.ID, metas); err != nil { + log.Errorf("Error updating media metas: %v", err) + return JobCancel(err) } - stderr, err := cmd.StderrPipe() - if err != nil { - log.Errorf("Error creating stderr pipe: %v", err) - return err - } - - if err := cmd.Start(); err != nil { - log.Errorf("Error starting command: %v", err) - return err - } - - var eg errgroup.Group - eg.Go(func() error { - scanner := bufio.NewScanner(stdout) - for scanner.Scan() { - log.Info(scanner.Text()) - } - return nil + // save to database + return w.job.Add(&VideoStoreShort{ + MediaID: media.ID, + FilePath: output, }) - - eg.Go(func() error { - scanner := bufio.NewScanner(stderr) - for scanner.Scan() { - log.Error(scanner.Text()) - } - return nil - }) - - if err := cmd.Wait(); err != nil { - log.Errorf("Error waiting for command: %v", err) - return err - } - - if err := eg.Wait(); err != nil { - log.Errorf("Error waiting for command: %v", err) - return err - } - return nil } diff --git a/backend/app/jobs/video_extract_head_image.go b/backend/app/jobs/video_extract_head_image.go new file mode 100644 index 0000000..4a372a3 --- /dev/null +++ b/backend/app/jobs/video_extract_head_image.go @@ -0,0 +1,111 @@ +package jobs + +import ( + "context" + "os" + "path/filepath" + "time" + + "quyun/app/models" + "quyun/database/fields" + "quyun/database/schemas/public/model" + 
"quyun/pkg/utils" + "quyun/providers/ali" + "quyun/providers/app" + "quyun/providers/job" + + . "github.com/riverqueue/river" + log "github.com/sirupsen/logrus" + _ "go.ipao.vip/atom" + "go.ipao.vip/atom/contracts" +) + +var _ contracts.JobArgs = (*VideoExtractHeadImage)(nil) + +type VideoExtractHeadImage struct { + MediaID int64 `json:"media_id"` +} + +func (s VideoExtractHeadImage) InsertOpts() InsertOpts { + return InsertOpts{ + Queue: QueueDefault, + Priority: PriorityDefault, + } +} + +func (s VideoExtractHeadImage) Kind() string { return "video_extract_head_image" } +func (a VideoExtractHeadImage) UniqueID() string { return a.Kind() } + +var _ Worker[VideoExtractHeadImage] = (*VideoExtractHeadImageWorker)(nil) + +// @provider(job) +type VideoExtractHeadImageWorker struct { + WorkerDefaults[VideoExtractHeadImage] + + oss *ali.OSSClient + job *job.Job + app *app.Config +} + +func (w *VideoExtractHeadImageWorker) NextRetry(job *Job[VideoExtractHeadImage]) time.Time { + return time.Now().Add(30 * time.Second) +} + +func (w *VideoExtractHeadImageWorker) Work(ctx context.Context, job *Job[VideoExtractHeadImage]) error { + log := log.WithField("job", job.Args.Kind()) + + log.Infof("[Start] Working on job with strings: %+v", job.Args) + defer log.Infof("[End] Finished %s", job.Args.Kind()) + + media, err := models.Medias.GetByID(ctx, job.Args.MediaID) + if err != nil { + log.Errorf("Error getting media by ID: %v", err) + return JobCancel(err) + } + _ = media + + input := filepath.Join(w.app.StoragePath, media.Path) + output := input[:len(input)-len(filepath.Ext(input))] + ".jpg" + + if err := utils.GetFrameImageFromVideo(input, output, 1); err != nil { + log.Errorf("Error extracting image from video: %v", err) + return JobCancel(err) + } + defer os.RemoveAll(output) + + // Upload the image to OSS + if err := w.oss.Upload(ctx, output, filepath.Base(output)); err != nil { + log.Errorf("Error uploading image to OSS: %v", err) + return JobCancel(err) + } + + fileSize, err := utils.GetFileSize(output) + if err != nil { + log.Errorf("Error getting file size: %v", err) + return JobCancel(err) + } + + fileMd5, err := utils.GetFileMd5(output) + if err != nil { + log.Errorf("Error getting file MD5: %v", err) + return JobCancel(err) + } + + // create a new media record for the image + imageMedia := &model.Medias{ + CreatedAt: time.Now(), + Name: "[展示图]" + media.Name, + MimeType: "image/jpeg", + Size: fileSize, + Path: w.oss.GetSavePath(filepath.Base(output)), + Hash: fileMd5, + Metas: fields.Json[fields.MediaMetas]{}, + } + + if err := models.Medias.Create(ctx, imageMedia); err != nil { + log.Errorf("Error creating media record: %v", err) + return JobCancel(err) + } + + return nil +} diff --git a/backend/app/jobs/video_store_short.go b/backend/app/jobs/video_store_short.go new file mode 100644 index 0000000..6eb086e --- /dev/null +++ b/backend/app/jobs/video_store_short.go @@ -0,0 +1,111 @@ +package jobs + +import ( + "context" + "path/filepath" + "time" + + "quyun/app/models" + "quyun/database/fields" + "quyun/database/schemas/public/model" + "quyun/pkg/utils" + "quyun/providers/ali" + "quyun/providers/app" + "quyun/providers/job" + + . 
"github.com/riverqueue/river" + log "github.com/sirupsen/logrus" + _ "go.ipao.vip/atom" + "go.ipao.vip/atom/contracts" +) + +var _ contracts.JobArgs = (*VideoStoreShort)(nil) + +type VideoStoreShort struct { + MediaID int64 `json:"media_id"` + FilePath string `json:"file_path"` +} + +func (s VideoStoreShort) InsertOpts() InsertOpts { + return InsertOpts{ + Queue: QueueDefault, + Priority: PriorityDefault, + } +} + +func (s VideoStoreShort) Kind() string { return "video_store_short" } +func (a VideoStoreShort) UniqueID() string { return a.Kind() } + +var _ Worker[VideoStoreShort] = (*VideoStoreShortWorker)(nil) + +// @provider(job) +type VideoStoreShortWorker struct { + WorkerDefaults[VideoStoreShort] + + oss *ali.OSSClient + job *job.Job + app *app.Config +} + +func (w *VideoStoreShortWorker) NextRetry(job *Job[VideoStoreShort]) time.Time { + return time.Now().Add(30 * time.Second) +} + +func (w *VideoStoreShortWorker) Work(ctx context.Context, job *Job[VideoStoreShort]) error { + log := log.WithField("job", job.Args.Kind()) + + log.Infof("[Start] Working on job with strings: %+v", job.Args) + defer log.Infof("[End] Finished %s", job.Args.Kind()) + + media, err := models.Medias.GetByID(ctx, job.Args.MediaID) + if err != nil { + log.Errorf("Error getting media by ID: %v", err) + return JobCancel(err) + } + + duration, err := utils.GetMediaDuration(job.Args.FilePath) + if err != nil { + log.Errorf("Error getting media duration: %v", err) + return JobCancel(err) + } + + // get file md5 + fileMd5, err := utils.GetFileMd5(job.Args.FilePath) + if err != nil { + log.Errorf("Error getting file md5: %v", err) + return JobCancel(err) + } + + filePath := w.oss.GetSavePath(fileMd5 + filepath.Ext(job.Args.FilePath)) + + // get file size + fileSize, err := utils.GetFileSize(job.Args.FilePath) + if err != nil { + log.Errorf("Error getting file size: %v", err) + return JobCancel(err) + } + + // save to db and relate to master + mediaModel := &model.Medias{ + CreatedAt: time.Now(), + Name: "[试听]" + media.Name, + MimeType: media.MimeType, + Size: fileSize, + Path: filePath, + Hash: fileMd5, + Metas: fields.ToJson(fields.MediaMetas{ + ParentHash: media.Hash, + Short: true, + Duration: duration, + }), + } + + if err := models.Medias.Create(ctx, mediaModel); err != nil { + log.Errorf("Error saving media record: %v data: %+v", err, mediaModel) + return err + } + + log.Infof("Media record created with path: %s and hash: %s", filePath, fileMd5) + + return nil +} diff --git a/backend/app/models/medias.go b/backend/app/models/medias.go index 8756c9f..a7b6c98 100644 --- a/backend/app/models/medias.go +++ b/backend/app/models/medias.go @@ -5,6 +5,7 @@ import ( "time" "quyun/app/requests" + "quyun/database/fields" "quyun/database/schemas/public/model" "quyun/database/schemas/public/table" @@ -215,3 +216,23 @@ func (m *mediasModel) Delete(ctx context.Context, id int64) error { m.log.Infof("media item deleted successfully") return nil } + +// UpdateMetas +func (m *mediasModel) UpdateMetas(ctx context.Context, id int64, metas fields.MediaMetas) error { + meta := fields.ToJson(metas) + + tbl := table.Medias + stmt := tbl. + UPDATE(tbl.Metas). + SET(meta). 
+ WHERE(tbl.ID.EQ(Int64(id))) + m.log.Infof("sql: %s", stmt.DebugSql()) + + if _, err := stmt.ExecContext(ctx, db); err != nil { + m.log.Errorf("error updating media metas: %v", err) + return err + } + + m.log.Infof("media (%d) metas updated successfully", id) + return nil +} diff --git a/backend/database/fields/medias.go b/backend/database/fields/medias.go new file mode 100644 index 0000000..d057228 --- /dev/null +++ b/backend/database/fields/medias.go @@ -0,0 +1,7 @@ +package fields + +type MediaMetas struct { + ParentHash string `json:"parent_hash,omitempty"` + Short bool `json:"short,omitempty"` + Duration int64 `json:"duration,omitempty"` +} diff --git a/backend/database/migrations/20250321112535_create_medias.sql b/backend/database/migrations/20250321112535_create_medias.sql index 328d0ad..63bc702 100644 --- a/backend/database/migrations/20250321112535_create_medias.sql +++ b/backend/database/migrations/20250321112535_create_medias.sql @@ -7,6 +7,7 @@ CREATE TABLE medias( mime_type varchar(128) NOT NULL DEFAULT '', size int8 NOT NULL DEFAULT 0, path varchar(255) NOT NULL DEFAULT '', + metas jsonb NOT NULL DEFAULT '{}' ::jsonb, hash varchar(64) NOT NULL DEFAULT '' ); diff --git a/backend/database/schemas/public/model/medias.go b/backend/database/schemas/public/model/medias.go index d8de890..ed24315 100644 --- a/backend/database/schemas/public/model/medias.go +++ b/backend/database/schemas/public/model/medias.go @@ -8,15 +8,17 @@ package model import ( + "quyun/database/fields" "time" ) type Medias struct { - ID int64 `sql:"primary_key" json:"id"` - CreatedAt time.Time `json:"created_at"` - Name string `json:"name"` - MimeType string `json:"mime_type"` - Size int64 `json:"size"` - Path string `json:"path"` - Hash string `json:"hash"` + ID int64 `sql:"primary_key" json:"id"` + CreatedAt time.Time `json:"created_at"` + Name string `json:"name"` + MimeType string `json:"mime_type"` + Size int64 `json:"size"` + Path string `json:"path"` + Hash string `json:"hash"` + Metas fields.Json[fields.MediaMetas] `json:"metas"` } diff --git a/backend/database/schemas/public/table/medias.go b/backend/database/schemas/public/table/medias.go index cf43687..a517519 100644 --- a/backend/database/schemas/public/table/medias.go +++ b/backend/database/schemas/public/table/medias.go @@ -24,6 +24,7 @@ type mediasTable struct { Size postgres.ColumnInteger Path postgres.ColumnString Hash postgres.ColumnString + Metas postgres.ColumnString AllColumns postgres.ColumnList MutableColumns postgres.ColumnList @@ -71,8 +72,9 @@ func newMediasTableImpl(schemaName, tableName, alias string) mediasTable { SizeColumn = postgres.IntegerColumn("size") PathColumn = postgres.StringColumn("path") HashColumn = postgres.StringColumn("hash") - allColumns = postgres.ColumnList{IDColumn, CreatedAtColumn, NameColumn, MimeTypeColumn, SizeColumn, PathColumn, HashColumn} - mutableColumns = postgres.ColumnList{CreatedAtColumn, NameColumn, MimeTypeColumn, SizeColumn, PathColumn, HashColumn} + MetasColumn = postgres.StringColumn("metas") + allColumns = postgres.ColumnList{IDColumn, CreatedAtColumn, NameColumn, MimeTypeColumn, SizeColumn, PathColumn, HashColumn, MetasColumn} + mutableColumns = postgres.ColumnList{CreatedAtColumn, NameColumn, MimeTypeColumn, SizeColumn, PathColumn, HashColumn, MetasColumn} ) return mediasTable{ @@ -86,6 +88,7 @@ func newMediasTableImpl(schemaName, tableName, alias string) mediasTable { Size: SizeColumn, Path: PathColumn, Hash: HashColumn, + Metas: MetasColumn, AllColumns: allColumns, MutableColumns: 
mutableColumns, diff --git a/backend/database/transform.yaml b/backend/database/transform.yaml index 9d48d5e..7b5aa6a 100644 --- a/backend/database/transform.yaml +++ b/backend/database/transform.yaml @@ -25,3 +25,6 @@ types: orders: status: OrderStatus meta: Json[OrderMeta] + + medias: + metas: Json[MediaMetas] diff --git a/backend/pkg/utils/ffmpeg.go b/backend/pkg/utils/ffmpeg.go new file mode 100644 index 0000000..b96cddb --- /dev/null +++ b/backend/pkg/utils/ffmpeg.go @@ -0,0 +1,120 @@ +package utils + +import ( + "bufio" + "context" + "os/exec" + "strconv" + "strings" + + "github.com/go-pay/errgroup" + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" +) + +func GetMediaDuration(path string) (int64, error) { + // use ffprobe to get media duration + // ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 + cmd := exec.Command("ffprobe", "-v", "error", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", path) + durationOutput, err := cmd.Output() + if err != nil { + return 0, errors.Wrap(err, "ffprobe error") + } + duration := string(durationOutput) + duration = strings.TrimSpace(duration) + // ffprobe reports a fractional duration (e.g. "60.046000"), so parse as float + durationFloat, err := strconv.ParseFloat(duration, 64) + if err != nil { + return 0, errors.Wrap(err, "duration conversion error") + } + return int64(durationFloat), nil +} + +func CutMedia(input, output string, start, end int64) error { + // ffmpeg -ss 0 -i input.mp4 -t 60 -c copy output.mp4 + cmd := exec.Command("ffmpeg", "-ss", strconv.FormatInt(start, 10), "-i", input, "-t", strconv.FormatInt(end, 10), "-c", "copy", output) + + stdout, err := cmd.StdoutPipe() + if err != nil { + log.Errorf("Error creating stdout pipe: %v", err) + return err + } + + stderr, err := cmd.StderrPipe() + if err != nil { + log.Errorf("Error creating stderr pipe: %v", err) + return err + } + + if err := cmd.Start(); err != nil { + log.Errorf("Error starting command: %v", err) + return err + } + + var eg errgroup.Group + eg.Go(func(ctx context.Context) error { + scanner := bufio.NewScanner(stdout) + for scanner.Scan() { + log.Info(scanner.Text()) + } + return nil + }) + + eg.Go(func(ctx context.Context) error { + scanner := bufio.NewScanner(stderr) + for scanner.Scan() { + log.Error(scanner.Text()) + } + return nil + }) + + if err := cmd.Wait(); err != nil { + log.Errorf("Error waiting for command: %v", err) + return err + } + + if err := eg.Wait(); err != nil { + log.Errorf("Error waiting for command: %v", err) + return err + } + return nil +} + +// GetFrameImageFromVideo extracts the frame at the target time (in seconds) from a video file and saves it as an image.
+func GetFrameImageFromVideo(input, output string, time int64) error { + // ffmpeg -i input.mp4 -ss 00:00:01 -vframes 1 output.jpg + cmd := exec.Command("ffmpeg", "-i", input, "-ss", strconv.FormatInt(time, 10), "-vframes", "1", output) + stdout, err := cmd.StdoutPipe() + if err != nil { + return errors.Wrap(err, "stdout pipe error") + } + + stderr, err := cmd.StderrPipe() + if err != nil { + return errors.Wrap(err, "stderr pipe error") + } + if err := cmd.Start(); err != nil { + return errors.Wrap(err, "command start error") + } + var eg errgroup.Group + eg.Go(func(ctx context.Context) error { + scanner := bufio.NewScanner(stdout) + for scanner.Scan() { + log.Info(scanner.Text()) + } + return nil + }) + eg.Go(func(ctx context.Context) error { + scanner := bufio.NewScanner(stderr) + for scanner.Scan() { + log.Error(scanner.Text()) + } + return nil + }) + if err := cmd.Wait(); err != nil { + return errors.Wrap(err, "command wait error") + } + if err := eg.Wait(); err != nil { + return errors.Wrap(err, "command wait error") + } + return nil +} diff --git a/backend/pkg/utils/md5.go b/backend/pkg/utils/md5.go index 55550d9..a782be9 100644 --- a/backend/pkg/utils/md5.go +++ b/backend/pkg/utils/md5.go @@ -31,3 +31,12 @@ func GetFileMd5(file string) (string, error) { return fmt.Sprintf("%x", h.Sum(nil)), nil } + +// GetFileSize +func GetFileSize(file string) (int64, error) { + fi, err := os.Stat(file) + if err != nil { + return 0, err + } + return fi.Size(), nil +} diff --git a/backend/providers/ali/oss_client.go b/backend/providers/ali/oss_client.go index 35ae881..752cebb 100644 --- a/backend/providers/ali/oss_client.go +++ b/backend/providers/ali/oss_client.go @@ -2,6 +2,7 @@ package ali import ( "context" + "path/filepath" "strings" "time" @@ -14,6 +15,10 @@ type OSSClient struct { config *Config } +func (c *OSSClient) GetSavePath(path string) string { + return filepath.Join("quyun", strings.Trim(path, "/")) +} + func (c *OSSClient) GetClient() *oss.Client { return c.client } @@ -21,7 +26,7 @@ func (c *OSSClient) GetClient() *oss.Client { func (c *OSSClient) PreSignUpload(ctx context.Context, path, mimeType string) (*oss.PresignResult, error) { request := &oss.PutObjectRequest{ Bucket: oss.Ptr(c.config.Bucket), - Key: oss.Ptr("quyun/" + strings.Trim(path, "/")), + Key: oss.Ptr(c.GetSavePath(path)), ContentType: oss.Ptr(mimeType), } return c.client.Presign(ctx, request) @@ -67,3 +72,16 @@ func (c *OSSClient) Delete(ctx context.Context, path string) error { } return nil } + +// Upload +func (c *OSSClient) Upload(ctx context.Context, input, dst string) error { + request := &oss.PutObjectRequest{ + Bucket: oss.Ptr(c.config.Bucket), + Key: oss.Ptr(c.GetSavePath(dst)), + } + + if _, err := c.internalClient.PutObjectFromFile(ctx, request, input); err != nil { + return err + } + return nil +}