package driver

import (
	"context"

	"github.com/alist-org/alist/v3/internal/model"
)

// Driver is the full set of capabilities every storage driver must provide:
// metadata management (Meta) and read access (Reader). Write and other
// optional capabilities are discovered via the separate optional interfaces
// below (Mkdir, Move, Put, ...), checked with type assertions at runtime.
type Driver interface {
	Meta
	Reader
	//Writer
	//Other
}

// Meta manages a driver's configuration, persisted storage record, and lifecycle.
type Meta interface {
	Config() Config
	// GetStorage just get raw storage, no need to implement, because model.Storage have implemented
	GetStorage() *model.Storage
	SetStorage(model.Storage)
	// GetAddition Additional is used for unmarshal of JSON, so need return pointer
	GetAddition() Additional
	// Init If already initialized, drop first
	Init(ctx context.Context) error
	Drop(ctx context.Context) error
}

// Other is an optional escape hatch for driver-specific operations that do not
// fit any of the standard capability interfaces.
type Other interface {
	Other(ctx context.Context, args model.OtherArgs) (interface{}, error)
}

// Reader provides read access: listing directories and resolving file links.
type Reader interface {
	// List files in the path
	// if identify files by path, need to set ID with path, like path.Join(dir.GetID(), obj.GetName())
	// if identify files by id, need to set ID with corresponding id
	List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error)
	// Link get url/filepath/reader of file
	Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error)
}

// GetRooter is implemented by drivers that can resolve their root object directly.
type GetRooter interface {
	GetRoot(ctx context.Context) (model.Obj, error)
}

// Getter is implemented by drivers that can look up a single object by path.
type Getter interface {
	// Get file by path, the path hasn't been joined with root path
	Get(ctx context.Context, path string) (model.Obj, error)
}

//type Writer interface {
//	Mkdir
//	Move
//	Rename
//	Copy
//	Remove
//	Put
//}

// Mkdir creates a directory; implement MkdirResult instead if the driver can
// return the created object.
type Mkdir interface {
	MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error
}

// Move relocates an object into another directory; see MoveResult for the
// variant that returns the moved object.
type Move interface {
	Move(ctx context.Context, srcObj, dstDir model.Obj) error
}

// Rename changes an object's name in place; see RenameResult for the variant
// that returns the renamed object.
type Rename interface {
	Rename(ctx context.Context, srcObj model.Obj, newName string) error
}

// Copy duplicates an object into another directory; see CopyResult for the
// variant that returns the new object.
type Copy interface {
	Copy(ctx context.Context, srcObj, dstDir model.Obj) error
}

// Remove deletes an object.
type Remove interface {
	Remove(ctx context.Context, obj model.Obj) error
}

// Put uploads a file stream; see PutResult for the variant that returns the
// uploaded object.
type Put interface {
	// Put a file (provided as a FileStreamer) into the driver
	// Besides the most basic upload functionality, the following features also need to be implemented:
	// 1. Canceling (when `<-ctx.Done()` returns), which can be supported by the following methods:
	//    (1) Use request methods that carry context, such as the following:
	//        a. http.NewRequestWithContext
	//        b. resty.Request.SetContext
	//        c. s3manager.Uploader.UploadWithContext
	//        d. utils.CopyWithCtx
	//    (2) Use a `driver.ReaderWithCtx` or `driver.NewLimitedUploadStream`
	//    (3) Use `utils.IsCanceled` to check if the upload has been canceled during the upload process,
	//        this is typically applicable to chunked uploads.
	// 2. Submit upload progress (via `up`) in real-time. There are three recommended ways as follows:
	//    (1) Use `utils.CopyWithCtx`
	//    (2) Use `driver.ReaderUpdatingProgress`
	//    (3) Use `driver.Progress` with `io.TeeReader`
	// 3. Slow down upload speed (via `stream.ServerUploadLimit`). It requires you to wrap the read stream
	//    in a `driver.RateLimitReader` or a `driver.RateLimitFile` after calculating the file's hash and
	//    before uploading the file or file chunks. Or you can directly call `driver.ServerUploadLimitWaitN`
	//    if your file chunks are sufficiently small (less than about 50KB).
	// NOTE that the network speed may be significantly slower than the stream's read speed. Therefore, if
	// you use a `errgroup.Group` to upload each chunk in parallel, you should consider using a recursive
	// mutex like `semaphore.Weighted` to limit the maximum number of upload threads, preventing excessive
	// memory usage caused by buffering too many file chunks awaiting upload.
	Put(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up UpdateProgress) error
}

// PutURL accepts a remote URL as the upload payload instead of a byte stream.
type PutURL interface {
	// PutURL directly put a URL into the storage
	// Applicable to index-based drivers like URL-Tree or drivers that support uploading files as URLs
	// Called when using SimpleHttp for offline downloading, skipping creating a download task
	PutURL(ctx context.Context, dstDir model.Obj, name, url string) error
}

//type WriteResult interface {
//	MkdirResult
//	MoveResult
//	RenameResult
//	CopyResult
//	PutResult
//	Remove
//}

// MkdirResult is the variant of Mkdir that also returns the created object.
type MkdirResult interface {
	MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error)
}

// MoveResult is the variant of Move that also returns the moved object.
type MoveResult interface {
	Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error)
}

// RenameResult is the variant of Rename that also returns the renamed object.
type RenameResult interface {
	Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error)
}

// CopyResult is the variant of Copy that also returns the new object.
type CopyResult interface {
	Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error)
}

// PutResult is the variant of Put that also returns the uploaded object.
type PutResult interface {
	// Put a file (provided as a FileStreamer) into the driver and return the put obj
	// Besides the most basic upload functionality, the following features also need to be implemented:
	// 1. Canceling (when `<-ctx.Done()` returns), which can be supported by the following methods:
	//    (1) Use request methods that carry context, such as the following:
	//        a. http.NewRequestWithContext
	//        b. resty.Request.SetContext
	//        c. s3manager.Uploader.UploadWithContext
	//        d. utils.CopyWithCtx
	//    (2) Use a `driver.ReaderWithCtx` or `driver.NewLimitedUploadStream`
	//    (3) Use `utils.IsCanceled` to check if the upload has been canceled during the upload process,
	//        this is typically applicable to chunked uploads.
	// 2. Submit upload progress (via `up`) in real-time. There are three recommended ways as follows:
	//    (1) Use `utils.CopyWithCtx`
	//    (2) Use `driver.ReaderUpdatingProgress`
	//    (3) Use `driver.Progress` with `io.TeeReader`
	// 3. Slow down upload speed (via `stream.ServerUploadLimit`). It requires you to wrap the read stream
	//    in a `driver.RateLimitReader` or a `driver.RateLimitFile` after calculating the file's hash and
	//    before uploading the file or file chunks. Or you can directly call `driver.ServerUploadLimitWaitN`
	//    if your file chunks are sufficiently small (less than about 50KB).
	// NOTE that the network speed may be significantly slower than the stream's read speed. Therefore, if
	// you use a `errgroup.Group` to upload each chunk in parallel, you should consider using a recursive
	// mutex like `semaphore.Weighted` to limit the maximum number of upload threads, preventing excessive
	// memory usage caused by buffering too many file chunks awaiting upload.
	Put(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up UpdateProgress) (model.Obj, error)
}

// PutURLResult is the variant of PutURL that also returns the created object.
type PutURLResult interface {
	// PutURL directly put a URL into the storage
	// Applicable to index-based drivers like URL-Tree or drivers that support uploading files as URLs
	// Called when using SimpleHttp for offline downloading, skipping creating a download task
	PutURL(ctx context.Context, dstDir model.Obj, name, url string) (model.Obj, error)
}

// ArchiveReader provides read access to the contents of archive files
// (meta-info, inner listing, and extraction of single entries).
type ArchiveReader interface {
	// GetArchiveMeta get the meta-info of an archive
	// return errs.WrongArchivePassword if the meta-info is also encrypted but provided password is wrong or empty
	// return errs.NotImplement to use internal archive tools to get the meta-info, such as the following cases:
	// 1. the driver does not support the format of the archive but there may be an internal tool that does
	// 2. handling archives is a VIP feature, but the driver does not have VIP access
	GetArchiveMeta(ctx context.Context, obj model.Obj, args model.ArchiveArgs) (model.ArchiveMeta, error)
	// ListArchive list the children of model.ArchiveArgs.InnerPath in the archive
	// return errs.NotImplement to use internal archive tools to list the children
	// return errs.NotSupport if the folder structure should be acquired from model.ArchiveMeta.GetTree
	ListArchive(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) ([]model.Obj, error)
	// Extract get url/filepath/reader of a file in the archive
	// return errs.NotImplement to use internal archive tools to extract
	Extract(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) (*model.Link, error)
}

// ArchiveGetter is implemented by drivers that can look up a single entry
// inside an archive by its inner path.
type ArchiveGetter interface {
	// ArchiveGet get file by inner path
	// return errs.NotImplement to use internal archive tools to get the children
	// return errs.NotSupport if the folder structure should be acquired from model.ArchiveMeta.GetTree
	ArchiveGet(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) (model.Obj, error)
}

// ArchiveDecompress decompresses an archive into a destination directory; see
// ArchiveDecompressResult for the variant that returns the produced objects.
type ArchiveDecompress interface {
	ArchiveDecompress(ctx context.Context, srcObj, dstDir model.Obj, args model.ArchiveDecompressArgs) error
}

// ArchiveDecompressResult is the variant of ArchiveDecompress that also
// returns the objects produced by the decompression.
type ArchiveDecompressResult interface {
	// ArchiveDecompress decompress an archive
	// when args.PutIntoNewDir, the new sub-folder should be named the same as the archive but without the extension
	// return each decompressed obj from the root path of the archive when args.PutIntoNewDir is false
	// return only the newly created folder when args.PutIntoNewDir is true
	// return errs.NotImplement to use internal archive tools to decompress
	ArchiveDecompress(ctx context.Context, srcObj, dstDir model.Obj, args model.ArchiveDecompressArgs) ([]model.Obj, error)
}

// Reference lets a driver delegate to (reference) another initialized driver's storage.
type Reference interface {
	InitReference(storage Driver) error
}