Mirror of https://github.com/mattermost/mattermost.git (synced 2026-02-18 18:18:23 -05:00)

[MM-53454] Add export file settings + slash command for public link (#23915)
Co-authored-by: Mattermost Build <build@mattermost.com>

Parent commit: f3b1f33dff
This commit:  077c16ef61

29 changed files with 1024 additions and 135 deletions
@ -17,6 +17,7 @@ func (api *API) InitExport() {
 	api.BaseRoutes.Exports.Handle("", api.APISessionRequired(listExports)).Methods("GET")
 	api.BaseRoutes.Export.Handle("", api.APISessionRequired(deleteExport)).Methods("DELETE")
 	api.BaseRoutes.Export.Handle("", api.APISessionRequired(downloadExport)).Methods("GET")
+	api.BaseRoutes.Export.Handle("/presign-url", api.APISessionRequired(generatePresignURLExport)).Methods("POST")
 }

 func listExports(c *Context, w http.ResponseWriter, r *http.Request) {
@ -66,7 +67,7 @@ func downloadExport(c *Context, w http.ResponseWriter, r *http.Request) {
 	}

 	filePath := filepath.Join(*c.App.Config().ExportSettings.Directory, c.Params.ExportName)
-	if ok, err := c.App.FileExists(filePath); err != nil {
+	if ok, err := c.App.ExportFileExists(filePath); err != nil {
 		c.Err = err
 		return
 	} else if !ok {

@ -74,7 +75,7 @@ func downloadExport(c *Context, w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	file, err := c.App.FileReader(filePath)
+	file, err := c.App.ExportFileReader(filePath)
 	if err != nil {
 		c.Err = err
 		return

@ -84,3 +85,30 @@ func downloadExport(c *Context, w http.ResponseWriter, r *http.Request) {
 	w.Header().Set("Content-Type", "application/zip")
 	http.ServeContent(w, r, c.Params.ExportName, time.Time{}, file)
 }
+
+func generatePresignURLExport(c *Context, w http.ResponseWriter, r *http.Request) {
+	auditRec := c.MakeAuditRecord("generatePresignURLExport", audit.Fail)
+	defer c.LogAuditRec(auditRec)
+
+	audit.AddEventParameter(auditRec, "export_name", c.Params.ExportName)
+
+	if !c.IsSystemAdmin() {
+		c.SetPermissionError(model.PermissionManageSystem)
+		return
+	}
+
+	res, appErr := c.App.GeneratePresignURLForExport(c.Params.ExportName)
+	if appErr != nil {
+		c.Err = appErr
+		return
+	}
+
+	data, err := json.Marshal(res)
+	if err != nil {
+		c.Err = model.NewAppError("generatePresignURLExport", "app.export.marshal.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+		return
+	}
+
+	w.Write(data)
+	auditRec.Success()
+}
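The handler above responds with a JSON-encoded model.PresignURLResponse. The following is a minimal client-side sketch, not part of this commit; the exact URL shape (/api/v4/exports/{export_name}/presign-url) is inferred from the BaseRoutes.Export registration rather than confirmed by the diff, and the server address, export name, and token are placeholders.

// Sketch only: calling the new presign endpoint with a plain HTTP request.
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical server URL, export name, and admin token.
	url := "https://mattermost.example.com/api/v4/exports/abc123_export.zip/presign-url"
	req, err := http.NewRequest(http.MethodPost, url, nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <system-admin-token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The handler writes a JSON-encoded model.PresignURLResponse (URL plus Expiration).
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.Status, string(body))
}

Only a System Admin token gets past the PermissionManageSystem check above; any other caller receives a permission error.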
@ -7,4 +7,5 @@ func (api *API) InitExportLocal() {
 	api.BaseRoutes.Exports.Handle("", api.APILocal(listExports)).Methods("GET")
 	api.BaseRoutes.Export.Handle("", api.APILocal(deleteExport)).Methods("DELETE")
 	api.BaseRoutes.Export.Handle("", api.APILocal(downloadExport)).Methods("GET")
+	api.BaseRoutes.Export.Handle("/presign-url", api.APILocal(generatePresignURLExport)).Methods("POST")
 }
@ -56,6 +56,8 @@ type AppIface interface {
|
|||
// AddUserToChannel adds a user to a given channel.
|
||||
AddUserToChannel(c request.CTX, user *model.User, channel *model.Channel, skipTeamMemberIntegrityCheck bool) (*model.ChannelMember, *model.AppError)
|
||||
// Caller must close the first return value
|
||||
ExportFileReader(path string) (filestore.ReadCloseSeeker, *model.AppError)
|
||||
// Caller must close the first return value
|
||||
FileReader(path string) (filestore.ReadCloseSeeker, *model.AppError)
|
||||
// ChannelMembersMinusGroupMembers returns the set of users in the given channel minus the set of users in the given
|
||||
// groups.
|
||||
|
|
@ -565,6 +567,9 @@ type AppIface interface {
|
|||
DownloadFromURL(downloadURL string) ([]byte, error)
|
||||
EnableUserAccessToken(token *model.UserAccessToken) *model.AppError
|
||||
EnvironmentConfig(filter func(reflect.StructField) bool) map[string]any
|
||||
ExportFileBackend() filestore.FileBackend
|
||||
ExportFileExists(path string) (bool, *model.AppError)
|
||||
ExportFileModTime(path string) (time.Time, *model.AppError)
|
||||
ExportPermissions(w io.Writer) error
|
||||
ExtractContentFromFileInfo(fileInfo *model.FileInfo) error
|
||||
FetchSamlMetadataFromIdp(url string) ([]byte, *model.AppError)
|
||||
|
|
@ -578,6 +583,7 @@ type AppIface interface {
|
|||
FindTeamByName(name string) bool
|
||||
FinishSendAdminNotifyPost(trial bool, now int64, pluginBasedData map[string][]*model.NotifyAdminData)
|
||||
GenerateMfaSecret(userID string) (*model.MfaSecret, *model.AppError)
|
||||
GeneratePresignURLForExport(name string) (*model.PresignURLResponse, *model.AppError)
|
||||
GeneratePublicLink(siteURL string, info *model.FileInfo) string
|
||||
GenerateSupportPacket() []model.FileData
|
||||
GetAcknowledgementsForPost(postID string) ([]*model.PostAcknowledgement, *model.AppError)
|
||||
|
|
@ -914,6 +920,7 @@ type AppIface interface {
|
|||
ListAllCommands(teamID string, T i18n.TranslateFunc) ([]*model.Command, *model.AppError)
|
||||
ListDirectory(path string) ([]string, *model.AppError)
|
||||
ListDirectoryRecursively(path string) ([]string, *model.AppError)
|
||||
ListExportDirectory(path string) ([]string, *model.AppError)
|
||||
ListExports() ([]string, *model.AppError)
|
||||
ListImports() ([]string, *model.AppError)
|
||||
ListPluginKeys(pluginID string, page, perPage int) ([]string, *model.AppError)
|
||||
|
|
@ -978,6 +985,7 @@ type AppIface interface {
|
|||
RemoveChannelsFromRetentionPolicy(policyID string, channelIDs []string) *model.AppError
|
||||
RemoveCustomStatus(c request.CTX, userID string) *model.AppError
|
||||
RemoveDirectory(path string) *model.AppError
|
||||
RemoveExportFile(path string) *model.AppError
|
||||
RemoveFile(path string) *model.AppError
|
||||
RemoveLdapPrivateCertificate() *model.AppError
|
||||
RemoveLdapPublicCertificate() *model.AppError
|
||||
|
|
@ -1178,6 +1186,8 @@ type AppIface interface {
|
|||
VerifyEmailFromToken(c request.CTX, userSuppliedTokenString string) *model.AppError
|
||||
VerifyUserEmail(userID, email string) *model.AppError
|
||||
ViewChannel(c request.CTX, view *model.ChannelView, userID string, currentSessionId string, collapsedThreadsSupported bool) (map[string]int64, *model.AppError)
|
||||
WriteExportFile(fr io.Reader, path string) (int64, *model.AppError)
|
||||
WriteExportFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError)
|
||||
WriteFile(fr io.Reader, path string) (int64, *model.AppError)
|
||||
WriteFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -35,11 +35,12 @@ type licenseSvc interface {
|
|||
|
||||
// Channels contains all channels related state.
|
||||
type Channels struct {
|
||||
srv *Server
|
||||
cfgSvc product.ConfigService
|
||||
filestore filestore.FileBackend
|
||||
licenseSvc licenseSvc
|
||||
routerSvc *routerService
|
||||
srv *Server
|
||||
cfgSvc product.ConfigService
|
||||
filestore filestore.FileBackend
|
||||
exportFilestore filestore.FileBackend
|
||||
licenseSvc licenseSvc
|
||||
routerSvc *routerService
|
||||
|
||||
postActionCookieSecret []byte
|
||||
|
||||
|
|
@ -91,10 +92,11 @@ func init() {
|
|||
return NewChannels(services)
|
||||
},
|
||||
Dependencies: map[product.ServiceKey]struct{}{
|
||||
ServerKey: {},
|
||||
product.ConfigKey: {},
|
||||
product.LicenseKey: {},
|
||||
product.FilestoreKey: {},
|
||||
ServerKey: {},
|
||||
product.ConfigKey: {},
|
||||
product.LicenseKey: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ExportFilestoreKey: {},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
|
@ -119,6 +121,7 @@ func NewChannels(services map[product.ServiceKey]any) (*Channels, error) {
|
|||
product.ConfigKey,
|
||||
product.LicenseKey,
|
||||
product.FilestoreKey,
|
||||
product.ExportFilestoreKey,
|
||||
}
|
||||
for _, svcKey := range requiredServices {
|
||||
svc, ok := services[svcKey]
|
||||
|
|
@ -139,6 +142,12 @@ func NewChannels(services map[product.ServiceKey]any) (*Channels, error) {
|
|||
return nil, errors.New("Filestore service did not satisfy FileBackend interface")
|
||||
}
|
||||
ch.filestore = filestore
|
||||
case product.ExportFilestoreKey:
|
||||
exportFilestore, ok := svc.(filestore.FileBackend)
|
||||
if !ok {
|
||||
return nil, errors.New("Export filestore service did not satisfy FileBackend interface")
|
||||
}
|
||||
ch.exportFilestore = exportFilestore
|
||||
case product.LicenseKey:
|
||||
svc, ok := svc.(licenseSvc)
|
||||
if !ok {
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ import (
 	"io"
 	"net/http"
 	"os"
+	"path"
 	"path/filepath"
 	"strconv"
 	"strings"

@ -23,6 +24,7 @@ import (
 	"github.com/mattermost/mattermost/server/v8/channels/app/imports"
 	"github.com/mattermost/mattermost/server/v8/channels/app/request"
 	"github.com/mattermost/mattermost/server/v8/channels/store"
+	"github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
 )

 // We use this map to identify the exportable preferences.

@ -817,7 +819,7 @@ func (a *App) exportFile(outPath, filePath string, zipWr *zip.Writer) *model.App
 }

 func (a *App) ListExports() ([]string, *model.AppError) {
-	exports, appErr := a.ListDirectory(*a.Config().ExportSettings.Directory)
+	exports, appErr := a.ListExportDirectory(*a.Config().ExportSettings.Directory)
 	if appErr != nil {
 		return nil, appErr
 	}

@ -830,16 +832,51 @@ func (a *App) ListExports() ([]string, *model.AppError) {
 	return results, nil
 }

+func (a *App) GeneratePresignURLForExport(name string) (*model.PresignURLResponse, *model.AppError) {
+	if !a.Config().FeatureFlags.EnableExportDirectDownload {
+		return nil, model.NewAppError("GeneratePresignURLForExport", "app.eport.generate_presigned_url.featureflag.app_error", nil, "", http.StatusInternalServerError)
+	}
+
+	if !*a.Config().FileSettings.DedicatedExportStore {
+		return nil, model.NewAppError("GeneratePresignURLForExport", "app.eport.generate_presigned_url.config.app_error", nil, "", http.StatusInternalServerError)
+	}
+
+	b := a.ExportFileBackend()
+	backend, ok := b.(filestore.FileBackendWithLinkGenerator)
+	if !ok {
+		return nil, model.NewAppError("GeneratePresignURLForExport", "app.eport.generate_presigned_url.driver.app_error", nil, "", http.StatusInternalServerError)
+	}
+
+	p := path.Join(*a.Config().ExportSettings.Directory, filepath.Base(name))
+	found, err := b.FileExists(p)
+	if err != nil {
+		return nil, model.NewAppError("GeneratePresignURLForExport", "app.eport.generate_presigned_url.fileexist.app_error", nil, "", http.StatusInternalServerError)
+	}
+	if !found {
+		return nil, model.NewAppError("GeneratePresignURLForExport", "app.eport.generate_presigned_url.notfound.app_error", nil, "", http.StatusInternalServerError)
+	}
+
+	link, exp, err := backend.GeneratePublicLink(p)
+	if err != nil {
+		return nil, model.NewAppError("GeneratePresignURLForExport", "app.eport.generate_presigned_url.link.app_error", nil, "", http.StatusInternalServerError)
+	}
+
+	return &model.PresignURLResponse{
+		URL:        link,
+		Expiration: exp,
+	}, nil
+}
+
 func (a *App) DeleteExport(name string) *model.AppError {
 	filePath := filepath.Join(*a.Config().ExportSettings.Directory, name)

-	if ok, err := a.FileExists(filePath); err != nil {
+	if ok, err := a.ExportFileExists(filePath); err != nil {
 		return err
 	} else if !ok {
 		return nil
 	}

-	return a.RemoveFile(filePath)
+	return a.RemoveExportFile(filePath)
 }

 func updateJobProgress(logger mlog.LoggerIFace, store store.Store, job *model.Job, key string, value int) {
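GeneratePresignURLForExport is gated three ways: the EnableExportDirectDownload feature flag, FileSettings.DedicatedExportStore, and an export backend that implements filestore.FileBackendWithLinkGenerator. The following is a minimal sketch of flipping the relevant config fields, not part of this commit; the helper name is made up, and the use of model.ImageDriverS3 and Config.SetDefaults are assumptions about the existing model package.

// Sketch only: which settings the new code path checks; field names come from the diff above.
package main

import "github.com/mattermost/mattermost/server/public/model"

// enableExportDirectDownload is a hypothetical helper, not a real Mattermost API.
func enableExportDirectDownload(cfg *model.Config) {
	cfg.FeatureFlags.EnableExportDirectDownload = true       // checked first in GeneratePresignURLForExport
	*cfg.FileSettings.DedicatedExportStore = true             // a dedicated export store is required
	*cfg.FileSettings.ExportDriverName = model.ImageDriverS3  // the local driver cannot generate presigned links
}

func main() {
	cfg := &model.Config{}
	cfg.SetDefaults() // assumed to populate the pointer fields before they are dereferenced
	enableExportDirectDownload(cfg)
}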
@ -65,6 +65,10 @@ func (a *App) FileBackend() filestore.FileBackend {
|
|||
return a.ch.filestore
|
||||
}
|
||||
|
||||
func (a *App) ExportFileBackend() filestore.FileBackend {
|
||||
return a.ch.exportFilestore
|
||||
}
|
||||
|
||||
func (a *App) CheckMandatoryS3Fields(settings *model.FileSettings) *model.AppError {
|
||||
fileBackendSettings := filestore.NewFileBackendSettingsFromConfig(settings, false, false)
|
||||
err := fileBackendSettings.CheckMandatoryS3Fields()
|
||||
|
|
@ -111,31 +115,56 @@ func (a *App) ReadFile(path string) ([]byte, *model.AppError) {
|
|||
return a.ch.srv.ReadFile(path)
|
||||
}
|
||||
|
||||
func (s *Server) fileReader(path string) (filestore.ReadCloseSeeker, *model.AppError) {
|
||||
result, nErr := s.FileBackend().Reader(path)
|
||||
func fileReader(backend filestore.FileBackend, path string) (filestore.ReadCloseSeeker, *model.AppError) {
|
||||
result, nErr := backend.Reader(path)
|
||||
if nErr != nil {
|
||||
return nil, model.NewAppError("FileReader", "api.file.file_reader.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (s *Server) fileReader(path string) (filestore.ReadCloseSeeker, *model.AppError) {
|
||||
return fileReader(s.FileBackend(), path)
|
||||
}
|
||||
|
||||
func (s *Server) exportFileReader(path string) (filestore.ReadCloseSeeker, *model.AppError) {
|
||||
return fileReader(s.ExportFileBackend(), path)
|
||||
}
|
||||
|
||||
// Caller must close the first return value
|
||||
func (a *App) FileReader(path string) (filestore.ReadCloseSeeker, *model.AppError) {
|
||||
return a.Srv().fileReader(path)
|
||||
}
|
||||
|
||||
// Caller must close the first return value
|
||||
func (a *App) ExportFileReader(path string) (filestore.ReadCloseSeeker, *model.AppError) {
|
||||
return a.Srv().exportFileReader(path)
|
||||
}
|
||||
|
||||
func (a *App) FileExists(path string) (bool, *model.AppError) {
|
||||
return a.Srv().fileExists(path)
|
||||
}
|
||||
|
||||
func (a *App) ExportFileExists(path string) (bool, *model.AppError) {
|
||||
return a.Srv().exportFileExists(path)
|
||||
}
|
||||
|
||||
func (s *Server) fileExists(path string) (bool, *model.AppError) {
|
||||
result, nErr := s.FileBackend().FileExists(path)
|
||||
return fileExists(s.FileBackend(), path)
|
||||
}
|
||||
|
||||
func fileExists(backend filestore.FileBackend, path string) (bool, *model.AppError) {
|
||||
result, nErr := backend.FileExists(path)
|
||||
if nErr != nil {
|
||||
return false, model.NewAppError("FileExists", "api.file.file_exists.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (s *Server) exportFileExists(path string) (bool, *model.AppError) {
|
||||
return fileExists(s.ExportFileBackend(), path)
|
||||
}
|
||||
|
||||
func (a *App) FileSize(path string) (int64, *model.AppError) {
|
||||
size, nErr := a.FileBackend().FileSize(path)
|
||||
if nErr != nil {
|
||||
|
|
@ -144,8 +173,8 @@ func (a *App) FileSize(path string) (int64, *model.AppError) {
|
|||
return size, nil
|
||||
}
|
||||
|
||||
func (a *App) FileModTime(path string) (time.Time, *model.AppError) {
|
||||
modTime, nErr := a.FileBackend().FileModTime(path)
|
||||
func fileModTime(backend filestore.FileBackend, path string) (time.Time, *model.AppError) {
|
||||
modTime, nErr := backend.FileModTime(path)
|
||||
if nErr != nil {
|
||||
return time.Time{}, model.NewAppError("FileModTime", "api.file.file_mod_time.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
|
||||
}
|
||||
|
|
@ -153,6 +182,14 @@ func (a *App) FileModTime(path string) (time.Time, *model.AppError) {
|
|||
return modTime, nil
|
||||
}
|
||||
|
||||
func (a *App) FileModTime(path string) (time.Time, *model.AppError) {
|
||||
return fileModTime(a.FileBackend(), path)
|
||||
}
|
||||
|
||||
func (a *App) ExportFileModTime(path string) (time.Time, *model.AppError) {
|
||||
return fileModTime(a.ExportFileBackend(), path)
|
||||
}
|
||||
|
||||
func (a *App) MoveFile(oldPath, newPath string) *model.AppError {
|
||||
nErr := a.FileBackend().MoveFile(oldPath, newPath)
|
||||
if nErr != nil {
|
||||
|
|
@ -169,17 +206,33 @@ func (a *App) WriteFile(fr io.Reader, path string) (int64, *model.AppError) {
|
|||
return a.Srv().writeFile(fr, path)
|
||||
}
|
||||
|
||||
func (s *Server) writeFile(fr io.Reader, path string) (int64, *model.AppError) {
|
||||
result, nErr := s.FileBackend().WriteFile(fr, path)
|
||||
func writeFile(backend filestore.FileBackend, fr io.Reader, path string) (int64, *model.AppError) {
|
||||
result, nErr := backend.WriteFile(fr, path)
|
||||
if nErr != nil {
|
||||
return result, model.NewAppError("WriteFile", "api.file.write_file.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (s *Server) writeFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError) {
|
||||
func (s *Server) writeFile(fr io.Reader, path string) (int64, *model.AppError) {
|
||||
return writeFile(s.FileBackend(), fr, path)
|
||||
}
|
||||
|
||||
func (s *Server) writeExportFile(fr io.Reader, path string) (int64, *model.AppError) {
|
||||
return writeFile(s.ExportFileBackend(), fr, path)
|
||||
}
|
||||
|
||||
func (a *App) WriteExportFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError) {
|
||||
return a.Srv().writeExportFileContext(ctx, fr, path)
|
||||
}
|
||||
|
||||
func (a *App) WriteExportFile(fr io.Reader, path string) (int64, *model.AppError) {
|
||||
return a.Srv().writeExportFile(fr, path)
|
||||
}
|
||||
|
||||
func writeFileContext(ctx context.Context, backend filestore.FileBackend, fr io.Reader, path string) (int64, *model.AppError) {
|
||||
// Check if we can provide a custom context, otherwise just use the default method.
|
||||
written, err := filestore.TryWriteFileContext(s.FileBackend(), ctx, fr, path)
|
||||
written, err := filestore.TryWriteFileContext(backend, ctx, fr, path)
|
||||
if err != nil {
|
||||
return written, model.NewAppError("WriteFile", "api.file.write_file.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
|
||||
}
|
||||
|
|
@ -187,6 +240,14 @@ func (s *Server) writeFileContext(ctx context.Context, fr io.Reader, path string
|
|||
return written, nil
|
||||
}
|
||||
|
||||
func (s *Server) writeFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError) {
|
||||
return writeFileContext(ctx, s.FileBackend(), fr, path)
|
||||
}
|
||||
|
||||
func (s *Server) writeExportFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError) {
|
||||
return writeFileContext(ctx, s.ExportFileBackend(), fr, path)
|
||||
}
|
||||
|
||||
func (a *App) AppendFile(fr io.Reader, path string) (int64, *model.AppError) {
|
||||
result, nErr := a.FileBackend().AppendFile(fr, path)
|
||||
if nErr != nil {
|
||||
|
|
@ -199,24 +260,43 @@ func (a *App) RemoveFile(path string) *model.AppError {
|
|||
return a.Srv().removeFile(path)
|
||||
}
|
||||
|
||||
func (s *Server) removeFile(path string) *model.AppError {
|
||||
nErr := s.FileBackend().RemoveFile(path)
|
||||
func (a *App) RemoveExportFile(path string) *model.AppError {
|
||||
return a.Srv().removeExportFile(path)
|
||||
}
|
||||
|
||||
func removeFile(backend filestore.FileBackend, path string) *model.AppError {
|
||||
nErr := backend.RemoveFile(path)
|
||||
if nErr != nil {
|
||||
return model.NewAppError("RemoveFile", "api.file.remove_file.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Server) removeFile(path string) *model.AppError {
|
||||
return removeFile(s.FileBackend(), path)
|
||||
}
|
||||
|
||||
func (s *Server) removeExportFile(path string) *model.AppError {
|
||||
return removeFile(s.ExportFileBackend(), path)
|
||||
}
|
||||
|
||||
func (a *App) ListDirectory(path string) ([]string, *model.AppError) {
|
||||
return a.Srv().listDirectory(path, false)
|
||||
}
|
||||
|
||||
func (a *App) ListExportDirectory(path string) ([]string, *model.AppError) {
|
||||
return a.Srv().listExportDirectory(path, false)
|
||||
}
|
||||
|
||||
func (a *App) ListDirectoryRecursively(path string) ([]string, *model.AppError) {
|
||||
return a.Srv().listDirectory(path, true)
|
||||
}
|
||||
|
||||
func (s *Server) listDirectory(path string, recursion bool) ([]string, *model.AppError) {
|
||||
backend := s.FileBackend()
|
||||
return listDirectory(s.FileBackend(), path, recursion)
|
||||
}
|
||||
|
||||
func listDirectory(backend filestore.FileBackend, path string, recursion bool) ([]string, *model.AppError) {
|
||||
var paths []string
|
||||
var nErr error
|
||||
|
||||
|
|
@ -227,12 +307,16 @@ func (s *Server) listDirectory(path string, recursion bool) ([]string, *model.Ap
|
|||
}
|
||||
|
||||
if nErr != nil {
|
||||
return nil, model.NewAppError("ListDirectory", "api.file.list_directory.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
|
||||
return nil, model.NewAppError("ListExportDirectory", "api.file.list_directory.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
|
||||
}
|
||||
|
||||
return paths, nil
|
||||
}
|
||||
|
||||
func (s *Server) listExportDirectory(path string, recursion bool) ([]string, *model.AppError) {
|
||||
return listDirectory(s.ExportFileBackend(), path, recursion)
|
||||
}
|
||||
|
||||
func (a *App) RemoveDirectory(path string) *model.AppError {
|
||||
nErr := a.FileBackend().RemoveDirectory(path)
|
||||
if nErr != nil {
|
||||
|
|
|
|||
|
|
@ -1454,13 +1454,15 @@ func TestPushNotificationRace(t *testing.T) {
|
|||
ConfigStore: memoryStore,
|
||||
},
|
||||
platform.SetFileStore(&fmocks.FileBackend{}),
|
||||
platform.SetExportFileStore(&fmocks.FileBackend{}),
|
||||
platform.StoreOverride(mockStore))
|
||||
require.NoError(t, err)
|
||||
serviceMap := map[product.ServiceKey]any{
|
||||
ServerKey: s,
|
||||
product.ConfigKey: s.platform,
|
||||
product.LicenseKey: &licenseWrapper{s},
|
||||
product.FilestoreKey: s.FileBackend(),
|
||||
ServerKey: s,
|
||||
product.ConfigKey: s.platform,
|
||||
product.LicenseKey: &licenseWrapper{s},
|
||||
product.FilestoreKey: s.FileBackend(),
|
||||
product.ExportFilestoreKey: s.ExportFileBackend(),
|
||||
}
|
||||
ch, err := NewChannels(serviceMap)
|
||||
require.NoError(t, err)
|
||||
|
|
|
|||
|
|
@ -4104,6 +4104,89 @@ func (a *OpenTracingAppLayer) ExecuteCommand(c request.CTX, args *model.CommandA
|
|||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) ExportFileBackend() filestore.FileBackend {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.ExportFileBackend")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0 := a.app.ExportFileBackend()
|
||||
|
||||
return resultVar0
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) ExportFileExists(path string) (bool, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.ExportFileExists")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0, resultVar1 := a.app.ExportFileExists(path)
|
||||
|
||||
if resultVar1 != nil {
|
||||
span.LogFields(spanlog.Error(resultVar1))
|
||||
ext.Error.Set(span, true)
|
||||
}
|
||||
|
||||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) ExportFileModTime(path string) (time.Time, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.ExportFileModTime")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0, resultVar1 := a.app.ExportFileModTime(path)
|
||||
|
||||
if resultVar1 != nil {
|
||||
span.LogFields(spanlog.Error(resultVar1))
|
||||
ext.Error.Set(span, true)
|
||||
}
|
||||
|
||||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) ExportFileReader(path string) (filestore.ReadCloseSeeker, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.ExportFileReader")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0, resultVar1 := a.app.ExportFileReader(path)
|
||||
|
||||
if resultVar1 != nil {
|
||||
span.LogFields(spanlog.Error(resultVar1))
|
||||
ext.Error.Set(span, true)
|
||||
}
|
||||
|
||||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) ExportPermissions(w io.Writer) error {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.ExportPermissions")
|
||||
|
|
@ -4478,6 +4561,28 @@ func (a *OpenTracingAppLayer) GenerateMfaSecret(userID string) (*model.MfaSecret
|
|||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) GeneratePresignURLForExport(name string) (*model.PresignURLResponse, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GeneratePresignURLForExport")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0, resultVar1 := a.app.GeneratePresignURLForExport(name)
|
||||
|
||||
if resultVar1 != nil {
|
||||
span.LogFields(spanlog.Error(resultVar1))
|
||||
ext.Error.Set(span, true)
|
||||
}
|
||||
|
||||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) GeneratePublicLink(siteURL string, info *model.FileInfo) string {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GeneratePublicLink")
|
||||
|
|
@ -12393,6 +12498,28 @@ func (a *OpenTracingAppLayer) ListDirectoryRecursively(path string) ([]string, *
|
|||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) ListExportDirectory(path string) ([]string, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.ListExportDirectory")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0, resultVar1 := a.app.ListExportDirectory(path)
|
||||
|
||||
if resultVar1 != nil {
|
||||
span.LogFields(spanlog.Error(resultVar1))
|
||||
ext.Error.Set(span, true)
|
||||
}
|
||||
|
||||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) ListExports() ([]string, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.ListExports")
|
||||
|
|
@ -13951,6 +14078,28 @@ func (a *OpenTracingAppLayer) RemoveDirectory(path string) *model.AppError {
|
|||
return resultVar0
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) RemoveExportFile(path string) *model.AppError {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.RemoveExportFile")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0 := a.app.RemoveExportFile(path)
|
||||
|
||||
if resultVar0 != nil {
|
||||
span.LogFields(spanlog.Error(resultVar0))
|
||||
ext.Error.Set(span, true)
|
||||
}
|
||||
|
||||
return resultVar0
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) RemoveFile(path string) *model.AppError {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.RemoveFile")
|
||||
|
|
@ -18759,6 +18908,50 @@ func (a *OpenTracingAppLayer) ViewChannel(c request.CTX, view *model.ChannelView
|
|||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) WriteExportFile(fr io.Reader, path string) (int64, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.WriteExportFile")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0, resultVar1 := a.app.WriteExportFile(fr, path)
|
||||
|
||||
if resultVar1 != nil {
|
||||
span.LogFields(spanlog.Error(resultVar1))
|
||||
ext.Error.Set(span, true)
|
||||
}
|
||||
|
||||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) WriteExportFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.WriteExportFileContext")
|
||||
|
||||
a.ctx = newCtx
|
||||
a.app.Srv().Store().SetContext(newCtx)
|
||||
defer func() {
|
||||
a.app.Srv().Store().SetContext(origCtx)
|
||||
a.ctx = origCtx
|
||||
}()
|
||||
|
||||
defer span.Finish()
|
||||
resultVar0, resultVar1 := a.app.WriteExportFileContext(ctx, fr, path)
|
||||
|
||||
if resultVar1 != nil {
|
||||
span.LogFields(spanlog.Error(resultVar1))
|
||||
ext.Error.Set(span, true)
|
||||
}
|
||||
|
||||
return resultVar0, resultVar1
|
||||
}
|
||||
|
||||
func (a *OpenTracingAppLayer) WriteFile(fr io.Reader, path string) (int64, *model.AppError) {
|
||||
origCtx := a.ctx
|
||||
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.WriteFile")
|
||||
|
|
|
|||
|
|
@ -82,6 +82,13 @@ func SetFileStore(filestore filestore.FileBackend) Option {
|
|||
}
|
||||
}
|
||||
|
||||
func SetExportFileStore(filestore filestore.FileBackend) Option {
|
||||
return func(ps *PlatformService) error {
|
||||
ps.exportFilestore = filestore
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// ConfigStore applies the given config store, typically to replace the traditional sources with a memory store for testing.
|
||||
func ConfigStore(configStore *config.Store) Option {
|
||||
return func(ps *PlatformService) error {
|
||||
|
|
|
|||
|
|
@ -43,7 +43,8 @@ type PlatformService struct {
|
|||
|
||||
configStore *config.Store
|
||||
|
||||
filestore filestore.FileBackend
|
||||
filestore filestore.FileBackend
|
||||
exportFilestore filestore.FileBackend
|
||||
|
||||
cacheProvider cache.Provider
|
||||
statusCache cache.Cache
|
||||
|
|
@ -247,6 +248,18 @@ func New(sc ServiceConfig, options ...Option) (*PlatformService, error) {
|
|||
ps.filestore = backend
|
||||
}
|
||||
|
||||
if ps.exportFilestore == nil {
|
||||
ps.exportFilestore = ps.filestore
|
||||
if *ps.Config().FileSettings.DedicatedExportStore {
|
||||
backend, errFileBack := filestore.NewFileBackend(filestore.NewExportFileBackendSettingsFromConfig(&ps.Config().FileSettings, license != nil && *license.Features.Compliance, false))
|
||||
if errFileBack != nil {
|
||||
return nil, fmt.Errorf("failed to initialize export filebackend: %w", errFileBack)
|
||||
}
|
||||
|
||||
ps.exportFilestore = backend
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
ps.Store, err = ps.newStore()
|
||||
if err != nil {
|
||||
|
|
@ -490,3 +503,7 @@ func (ps *PlatformService) GetPluginStatuses() (model.PluginStatuses, *model.App
|
|||
func (ps *PlatformService) FileBackend() filestore.FileBackend {
|
||||
return ps.filestore
|
||||
}
|
||||
|
||||
func (ps *PlatformService) ExportFileBackend() filestore.FileBackend {
|
||||
return ps.exportFilestore
|
||||
}
|
||||
|
|
|
|||
|
|
@ -50,29 +50,32 @@ func TestInitializeProducts(t *testing.T) {
|
|||
|
||||
t.Run("2 products and no circular dependency", func(t *testing.T) {
|
||||
serviceMap := map[product.ServiceKey]any{
|
||||
product.ConfigKey: nil,
|
||||
product.LicenseKey: nil,
|
||||
product.FilestoreKey: nil,
|
||||
product.ClusterKey: nil,
|
||||
product.ConfigKey: nil,
|
||||
product.LicenseKey: nil,
|
||||
product.FilestoreKey: nil,
|
||||
product.ExportFilestoreKey: nil,
|
||||
product.ClusterKey: nil,
|
||||
}
|
||||
|
||||
products := map[string]product.Manifest{
|
||||
"productA": {
|
||||
Initializer: newProductA,
|
||||
Dependencies: map[product.ServiceKey]struct{}{
|
||||
product.ConfigKey: {},
|
||||
product.LicenseKey: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ClusterKey: {},
|
||||
product.ConfigKey: {},
|
||||
product.LicenseKey: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ExportFilestoreKey: {},
|
||||
product.ClusterKey: {},
|
||||
},
|
||||
},
|
||||
"productB": {
|
||||
Initializer: newProductB,
|
||||
Dependencies: map[product.ServiceKey]struct{}{
|
||||
product.ConfigKey: {},
|
||||
testSrvKey1: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ClusterKey: {},
|
||||
product.ConfigKey: {},
|
||||
testSrvKey1: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ExportFilestoreKey: {},
|
||||
product.ClusterKey: {},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
@ -89,30 +92,33 @@ func TestInitializeProducts(t *testing.T) {
|
|||
|
||||
t.Run("2 products and circular dependency", func(t *testing.T) {
|
||||
serviceMap := map[product.ServiceKey]any{
|
||||
product.ConfigKey: nil,
|
||||
product.LicenseKey: nil,
|
||||
product.FilestoreKey: nil,
|
||||
product.ClusterKey: nil,
|
||||
product.ConfigKey: nil,
|
||||
product.LicenseKey: nil,
|
||||
product.FilestoreKey: nil,
|
||||
product.ExportFilestoreKey: nil,
|
||||
product.ClusterKey: nil,
|
||||
}
|
||||
|
||||
products := map[string]product.Manifest{
|
||||
"productA": {
|
||||
Initializer: newProductA,
|
||||
Dependencies: map[product.ServiceKey]struct{}{
|
||||
product.ConfigKey: {},
|
||||
product.LicenseKey: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ClusterKey: {},
|
||||
testSrvKey2: {},
|
||||
product.ConfigKey: {},
|
||||
product.LicenseKey: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ExportFilestoreKey: {},
|
||||
product.ClusterKey: {},
|
||||
testSrvKey2: {},
|
||||
},
|
||||
},
|
||||
"productB": {
|
||||
Initializer: newProductB,
|
||||
Dependencies: map[product.ServiceKey]struct{}{
|
||||
product.ConfigKey: {},
|
||||
testSrvKey1: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ClusterKey: {},
|
||||
product.ConfigKey: {},
|
||||
testSrvKey1: {},
|
||||
product.FilestoreKey: {},
|
||||
product.ExportFilestoreKey: {},
|
||||
product.ClusterKey: {},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
@ -127,10 +133,11 @@ func TestInitializeProducts(t *testing.T) {
|
|||
|
||||
t.Run("2 products and one w/o any dependency", func(t *testing.T) {
|
||||
serviceMap := map[product.ServiceKey]any{
|
||||
product.ConfigKey: nil,
|
||||
product.LicenseKey: nil,
|
||||
product.FilestoreKey: nil,
|
||||
product.ClusterKey: nil,
|
||||
product.ConfigKey: nil,
|
||||
product.LicenseKey: nil,
|
||||
product.FilestoreKey: nil,
|
||||
product.ExportFilestoreKey: nil,
|
||||
product.ClusterKey: nil,
|
||||
}
|
||||
|
||||
products := map[string]product.Manifest{
|
||||
|
|
|
|||
|
|
@ -250,21 +250,22 @@ func NewServer(options ...Option) (*Server, error) {
|
|||
|
||||
app := New(ServerConnector(s.Channels()))
|
||||
serviceMap := map[product.ServiceKey]any{
|
||||
ServerKey: s,
|
||||
product.ConfigKey: s.platform,
|
||||
product.LicenseKey: s.licenseWrapper,
|
||||
product.FilestoreKey: s.platform.FileBackend(),
|
||||
product.FileInfoStoreKey: &fileInfoWrapper{srv: s},
|
||||
product.ClusterKey: s.platform,
|
||||
product.UserKey: app,
|
||||
product.LogKey: s.platform.Log(),
|
||||
product.CloudKey: &cloudWrapper{cloud: s.Cloud},
|
||||
product.KVStoreKey: s.platform,
|
||||
product.StoreKey: store.NewStoreServiceAdapter(s.Store()),
|
||||
product.SystemKey: &systemServiceAdapter{server: s},
|
||||
product.SessionKey: app,
|
||||
product.FrontendKey: app,
|
||||
product.CommandKey: app,
|
||||
ServerKey: s,
|
||||
product.ConfigKey: s.platform,
|
||||
product.LicenseKey: s.licenseWrapper,
|
||||
product.FilestoreKey: s.platform.FileBackend(),
|
||||
product.ExportFilestoreKey: s.platform.ExportFileBackend(),
|
||||
product.FileInfoStoreKey: &fileInfoWrapper{srv: s},
|
||||
product.ClusterKey: s.platform,
|
||||
product.UserKey: app,
|
||||
product.LogKey: s.platform.Log(),
|
||||
product.CloudKey: &cloudWrapper{cloud: s.Cloud},
|
||||
product.KVStoreKey: s.platform,
|
||||
product.StoreKey: store.NewStoreServiceAdapter(s.Store()),
|
||||
product.SystemKey: &systemServiceAdapter{server: s},
|
||||
product.SessionKey: app,
|
||||
product.FrontendKey: app,
|
||||
product.CommandKey: app,
|
||||
}
|
||||
|
||||
// It is important to initialize the hub only after the global logger is set
|
||||
|
|
@ -1476,6 +1477,10 @@ func (s *Server) FileBackend() filestore.FileBackend {
|
|||
return s.platform.FileBackend()
|
||||
}
|
||||
|
||||
func (s *Server) ExportFileBackend() filestore.FileBackend {
|
||||
return s.platform.ExportFileBackend()
|
||||
}
|
||||
|
||||
func (s *Server) TotalWebsocketConnections() int {
|
||||
return s.Platform().TotalWebsocketConnections()
|
||||
}
|
||||
|
|
|
|||
server/channels/app/slashcommands/command_exportlink.go (new file, 120 lines)

@ -0,0 +1,120 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

package slashcommands

import (
	"path"
	"strings"
	"time"

	"github.com/mattermost/logr/v2"
	"github.com/mattermost/mattermost/server/public/model"
	"github.com/mattermost/mattermost/server/public/shared/i18n"
	"github.com/mattermost/mattermost/server/v8/channels/app"
	"github.com/mattermost/mattermost/server/v8/channels/app/request"
	"github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
)

type ExportLinkProvider struct {
}

const (
	CmdExportLink       = "exportlink"
	LatestExportMessage = "latest"
)

func init() {
	app.RegisterCommandProvider(&ExportLinkProvider{})
}

func (*ExportLinkProvider) GetTrigger() string {
	return CmdExportLink
}

func (*ExportLinkProvider) GetCommand(a *app.App, T i18n.TranslateFunc) *model.Command {
	if !a.Config().FeatureFlags.EnableExportDirectDownload {
		return nil
	}

	if !*a.Config().FileSettings.DedicatedExportStore {
		return nil
	}

	b := a.ExportFileBackend()
	_, ok := b.(filestore.FileBackendWithLinkGenerator)
	if !ok {
		return nil
	}

	return &model.Command{
		Trigger:          CmdExportLink,
		AutoComplete:     true,
		AutoCompleteDesc: T("api.command_exportlink.desc"),
		AutoCompleteHint: T("api.command_exportlink.hint", map[string]any{
			"LatestMsg": LatestExportMessage,
		}),
		DisplayName: T("api.command_exportlink.name"),
	}
}

func (*ExportLinkProvider) DoCommand(a *app.App, c request.CTX, args *model.CommandArgs, message string) *model.CommandResponse {
	if !a.SessionHasPermissionTo(*c.Session(), model.PermissionManageSystem) {
		return &model.CommandResponse{ResponseType: model.CommandResponseTypeEphemeral, Text: args.T("api.command_exportlink.permission.app_error")}
	}

	b := a.ExportFileBackend()
	_, ok := b.(filestore.FileBackendWithLinkGenerator)
	if !ok {
		return &model.CommandResponse{ResponseType: model.CommandResponseTypeEphemeral, Text: args.T("api.command_exportlink.driver.app_error")}
	}

	file := ""
	if message == LatestExportMessage {
		files, err := b.ListDirectory(*a.Config().ExportSettings.Directory)
		if err != nil {
			return &model.CommandResponse{ResponseType: model.CommandResponseTypeEphemeral, Text: args.T("api.command_exportlink.list.app_error")}
		}
		if len(files) == 0 {
			return &model.CommandResponse{ResponseType: model.CommandResponseTypeEphemeral, Text: args.T("api.command_exportlink.empty.app_error")}
		}
		latestFound := time.Time{}
		for _, f := range files {
			// find the latest file
			if !strings.HasSuffix(f, "_export.zip") {
				continue
			}
			t, err := b.FileModTime(f)
			if err != nil {
				a.Log().Warn("Failed to get file mod time", logr.String("file", f), logr.Err(err))
				continue
			}
			if t.After(latestFound) {
				file = path.Base(f)
				latestFound = t
			}
		}
	}

	if model.IsValidId(message) {
		file = message + "_export.zip"
	}

	if file == "" {
		file = message
	}
	if !strings.HasSuffix(file, "_export.zip") {
		return &model.CommandResponse{ResponseType: model.CommandResponseTypeEphemeral, Text: args.T("api.command_exportlink.invalid.app_error")}
	}

	res, err := a.GeneratePresignURLForExport(file)
	if err != nil {
		return &model.CommandResponse{ResponseType: model.CommandResponseTypeEphemeral, Text: args.T("api.command_exportlink.presign.app_error")}
	}

	// return link
	return &model.CommandResponse{ResponseType: model.CommandResponseTypeEphemeral, Text: args.T("api.command_exportlink.link.text", map[string]interface{}{
		"Link":       res.URL,
		"Expiration": res.Expiration.String(),
	})}
}
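Usage note (not from the commit itself): a System Admin can run /exportlink latest to get a presigned link for the newest *_export.zip in the export directory, /exportlink <job-id> to resolve <job-id>_export.zip, or pass an explicit *_export.zip filename. Any other argument is rejected with the api.command_exportlink.invalid.app_error message, and the command is only registered when the EnableExportDirectDownload feature flag, FileSettings.DedicatedExportStore, and a link-generating export backend are all in place, mirroring the checks in GetCommand above.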
@ -19,9 +19,9 @@ const jobName = "ExportDelete"
|
|||
|
||||
type AppIface interface {
|
||||
configservice.ConfigService
|
||||
ListDirectory(path string) ([]string, *model.AppError)
|
||||
FileModTime(path string) (time.Time, *model.AppError)
|
||||
RemoveFile(path string) *model.AppError
|
||||
ListExportDirectory(path string) ([]string, *model.AppError)
|
||||
ExportFileModTime(path string) (time.Time, *model.AppError)
|
||||
RemoveExportFile(path string) *model.AppError
|
||||
}
|
||||
|
||||
func MakeWorker(jobServer *jobs.JobServer, app AppIface) model.Worker {
|
||||
|
|
@ -33,7 +33,7 @@ func MakeWorker(jobServer *jobs.JobServer, app AppIface) model.Worker {
|
|||
|
||||
exportPath := *app.Config().ExportSettings.Directory
|
||||
retentionTime := time.Duration(*app.Config().ExportSettings.RetentionDays) * 24 * time.Hour
|
||||
exports, appErr := app.ListDirectory(exportPath)
|
||||
exports, appErr := app.ListExportDirectory(exportPath)
|
||||
if appErr != nil {
|
||||
return appErr
|
||||
}
|
||||
|
|
@ -41,7 +41,7 @@ func MakeWorker(jobServer *jobs.JobServer, app AppIface) model.Worker {
|
|||
errors := merror.New()
|
||||
for i := range exports {
|
||||
filename := filepath.Base(exports[i])
|
||||
modTime, appErr := app.FileModTime(filepath.Join(exportPath, filename))
|
||||
modTime, appErr := app.ExportFileModTime(filepath.Join(exportPath, filename))
|
||||
if appErr != nil {
|
||||
mlog.Debug("Worker: Failed to get file modification time",
|
||||
mlog.Err(appErr), mlog.String("export", exports[i]))
|
||||
|
|
@ -51,7 +51,7 @@ func MakeWorker(jobServer *jobs.JobServer, app AppIface) model.Worker {
|
|||
|
||||
if time.Now().After(modTime.Add(retentionTime)) {
|
||||
// remove file data from storage.
|
||||
if appErr := app.RemoveFile(exports[i]); appErr != nil {
|
||||
if appErr := app.RemoveExportFile(exports[i]); appErr != nil {
|
||||
mlog.Debug("Worker: Failed to remove file",
|
||||
mlog.Err(appErr), mlog.String("export", exports[i]))
|
||||
errors.Append(appErr)
|
||||
|
|
|
|||
|
|
@ -19,8 +19,7 @@ const jobName = "ExportProcess"
|
|||
|
||||
type AppIface interface {
|
||||
configservice.ConfigService
|
||||
WriteFile(fr io.Reader, path string) (int64, *model.AppError)
|
||||
WriteFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError)
|
||||
WriteExportFileContext(ctx context.Context, fr io.Reader, path string) (int64, *model.AppError)
|
||||
BulkExport(ctx request.CTX, writer io.Writer, outPath string, job *model.Job, opts model.BulkExportOpts) *model.AppError
|
||||
Log() *mlog.Logger
|
||||
}
|
||||
|
|
@ -45,7 +44,7 @@ func MakeWorker(jobServer *jobs.JobServer, app AppIface) model.Worker {
|
|||
rd, wr := io.Pipe()
|
||||
|
||||
go func() {
|
||||
_, appErr := app.WriteFileContext(context.Background(), rd, filepath.Join(outPath, exportFilename))
|
||||
_, appErr := app.WriteExportFileContext(context.Background(), rd, filepath.Join(outPath, exportFilename))
|
||||
if appErr != nil {
|
||||
// we close the reader here to prevent a deadlock when the bulk exporter tries to
|
||||
// write into the pipe while app.WriteFile has already returned. The error will be
|
||||
|
|
|
|||
|
|
@ -6,27 +6,28 @@ package product
|
|||
type ServiceKey string
|
||||
|
||||
const (
|
||||
ChannelKey ServiceKey = "channel"
|
||||
ConfigKey ServiceKey = "config"
|
||||
LicenseKey ServiceKey = "license"
|
||||
FilestoreKey ServiceKey = "filestore"
|
||||
FileInfoStoreKey ServiceKey = "fileinfostore"
|
||||
ClusterKey ServiceKey = "cluster"
|
||||
CloudKey ServiceKey = "cloud"
|
||||
PostKey ServiceKey = "post"
|
||||
TeamKey ServiceKey = "team"
|
||||
UserKey ServiceKey = "user"
|
||||
PermissionsKey ServiceKey = "permissions"
|
||||
RouterKey ServiceKey = "router"
|
||||
BotKey ServiceKey = "bot"
|
||||
LogKey ServiceKey = "log"
|
||||
HooksKey ServiceKey = "hooks"
|
||||
KVStoreKey ServiceKey = "kvstore"
|
||||
StoreKey ServiceKey = "storekey"
|
||||
SystemKey ServiceKey = "systemkey"
|
||||
PreferencesKey ServiceKey = "preferenceskey"
|
||||
SessionKey ServiceKey = "sessionkey"
|
||||
FrontendKey ServiceKey = "frontendkey"
|
||||
CommandKey ServiceKey = "commandkey"
|
||||
ThreadsKey ServiceKey = "threadskey"
|
||||
ChannelKey ServiceKey = "channel"
|
||||
ConfigKey ServiceKey = "config"
|
||||
LicenseKey ServiceKey = "license"
|
||||
FilestoreKey ServiceKey = "filestore"
|
||||
ExportFilestoreKey ServiceKey = "exportfilestore"
|
||||
FileInfoStoreKey ServiceKey = "fileinfostore"
|
||||
ClusterKey ServiceKey = "cluster"
|
||||
CloudKey ServiceKey = "cloud"
|
||||
PostKey ServiceKey = "post"
|
||||
TeamKey ServiceKey = "team"
|
||||
UserKey ServiceKey = "user"
|
||||
PermissionsKey ServiceKey = "permissions"
|
||||
RouterKey ServiceKey = "router"
|
||||
BotKey ServiceKey = "bot"
|
||||
LogKey ServiceKey = "log"
|
||||
HooksKey ServiceKey = "hooks"
|
||||
KVStoreKey ServiceKey = "kvstore"
|
||||
StoreKey ServiceKey = "storekey"
|
||||
SystemKey ServiceKey = "systemkey"
|
||||
PreferencesKey ServiceKey = "preferenceskey"
|
||||
SessionKey ServiceKey = "sessionkey"
|
||||
FrontendKey ServiceKey = "frontendkey"
|
||||
CommandKey ServiceKey = "commandkey"
|
||||
ThreadsKey ServiceKey = "threadskey"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -146,5 +146,6 @@ type Client interface {
|
|||
ListExports(ctx context.Context) ([]string, *model.Response, error)
|
||||
DeleteExport(ctx context.Context, name string) (*model.Response, error)
|
||||
DownloadExport(ctx context.Context, name string, wr io.Writer, offset int64) (int64, *model.Response, error)
|
||||
GeneratePresignedURL(ctx context.Context, name string) (*model.PresignURLResponse, *model.Response, error)
|
||||
ResetSamlAuthDataToEmail(ctx context.Context, includeDeleted bool, dryRun bool, userIDs []string) (int64, *model.Response, error)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -40,6 +40,13 @@ var ExportDownloadCmd = &cobra.Command{
|
|||
RunE: withClient(exportDownloadCmdF),
|
||||
}
|
||||
|
||||
var ExportGeneratePresignedURLCmd = &cobra.Command{
|
||||
Use: "generate-presigned-url [exportname]",
|
||||
Short: "Generate a presigned url for an export file. This is helpful when an export is big and might have trouble downloading from the Mattermost server.",
|
||||
Args: cobra.ExactArgs(1),
|
||||
RunE: withClient(exportGeneratePresignedURLCmdF),
|
||||
}
|
||||
|
||||
var ExportDeleteCmd = &cobra.Command{
|
||||
Use: "delete [exportname]",
|
||||
Aliases: []string{"rm"},
|
||||
|
|
@ -115,6 +122,7 @@ func init() {
|
|||
ExportListCmd,
|
||||
ExportDeleteCmd,
|
||||
ExportDownloadCmd,
|
||||
ExportGeneratePresignedURLCmd,
|
||||
ExportJobCmd,
|
||||
)
|
||||
RootCmd.AddCommand(ExportCmd)
|
||||
|
|
@ -171,6 +179,22 @@ func exportDeleteCmdF(c client.Client, command *cobra.Command, args []string) er
|
|||
return nil
|
||||
}
|
||||
|
||||
func exportGeneratePresignedURLCmdF(c client.Client, command *cobra.Command, args []string) error {
|
||||
name := args[0]
|
||||
|
||||
presignedURL, _, err := c.GeneratePresignedURL(context.TODO(), name)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate export link: %w", err)
|
||||
}
|
||||
|
||||
printer.PrintT("Export link: {{.Link}}\nExpiration: {{.Expiration}}", map[string]interface{}{
|
||||
"Link": presignedURL.URL,
|
||||
"Expiration": presignedURL.Expiration.String(),
|
||||
})
|
||||
|
||||
return nil
|
||||
}
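Usage note (not from the commit itself): with the new subcommand, mmctl export generate-presigned-url <exportname> prints the presigned link and its expiration via the printer template above, which is useful when large exports are slow to download through the Mattermost server itself.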
|
||||
|
||||
func exportDownloadCmdF(c client.Client, command *cobra.Command, args []string) error {
|
||||
var path string
|
||||
name := args[0]
|
||||
|
|
|
|||
|
|
@ -40,6 +40,7 @@ SEE ALSO
|
|||
* `mmctl export create <mmctl_export_create.rst>`_ - Create export file
|
||||
* `mmctl export delete <mmctl_export_delete.rst>`_ - Delete export file
|
||||
* `mmctl export download <mmctl_export_download.rst>`_ - Download export files
|
||||
* `mmctl export generate-presigned-url <mmctl_export_generate-presigned-url.rst>`_ - Generate a presigned url for an export file. This is helpful when an export is big and might have trouble downloading from the Mattermost server.
|
||||
* `mmctl export job <mmctl_export_job.rst>`_ - List, show and cancel export jobs
|
||||
* `mmctl export list <mmctl_export_list.rst>`_ - List export files
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,44 @@
|
|||
.. _mmctl_export_generate-presigned-url:
|
||||
|
||||
mmctl export generate-presigned-url
|
||||
-----------------------------------
|
||||
|
||||
Generate a presigned url for an export file. This is helpful when an export is big and might have trouble downloading from the Mattermost server.
|
||||
|
||||
Synopsis
|
||||
~~~~~~~~
|
||||
|
||||
|
||||
Generate a presigned url for an export file. This is helpful when an export is big and might have trouble downloading from the Mattermost server.
|
||||
|
||||
::
|
||||
|
||||
mmctl export generate-presigned-url [exportname] [flags]
|
||||
|
||||
Options
|
||||
~~~~~~~
|
||||
|
||||
::
|
||||
|
||||
-h, --help help for generate-presigned-url
|
||||
|
||||
Options inherited from parent commands
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
::
|
||||
|
||||
--config string path to the configuration file (default "$XDG_CONFIG_HOME/mmctl/config")
|
||||
--disable-pager disables paged output
|
||||
--insecure-sha1-intermediate allows to use insecure TLS protocols, such as SHA-1
|
||||
--insecure-tls-version allows to use TLS versions 1.0 and 1.1
|
||||
--json the output format will be in json format
|
||||
--local allows communicating with the server through a unix socket
|
||||
--quiet prevent mmctl to generate output for the commands
|
||||
--strict will only run commands if the mmctl version matches the server one
|
||||
--suppress-warnings disables printing warning messages
|
||||
|
||||
SEE ALSO
|
||||
~~~~~~~~
|
||||
|
||||
* `mmctl export <mmctl_export.rst>`_ - Management of exports
|
||||
|
||||
|
|
@ -525,6 +525,22 @@ func (mr *MockClientMockRecorder) EnablePlugin(arg0, arg1 interface{}) *gomock.C
|
|||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "EnablePlugin", reflect.TypeOf((*MockClient)(nil).EnablePlugin), arg0, arg1)
|
||||
}
|
||||
|
||||
// GeneratePresignedURL mocks base method.
|
||||
func (m *MockClient) GeneratePresignedURL(arg0 context.Context, arg1 string) (*model.PresignURLResponse, *model.Response, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GeneratePresignedURL", arg0, arg1)
|
||||
ret0, _ := ret[0].(*model.PresignURLResponse)
|
||||
ret1, _ := ret[1].(*model.Response)
|
||||
ret2, _ := ret[2].(error)
|
||||
return ret0, ret1, ret2
|
||||
}
|
||||
|
||||
// GeneratePresignedURL indicates an expected call of GeneratePresignedURL.
|
||||
func (mr *MockClientMockRecorder) GeneratePresignedURL(arg0, arg1 interface{}) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GeneratePresignedURL", reflect.TypeOf((*MockClient)(nil).GeneratePresignedURL), arg0, arg1)
|
||||
}
|
||||
|
||||
// GetAllTeams mocks base method.
|
||||
func (m *MockClient) GetAllTeams(arg0 context.Context, arg1 string, arg2, arg3 int) ([]*model.Team, *model.Response, error) {
|
||||
m.ctrl.T.Helper()
|
||||
|
|
|
|||
|
|
@ -811,6 +811,46 @@
|
|||
"id": "api.command_expand_collapse.fail.app_error",
|
||||
"translation": "An error occurred while expanding previews."
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.desc",
|
||||
"translation": "Generate a link to download a export."
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.driver.app_error",
|
||||
"translation": "The file store driver does not support link generation."
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.empty.app_error",
|
||||
"translation": "No export file has been found."
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.hint",
|
||||
"translation": "[job-id|zip filename|{{.LatestMsg}}]"
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.invalid.app_error",
|
||||
"translation": "Unable to find the file you requested."
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.link.text",
|
||||
"translation": "You can download your file here: {{.Link}}.\nThis link will expire in {{.Expiration}}."
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.list.app_error",
|
||||
"translation": "Unable to retrieve the export list."
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.name",
|
||||
"translation": "exportlink"
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.permission.app_error",
|
||||
"translation": "You don't have enough permissions to run this command."
|
||||
},
|
||||
{
|
||||
"id": "api.command_exportlink.presign.app_error",
|
||||
"translation": "Unable to generate presign url."
|
||||
},
|
||||
{
|
||||
"id": "api.command_groupmsg.desc",
|
||||
"translation": "Sends a Group Message to the specified users"
|
||||
|
|
@ -5107,6 +5147,30 @@
|
|||
"id": "app.emoji.get_list.internal_error",
|
||||
"translation": "Unable to get the emoji."
|
||||
},
|
||||
{
|
||||
"id": "app.eport.generate_presigned_url.config.app_error",
|
||||
"translation": "This actions requires the use of a dedicated export store."
|
||||
},
|
||||
{
|
||||
"id": "app.eport.generate_presigned_url.driver.app_error",
|
||||
"translation": "Your export store driver does not support presign url generation."
|
||||
},
|
||||
{
|
||||
"id": "app.eport.generate_presigned_url.featureflag.app_error",
|
||||
"translation": "This feature is restricted by a feature flag."
|
||||
},
|
||||
{
|
||||
"id": "app.eport.generate_presigned_url.fileexist.app_error",
|
||||
"translation": "Unable to check if the file exists."
|
||||
},
|
||||
{
|
||||
"id": "app.eport.generate_presigned_url.link.app_error",
|
||||
"translation": "Unable to generate the presigned url."
|
||||
},
|
||||
{
|
||||
"id": "app.eport.generate_presigned_url.notfound.app_error",
|
||||
"translation": "The export file was not found."
|
||||
},
|
||||
{
|
||||
"id": "app.export.export_attachment.copy_file.error",
|
||||
"translation": "Failed to copy file during export."
|
||||
|
|
|
|||
|
|
@ -41,6 +41,10 @@ type FileBackend interface {
|
|||
RemoveDirectory(path string) error
|
||||
}
|
||||
|
||||
type FileBackendWithLinkGenerator interface {
|
||||
GeneratePublicLink(path string) (string, time.Duration, error)
|
||||
}
|
||||
|
||||
type FileBackendSettings struct {
|
||||
DriverName string
|
||||
Directory string
|
||||
|
|
@@ -56,6 +60,7 @@ type FileBackendSettings struct {
	AmazonS3Trace                      bool
	SkipVerify                         bool
	AmazonS3RequestTimeoutMilliseconds int64
	AmazonS3PresignExpiresSeconds      int64
}

func NewFileBackendSettingsFromConfig(fileSettings *model.FileSettings, enableComplianceFeature bool, skipVerify bool) FileBackendSettings {
@@ -82,6 +87,31 @@ func NewFileBackendSettingsFromConfig(fileSettings *model.FileSettings, enableCo
	}
}

// NewExportFileBackendSettingsFromConfig builds backend settings for the dedicated
// export store from the Export* fields of FileSettings.
func NewExportFileBackendSettingsFromConfig(fileSettings *model.FileSettings, enableComplianceFeature bool, skipVerify bool) FileBackendSettings {
	if *fileSettings.ExportDriverName == model.ImageDriverLocal {
		return FileBackendSettings{
			DriverName: *fileSettings.ExportDriverName,
			Directory:  *fileSettings.ExportDirectory,
		}
	}
	return FileBackendSettings{
		DriverName:                         *fileSettings.ExportDriverName,
		AmazonS3AccessKeyId:                *fileSettings.ExportAmazonS3AccessKeyId,
		AmazonS3SecretAccessKey:            *fileSettings.ExportAmazonS3SecretAccessKey,
		AmazonS3Bucket:                     *fileSettings.ExportAmazonS3Bucket,
		AmazonS3PathPrefix:                 *fileSettings.ExportAmazonS3PathPrefix,
		AmazonS3Region:                     *fileSettings.ExportAmazonS3Region,
		AmazonS3Endpoint:                   *fileSettings.ExportAmazonS3Endpoint,
		AmazonS3SSL:                        fileSettings.ExportAmazonS3SSL == nil || *fileSettings.ExportAmazonS3SSL,
		AmazonS3SignV2:                     fileSettings.ExportAmazonS3SignV2 != nil && *fileSettings.ExportAmazonS3SignV2,
		AmazonS3SSE:                        fileSettings.ExportAmazonS3SSE != nil && *fileSettings.ExportAmazonS3SSE && enableComplianceFeature,
		AmazonS3Trace:                      fileSettings.ExportAmazonS3Trace != nil && *fileSettings.ExportAmazonS3Trace,
		AmazonS3RequestTimeoutMilliseconds: *fileSettings.ExportAmazonS3RequestTimeoutMilliseconds,
		AmazonS3PresignExpiresSeconds:      *fileSettings.ExportAmazonS3PresignExpiresSeconds,
		SkipVerify:                         skipVerify,
	}
}

func (settings *FileBackendSettings) CheckMandatoryS3Fields() error {
	if settings.AmazonS3Bucket == "" {
		return errors.New("missing s3 bucket settings")
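A hedged sketch of how this constructor might be wired in: when FileSettings.DedicatedExportStore is enabled, the server builds a second backend from the Export* fields rather than reusing the primary file store. NewFileBackend and the model import path are taken from the surrounding filestore package and are assumptions here, not part of this diff.

package filestore

import (
	// Module path assumed; match whatever the surrounding filestore package imports.
	"github.com/mattermost/mattermost/server/public/model"
)

// newExportBackend is illustrative only and not part of this commit.
func newExportBackend(fs *model.FileSettings, complianceEnabled, skipVerify bool) (FileBackend, error) {
	settings := NewFileBackendSettingsFromConfig(fs, complianceEnabled, skipVerify)
	if fs.DedicatedExportStore != nil && *fs.DedicatedExportStore {
		// The dedicated export store reads the Export* mirror of the S3/local settings.
		settings = NewExportFileBackendSettingsFromConfig(fs, complianceEnabled, skipVerify)
	}
	return NewFileBackend(settings)
}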
@@ -0,0 +1,62 @@
// Code generated by mockery v2.23.2. DO NOT EDIT.

// Regenerate this file using `make filestore-mocks`.

package mocks

import (
	time "time"

	mock "github.com/stretchr/testify/mock"
)

// FileBackendWithLinkGenerator is an autogenerated mock type for the FileBackendWithLinkGenerator type
type FileBackendWithLinkGenerator struct {
	mock.Mock
}

// GeneratePublicLink provides a mock function with given fields: path
func (_m *FileBackendWithLinkGenerator) GeneratePublicLink(path string) (string, time.Duration, error) {
	ret := _m.Called(path)

	var r0 string
	var r1 time.Duration
	var r2 error
	if rf, ok := ret.Get(0).(func(string) (string, time.Duration, error)); ok {
		return rf(path)
	}
	if rf, ok := ret.Get(0).(func(string) string); ok {
		r0 = rf(path)
	} else {
		r0 = ret.Get(0).(string)
	}

	if rf, ok := ret.Get(1).(func(string) time.Duration); ok {
		r1 = rf(path)
	} else {
		r1 = ret.Get(1).(time.Duration)
	}

	if rf, ok := ret.Get(2).(func(string) error); ok {
		r2 = rf(path)
	} else {
		r2 = ret.Error(2)
	}

	return r0, r1, r2
}

type mockConstructorTestingTNewFileBackendWithLinkGenerator interface {
	mock.TestingT
	Cleanup(func())
}

// NewFileBackendWithLinkGenerator creates a new instance of FileBackendWithLinkGenerator. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewFileBackendWithLinkGenerator(t mockConstructorTestingTNewFileBackendWithLinkGenerator) *FileBackendWithLinkGenerator {
	mock := &FileBackendWithLinkGenerator{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}
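A minimal sketch of the generated mock in use, assuming a test placed alongside it in the mocks package; the test name and canned values are illustrative:

package mocks

import (
	"testing"
	"time"

	"github.com/stretchr/testify/require"
)

// TestGeneratePublicLinkMock is illustrative only: the mock returns a canned URL
// and expiry, and the expectation is asserted automatically on test cleanup.
func TestGeneratePublicLinkMock(t *testing.T) {
	backend := NewFileBackendWithLinkGenerator(t)
	backend.On("GeneratePublicLink", "export.zip").
		Return("https://s3.example.com/export.zip?signed", 6*time.Hour, nil)

	url, expires, err := backend.GeneratePublicLink("export.zip")
	require.NoError(t, err)
	require.Equal(t, "https://s3.example.com/export.zip?signed", url)
	require.Equal(t, 6*time.Hour, expires)
}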
@@ -9,6 +9,7 @@ import (
	"crypto/tls"
	"io"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"strings"
@@ -26,20 +27,21 @@ import (
// S3FileBackend contains all necessary information to communicate with
// an AWS S3 compatible API backend.
type S3FileBackend struct {
-	endpoint   string
-	accessKey  string
-	secretKey  string
-	secure     bool
-	signV2     bool
-	region     string
-	bucket     string
-	pathPrefix string
-	encrypt    bool
-	trace      bool
-	client     *s3.Client
-	skipVerify bool
-	timeout    time.Duration
-	isCloud    bool // field to indicate whether this is running under Mattermost cloud or not.
+	endpoint       string
+	accessKey      string
+	secretKey      string
+	secure         bool
+	signV2         bool
+	region         string
+	bucket         string
+	pathPrefix     string
+	encrypt        bool
+	trace          bool
+	client         *s3.Client
+	skipVerify     bool
+	timeout        time.Duration
+	presignExpires time.Duration
+	isCloud        bool // field to indicate whether this is running under Mattermost cloud or not.
}

type S3FileBackendAuthError struct {
@@ -61,7 +63,8 @@ var (

var (
	// Ensure that the ReaderAt interface is implemented.
-	_ io.ReaderAt = (*s3WithCancel)(nil)
+	_ io.ReaderAt                  = (*s3WithCancel)(nil)
+	_ FileBackendWithLinkGenerator = (*S3FileBackend)(nil)
)

func isFileExtImage(ext string) bool {
@@ -89,18 +92,19 @@ func (s *S3FileBackendNoBucketError) Error() string {
func NewS3FileBackend(settings FileBackendSettings) (*S3FileBackend, error) {
	timeout := time.Duration(settings.AmazonS3RequestTimeoutMilliseconds) * time.Millisecond
	backend := &S3FileBackend{
-		endpoint:   settings.AmazonS3Endpoint,
-		accessKey:  settings.AmazonS3AccessKeyId,
-		secretKey:  settings.AmazonS3SecretAccessKey,
-		secure:     settings.AmazonS3SSL,
-		signV2:     settings.AmazonS3SignV2,
-		region:     settings.AmazonS3Region,
-		bucket:     settings.AmazonS3Bucket,
-		pathPrefix: settings.AmazonS3PathPrefix,
-		encrypt:    settings.AmazonS3SSE,
-		trace:      settings.AmazonS3Trace,
-		skipVerify: settings.SkipVerify,
-		timeout:    timeout,
+		endpoint:       settings.AmazonS3Endpoint,
+		accessKey:      settings.AmazonS3AccessKeyId,
+		secretKey:      settings.AmazonS3SecretAccessKey,
+		secure:         settings.AmazonS3SSL,
+		signV2:         settings.AmazonS3SignV2,
+		region:         settings.AmazonS3Region,
+		bucket:         settings.AmazonS3Bucket,
+		pathPrefix:     settings.AmazonS3PathPrefix,
+		encrypt:        settings.AmazonS3SSE,
+		trace:          settings.AmazonS3Trace,
+		skipVerify:     settings.SkipVerify,
+		timeout:        timeout,
+		presignExpires: time.Duration(settings.AmazonS3PresignExpiresSeconds) * time.Second,
	}
	isCloud := os.Getenv("MM_CLOUD_FILESTORE_BIFROST") != ""
	cli, err := backend.s3New(isCloud)
@@ -609,6 +613,25 @@ func (b *S3FileBackend) RemoveDirectory(path string) error {
	return nil
}

func (b *S3FileBackend) GeneratePublicLink(path string) (string, time.Duration, error) {
	path, err := b.prefixedPath(path)
	if err != nil {
		return "", 0, errors.Wrapf(err, "unable to prefix path %s", path)
	}
	ctx, cancel := context.WithTimeout(context.Background(), b.timeout)
	defer cancel()

	reqParams := make(url.Values)
	reqParams.Set("response-content-disposition", "attachment")

	req, err := b.client.PresignedGetObject(ctx, b.bucket, path, b.presignExpires, reqParams)
	if err != nil {
		return "", 0, errors.Wrapf(err, "unable to generate public link for %s", path)
	}

	return req.String(), b.presignExpires, nil
}

// prefixedPathFast is a variation of prefixedPath
// where we don't check for the file path. This is for cases
// where we know the file won't exist - like while writing a new file.
@@ -8403,6 +8403,22 @@ func (c *Client4) DownloadExport(ctx context.Context, name string, wr io.Writer,
	return n, BuildResponse(r), nil
}

func (c *Client4) GeneratePresignedURL(ctx context.Context, name string) (*PresignURLResponse, *Response, error) {
	r, err := c.DoAPIRequest(ctx, http.MethodPost, c.APIURL+c.exportRoute(name)+"/presign-url", "", "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)

	res := &PresignURLResponse{}
	err = json.NewDecoder(r.Body).Decode(res)
	if err != nil {
		return nil, BuildResponse(r), NewAppError("GeneratePresignedURL", "model.client.json_decode.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
	}

	return res, BuildResponse(r), nil
}

func (c *Client4) GetUserThreads(ctx context.Context, userId, teamId string, options GetUserThreadsOpts) (*Threads, *Response, error) {
	v := url.Values{}
	if options.Since != 0 {
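A short, hedged usage example for the new client method from an admin script. The server URL, token, and export name are placeholders, the module import path is an assumption, and NewAPIv4Client/SetToken are used here as the client's existing helpers.

package main

import (
	"context"
	"fmt"

	// Module path assumed; match the version of the public model package you build against.
	"github.com/mattermost/mattermost/server/public/model"
)

func main() {
	client := model.NewAPIv4Client("https://mattermost.example.com")
	client.SetToken("admin-token") // the endpoint requires the manage_system permission

	// "export-name.zip" is a placeholder for a file returned by the export list endpoint.
	res, _, err := client.GeneratePresignedURL(context.Background(), "export-name.zip")
	if err != nil {
		fmt.Println("presign failed:", err)
		return
	}
	fmt.Printf("download: %s (expires in %s)\n", res.URL, res.Expiration)
}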
@@ -1546,6 +1546,22 @@ type FileSettings struct {
	AmazonS3SSE                              *bool   `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
	AmazonS3Trace                            *bool   `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
	AmazonS3RequestTimeoutMilliseconds       *int64  `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	// Export store settings
	DedicatedExportStore                     *bool   `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
	ExportDriverName                         *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
	ExportDirectory                          *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	ExportAmazonS3AccessKeyId                *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	ExportAmazonS3SecretAccessKey            *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	ExportAmazonS3Bucket                     *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	ExportAmazonS3PathPrefix                 *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	ExportAmazonS3Region                     *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	ExportAmazonS3Endpoint                   *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	ExportAmazonS3SSL                        *bool   `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
	ExportAmazonS3SignV2                     *bool   `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
	ExportAmazonS3SSE                        *bool   `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
	ExportAmazonS3Trace                      *bool   `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
	ExportAmazonS3RequestTimeoutMilliseconds *int64  `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
	ExportAmazonS3PresignExpiresSeconds      *int64  `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none
}

func (s *FileSettings) SetDefaults(isUpdate bool) {
@@ -1653,6 +1669,68 @@ func (s *FileSettings) SetDefaults(isUpdate bool) {
	if s.AmazonS3RequestTimeoutMilliseconds == nil {
		s.AmazonS3RequestTimeoutMilliseconds = NewInt64(30000)
	}

	if s.DedicatedExportStore == nil {
		s.DedicatedExportStore = NewBool(false)
	}

	if s.ExportDriverName == nil {
		s.ExportDriverName = NewString(ImageDriverLocal)
	}

	if s.ExportDirectory == nil || *s.ExportDirectory == "" {
		s.ExportDirectory = NewString(FileSettingsDefaultDirectory)
	}

	if s.ExportAmazonS3AccessKeyId == nil {
		s.ExportAmazonS3AccessKeyId = NewString("")
	}

	if s.ExportAmazonS3SecretAccessKey == nil {
		s.ExportAmazonS3SecretAccessKey = NewString("")
	}

	if s.ExportAmazonS3Bucket == nil {
		s.ExportAmazonS3Bucket = NewString("")
	}

	if s.ExportAmazonS3PathPrefix == nil {
		s.ExportAmazonS3PathPrefix = NewString("")
	}

	if s.ExportAmazonS3Region == nil {
		s.ExportAmazonS3Region = NewString("")
	}

	if s.ExportAmazonS3Endpoint == nil || *s.ExportAmazonS3Endpoint == "" {
		// Defaults to "s3.amazonaws.com"
		s.ExportAmazonS3Endpoint = NewString("s3.amazonaws.com")
	}

	if s.ExportAmazonS3SSL == nil {
		s.ExportAmazonS3SSL = NewBool(true) // Secure by default.
	}

	if s.ExportAmazonS3SignV2 == nil {
		s.ExportAmazonS3SignV2 = new(bool)
		// Signature v2 is not enabled by default.
	}

	if s.ExportAmazonS3SSE == nil {
		s.ExportAmazonS3SSE = NewBool(false) // Not Encrypted by default.
	}

	if s.ExportAmazonS3Trace == nil {
		s.ExportAmazonS3Trace = NewBool(false)
	}

	if s.ExportAmazonS3RequestTimeoutMilliseconds == nil {
		s.ExportAmazonS3RequestTimeoutMilliseconds = NewInt64(30000)
	}

	if s.ExportAmazonS3PresignExpiresSeconds == nil {
		s.ExportAmazonS3PresignExpiresSeconds = NewInt64(21600) // 6h
	}
}

type EmailSettings struct {
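To make the defaults concrete, a small sketch, assuming it lives in the model package: only the dedicated-store toggle, driver, and bucket are set explicitly, and SetDefaults fills the rest (the "s3.amazonaws.com" endpoint, the 30000 ms request timeout, the 21600 s presign expiry). ImageDriverS3 and the NewBool/NewString helpers are the package's existing names; the bucket value is a placeholder.

package model

import "fmt"

// Illustrative only: shows which export defaults SetDefaults fills in.
func exportDefaultsSketch() {
	s := FileSettings{
		DedicatedExportStore: NewBool(true),
		ExportDriverName:     NewString(ImageDriverS3),
		ExportAmazonS3Bucket: NewString("compliance-exports"),
	}
	s.SetDefaults(false)

	fmt.Println(*s.ExportAmazonS3Endpoint)              // s3.amazonaws.com
	fmt.Println(*s.ExportAmazonS3PresignExpiresSeconds) // 21600 (6h)
	fmt.Println(*s.ExportAmazonS3SSL)                   // true
}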
@@ -58,6 +58,8 @@ type FeatureFlags struct {

	CloudReverseTrial bool

	EnableExportDirectDownload bool

	DataRetentionConcurrencyEnabled bool
}
@@ -79,6 +81,7 @@ func (f *FeatureFlags) SetDefaults() {
	f.WysiwygEditor = false
	f.OnboardingTourTips = true
	f.CloudReverseTrial = false
	f.EnableExportDirectDownload = false
	f.DataRetentionConcurrencyEnabled = true
}
@@ -3,6 +3,8 @@

package model

import "time"

const (
	MaxImageSize = int64(6048 * 4032) // 24 megapixels, roughly 36MB as a raw image
)

@@ -11,3 +13,8 @@ type FileUploadResponse struct {
	FileInfos []*FileInfo `json:"file_infos"`
	ClientIds []string    `json:"client_ids"`
}

type PresignURLResponse struct {
	URL        string        `json:"url"`
	Expiration time.Duration `json:"expiration"`
}
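One practical note on the wire format: Expiration is a time.Duration, which encoding/json marshals as an integer nanosecond count, so non-Go clients will see 21600000000000 for the default six hours. A tiny sketch, assuming it sits in the model package; the URL value is a placeholder:

package model

import (
	"encoding/json"
	"fmt"
	"time"
)

// Illustrative only: time.Duration round-trips as nanoseconds in JSON.
func presignResponseJSONSketch() {
	res := PresignURLResponse{
		URL:        "https://s3.example.com/export.zip?signed",
		Expiration: 6 * time.Hour,
	}
	b, _ := json.Marshal(res)
	fmt.Println(string(b)) // {"url":"https://s3.example.com/export.zip?signed","expiration":21600000000000}
}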