/*
   GoToSocial
   Copyright (C) 2021-2022 GoToSocial Authors admin@gotosocial.org

   This program is free software: you can redistribute it and/or modify
   it under the terms of the GNU Affero General Public License as published by
   the Free Software Foundation, either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU Affero General Public License for more details.

   You should have received a copy of the GNU Affero General Public License
   along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package media

import (
	"bytes"
	"context"
	"errors"
	"fmt"
	"io"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	terminator "github.com/superseriousbusiness/exif-terminator"
	"github.com/superseriousbusiness/gotosocial/internal/db"
	"github.com/superseriousbusiness/gotosocial/internal/gtsmodel"
	"github.com/superseriousbusiness/gotosocial/internal/id"
	"github.com/superseriousbusiness/gotosocial/internal/log"
	"github.com/superseriousbusiness/gotosocial/internal/storage"
	"github.com/superseriousbusiness/gotosocial/internal/uris"
)

// ProcessingMedia represents a piece of media that is currently being processed. It exposes
// various functions for retrieving data from the process.
type ProcessingMedia struct {
	mu sync.Mutex

	/*
		below fields should be set on newly created media;
		attachment will be updated incrementally as media goes through processing
	*/

	attachment *gtsmodel.MediaAttachment
	data       DataFunc
	postData   PostDataCallbackFunc
	read       bool // bool indicating that the data function has been triggered already

	thumbState    int32 // the processing state of the media thumbnail
	fullSizeState int32 // the processing state of the full-sized media

	/*
		below pointers to database and storage are maintained so that
		the media can store and update itself during processing steps
	*/

	database db.DB
	storage  *storage.Driver

	err error // error created during processing, if any

	// track whether this media has already been put in the database
	insertedInDB bool

	// true if this is a recache, false if it's brand new media
	recache bool
}

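// A minimal usage sketch (not taken from this file): it assumes a *manager
// value `m` and a DataFunc `dataFunc` supplying the raw media bytes, both of
// which are hypothetical names here, and only calls functions defined below.
//
//	processing, err := m.preProcessMedia(ctx, dataFunc, nil, accountID, nil)
//	if err != nil {
//		return err
//	}
//
//	// LoadAttachment blocks until storing, full-size decoding,
//	// and thumbnailing have all finished (or errored).
//	attachment, err := processing.LoadAttachment(ctx)
//	if err != nil {
//		return err
//	}
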
// AttachmentID returns the ID of the underlying media attachment without blocking processing.
func (p *ProcessingMedia) AttachmentID() string {
	return p.attachment.ID
}

// LoadAttachment blocks until the thumbnail and full-size content
// have been processed, and then returns the completed attachment.
func (p *ProcessingMedia) LoadAttachment(ctx context.Context) (*gtsmodel.MediaAttachment, error) {
	p.mu.Lock()
	defer p.mu.Unlock()

	if err := p.store(ctx); err != nil {
		return nil, err
	}

	if err := p.loadFullSize(ctx); err != nil {
		return nil, err
	}

	if err := p.loadThumb(ctx); err != nil {
		return nil, err
	}

	if !p.insertedInDB {
		if p.recache {
			// This is an existing media attachment we're recaching, so we only need to update it
			if err := p.database.UpdateByID(ctx, p.attachment, p.attachment.ID); err != nil {
				return nil, err
			}
		} else {
			// This is a new media attachment we're caching for the first time
			if err := p.database.Put(ctx, p.attachment); err != nil {
				return nil, err
			}
		}

		// Mark this as stored in DB
		p.insertedInDB = true
	}

	log.Tracef("finished loading attachment %s", p.attachment.URL)
	return p.attachment, nil
}

// Finished returns true if processing has finished for both the thumbnail
// and full-sized version of this piece of media.
func (p *ProcessingMedia) Finished() bool {
	return atomic.LoadInt32(&p.thumbState) == int32(complete) && atomic.LoadInt32(&p.fullSizeState) == int32(complete)
}

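// Note on the two loaders below: each walks a small state machine read and
// written with sync/atomic. The state starts at received, moves to complete on
// success, or records the failure in p.err and moves to errored; Finished above
// simply checks that both states have reached complete.
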
func (p *ProcessingMedia) loadThumb(ctx context.Context) error {
	thumbState := atomic.LoadInt32(&p.thumbState)
	switch processState(thumbState) {
	case received:
		// we haven't processed a thumbnail for this media yet so do it now;
		// check if we need to create a blurhash or if there's already one set
		var createBlurhash bool
		if p.attachment.Blurhash == "" {
			// no blurhash created yet
			createBlurhash = true
		}

		var (
			thumb *mediaMeta
			err   error
		)
		switch ct := p.attachment.File.ContentType; ct {
		case mimeImageJpeg, mimeImagePng, mimeImageWebp, mimeImageGif:
			// thumbnail the image from the original stored full size version
			stored, err := p.storage.GetStream(ctx, p.attachment.File.Path)
			if err != nil {
				p.err = fmt.Errorf("loadThumb: error fetching file from storage: %s", err)
				atomic.StoreInt32(&p.thumbState, int32(errored))
				return p.err
			}

			thumb, err = deriveThumbnailFromImage(stored, ct, createBlurhash)

			// try to close the stored stream we had open, no matter what
			if closeErr := stored.Close(); closeErr != nil {
				log.Errorf("error closing stream: %s", closeErr)
			}

			// now check if we managed to get a thumbnail
			if err != nil {
				p.err = fmt.Errorf("loadThumb: error deriving thumbnail: %s", err)
				atomic.StoreInt32(&p.thumbState, int32(errored))
				return p.err
			}
		case mimeVideoMp4:
			// create a generic thumbnail based on video height + width
			thumb, err = deriveThumbnailFromVideo(p.attachment.FileMeta.Original.Height, p.attachment.FileMeta.Original.Width)
			if err != nil {
				p.err = fmt.Errorf("loadThumb: error deriving thumbnail: %s", err)
				atomic.StoreInt32(&p.thumbState, int32(errored))
				return p.err
			}
		default:
			p.err = fmt.Errorf("loadThumb: content type %s not a processible image type", ct)
			atomic.StoreInt32(&p.thumbState, int32(errored))
			return p.err
		}

		// put the thumbnail in storage
		if err := p.storage.Put(ctx, p.attachment.Thumbnail.Path, thumb.small); err != nil && err != storage.ErrAlreadyExists {
			p.err = fmt.Errorf("loadThumb: error storing thumbnail: %s", err)
			atomic.StoreInt32(&p.thumbState, int32(errored))
			return p.err
		}

		// set appropriate fields on the attachment based on the thumbnail we derived
		if createBlurhash {
			p.attachment.Blurhash = thumb.blurhash
		}
		p.attachment.FileMeta.Small = gtsmodel.Small{
			Width:  thumb.width,
			Height: thumb.height,
			Size:   thumb.size,
			Aspect: thumb.aspect,
		}
		p.attachment.Thumbnail.FileSize = len(thumb.small)

		// we're done processing the thumbnail!
		atomic.StoreInt32(&p.thumbState, int32(complete))
		log.Tracef("finished processing thumbnail for attachment %s", p.attachment.URL)
		fallthrough
	case complete:
		return nil
	case errored:
		return p.err
	}

	return fmt.Errorf("loadThumb: thumbnail processing status %d unknown", p.thumbState)
}

func (p *ProcessingMedia) loadFullSize(ctx context.Context) error {
	fullSizeState := atomic.LoadInt32(&p.fullSizeState)
	switch processState(fullSizeState) {
	case received:
		var err error
		var decoded *mediaMeta

		// stream the original file out of storage...
		stored, err := p.storage.GetStream(ctx, p.attachment.File.Path)
		if err != nil {
			p.err = fmt.Errorf("loadFullSize: error fetching file from storage: %s", err)
			atomic.StoreInt32(&p.fullSizeState, int32(errored))
			return p.err
		}

		defer func() {
			if err := stored.Close(); err != nil {
				log.Errorf("loadFullSize: error closing stored full size: %s", err)
			}
		}()

		// decode the image
		ct := p.attachment.File.ContentType
		switch ct {
		case mimeImageJpeg, mimeImagePng, mimeImageWebp:
			decoded, err = decodeImage(stored, ct)
		case mimeImageGif:
			decoded, err = decodeGif(stored)
		case mimeVideoMp4:
			decoded, err = decodeVideo(stored, ct)
		default:
			err = fmt.Errorf("loadFullSize: content type %s not a processible image type", ct)
		}

		if err != nil {
			p.err = err
			atomic.StoreInt32(&p.fullSizeState, int32(errored))
			return p.err
		}

		// set appropriate fields on the attachment based on the image we derived
		p.attachment.FileMeta.Original = gtsmodel.Original{
			Width:  decoded.width,
			Height: decoded.height,
			Size:   decoded.size,
			Aspect: decoded.aspect,
		}
		p.attachment.File.UpdatedAt = time.Now()
		p.attachment.Processing = gtsmodel.ProcessingStatusProcessed

		// we're done processing the full-size image
		atomic.StoreInt32(&p.fullSizeState, int32(complete))
		log.Tracef("finished processing full size image for attachment %s", p.attachment.URL)
		fallthrough
	case complete:
		return nil
	case errored:
		return p.err
	}

	return fmt.Errorf("loadFullSize: full size processing status %d unknown", p.fullSizeState)
}

// store calls the data function attached to p if it hasn't been called yet,
// and updates the underlying attachment fields as necessary. It will then stream
// bytes from p's reader directly into storage so that it can be retrieved later.
func (p *ProcessingMedia) store(ctx context.Context) error {
	// check if we've already done this and bail early if we have
	if p.read {
		return nil
	}

	// execute the data function to get the readcloser out of it
	rc, fileSize, err := p.data(ctx)
	if err != nil {
		return fmt.Errorf("store: error executing data function: %s", err)
	}

	// defer closing the reader when we're done with it
	defer func() {
		if err := rc.Close(); err != nil {
			log.Errorf("store: error closing readcloser: %s", err)
		}
	}()

	// execute the postData function no matter what happens
	defer func() {
		if p.postData != nil {
			if err := p.postData(ctx); err != nil {
				log.Errorf("store: error executing postData: %s", err)
			}
		}
	}()

	// extract no more than 261 bytes from the beginning of the file -- this is the header
	firstBytes := make([]byte, maxFileHeaderBytes)
	if _, err := rc.Read(firstBytes); err != nil {
		return fmt.Errorf("store: error reading initial %d bytes: %s", maxFileHeaderBytes, err)
	}

	// now we have the file header we can work out the content type from it
	contentType, err := parseContentType(firstBytes)
	if err != nil {
		return fmt.Errorf("store: error parsing content type: %s", err)
	}

	// bail if this is a type we can't process
	if !supportedAttachment(contentType) {
		return fmt.Errorf("store: media type %s not (yet) supported", contentType)
	}

	// extract the file extension
	split := strings.Split(contentType, "/")
	if len(split) != 2 {
		return fmt.Errorf("store: content type %s was not valid", contentType)
	}
	extension := split[1] // something like 'jpeg'

	// concatenate the cleaned up first bytes with the existing bytes still in the reader (thanks Mara)
	multiReader := io.MultiReader(bytes.NewBuffer(firstBytes), rc)

	// use the extension to derive the attachment type
	// and, while we're in here, clean up exif data from
	// the image if we already know the fileSize
	var readerToStore io.Reader
	switch extension {
	case mimeGif:
		p.attachment.Type = gtsmodel.FileTypeImage
		// nothing to terminate, we can just store the multireader
		readerToStore = multiReader
	case mimeJpeg, mimePng, mimeWebp:
		p.attachment.Type = gtsmodel.FileTypeImage
		if fileSize > 0 {
			terminated, err := terminator.Terminate(multiReader, int(fileSize), extension)
			if err != nil {
				return fmt.Errorf("store: exif error: %s", err)
			}
			defer func() {
				if closer, ok := terminated.(io.Closer); ok {
					if err := closer.Close(); err != nil {
						log.Errorf("store: error closing terminator reader: %s", err)
					}
				}
			}()
			// store the exif-terminated version of what was in the multireader
			readerToStore = terminated
		} else {
			// can't terminate if we don't know the file size, so just store the multiReader
			readerToStore = multiReader
		}
	case mimeMp4:
		p.attachment.Type = gtsmodel.FileTypeVideo
		// nothing to terminate, we can just store the multireader
		readerToStore = multiReader
	default:
		return fmt.Errorf("store: couldn't process %s", extension)
	}

	// now set some additional fields on the attachment since
	// we know more about what the underlying media actually is
	p.attachment.URL = uris.GenerateURIForAttachment(p.attachment.AccountID, string(TypeAttachment), string(SizeOriginal), p.attachment.ID, extension)
	p.attachment.File.ContentType = contentType
	p.attachment.File.Path = fmt.Sprintf("%s/%s/%s/%s.%s", p.attachment.AccountID, TypeAttachment, SizeOriginal, p.attachment.ID, extension)

	// store this for now -- other processes can pull it out of storage as they please
	if fileSize, err = putStream(ctx, p.storage, p.attachment.File.Path, readerToStore, fileSize); err != nil {
		if !errors.Is(err, storage.ErrAlreadyExists) {
			return fmt.Errorf("store: error storing stream: %s", err)
		}
		log.Warnf("attachment %s already exists at storage path: %s", p.attachment.ID, p.attachment.File.Path)
	}

	cached := true
	p.attachment.Cached = &cached
	p.attachment.File.FileSize = int(fileSize)
	p.read = true

	log.Tracef("finished storing initial data for attachment %s", p.attachment.URL)
	return nil
}

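// For reference when reading preProcessMedia below, a sketch of a DataFunc,
// assuming the DataFunc type declared elsewhere in this package has the shape
// func(ctx context.Context) (io.ReadCloser, int64, error); the path is purely
// illustrative.
//
//	dataFunc := func(ctx context.Context) (io.ReadCloser, int64, error) {
//		f, err := os.Open("/tmp/example-upload.jpg")
//		if err != nil {
//			return nil, 0, err
//		}
//		fi, err := f.Stat()
//		if err != nil {
//			f.Close()
//			return nil, 0, err
//		}
//		// returning a known file size lets store() run exif-terminator over the stream
//		return f, fi.Size(), nil
//	}
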
func (m *manager) preProcessMedia(ctx context.Context, data DataFunc, postData PostDataCallbackFunc, accountID string, ai *AdditionalMediaInfo) (*ProcessingMedia, error) {
	id, err := id.NewRandomULID()
	if err != nil {
		return nil, err
	}

	file := gtsmodel.File{
		Path:        "", // we don't know yet because it depends on the uncalled DataFunc
		ContentType: "", // we don't know yet because it depends on the uncalled DataFunc
		UpdatedAt:   time.Now(),
	}

	thumbnail := gtsmodel.Thumbnail{
		URL:         uris.GenerateURIForAttachment(accountID, string(TypeAttachment), string(SizeSmall), id, mimeJpeg), // all thumbnails are encoded as jpeg
		Path:        fmt.Sprintf("%s/%s/%s/%s.%s", accountID, TypeAttachment, SizeSmall, id, mimeJpeg),                 // all thumbnails are encoded as jpeg
		ContentType: mimeImageJpeg,
		UpdatedAt:   time.Now(),
	}

	avatar := false
	header := false
	cached := false

	// populate initial fields on the media attachment -- some of these will be overwritten as we proceed
	attachment := &gtsmodel.MediaAttachment{
		ID:                id,
		CreatedAt:         time.Now(),
		UpdatedAt:         time.Now(),
		StatusID:          "",
		URL:               "", // we don't know yet because it depends on the uncalled DataFunc
		RemoteURL:         "",
		Type:              gtsmodel.FileTypeUnknown, // we don't know yet because it depends on the uncalled DataFunc
		FileMeta:          gtsmodel.FileMeta{},
		AccountID:         accountID,
		Description:       "",
		ScheduledStatusID: "",
		Blurhash:          "",
		Processing:        gtsmodel.ProcessingStatusReceived,
		File:              file,
		Thumbnail:         thumbnail,
		Avatar:            &avatar,
		Header:            &header,
		Cached:            &cached,
	}

	// check if we have additional info to add to the attachment,
	// and overwrite some of the attachment fields if so
	if ai != nil {
		if ai.CreatedAt != nil {
			attachment.CreatedAt = *ai.CreatedAt
		}

		if ai.StatusID != nil {
			attachment.StatusID = *ai.StatusID
		}

		if ai.RemoteURL != nil {
			attachment.RemoteURL = *ai.RemoteURL
		}

		if ai.Description != nil {
			attachment.Description = *ai.Description
		}

		if ai.ScheduledStatusID != nil {
			attachment.ScheduledStatusID = *ai.ScheduledStatusID
		}

		if ai.Blurhash != nil {
			attachment.Blurhash = *ai.Blurhash
		}

		if ai.Avatar != nil {
			attachment.Avatar = ai.Avatar
		}

		if ai.Header != nil {
			attachment.Header = ai.Header
		}

		if ai.FocusX != nil {
			attachment.FileMeta.Focus.X = *ai.FocusX
		}

		if ai.FocusY != nil {
			attachment.FileMeta.Focus.Y = *ai.FocusY
		}
	}

	processingMedia := &ProcessingMedia{
		attachment:    attachment,
		data:          data,
		postData:      postData,
		thumbState:    int32(received),
		fullSizeState: int32(received),
		database:      m.db,
		storage:       m.storage,
	}

	return processingMedia, nil
}

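// A sketch of overriding initial attachment fields via AdditionalMediaInfo,
// assuming its fields are pointers as the nil checks above imply; the
// description value is illustrative only.
//
//	description := "a black and white photo of a cat"
//	processing, err := m.preProcessMedia(ctx, dataFunc, nil, accountID, &AdditionalMediaInfo{
//		Description: &description,
//	})
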
func (m *manager) preProcessRecache(ctx context.Context, data DataFunc, postData PostDataCallbackFunc, attachmentID string) (*ProcessingMedia, error) {
	// get the existing attachment
	attachment, err := m.db.GetAttachmentByID(ctx, attachmentID)
	if err != nil {
		return nil, err
	}

	processingMedia := &ProcessingMedia{
		attachment:    attachment,
		data:          data,
		postData:      postData,
		thumbState:    int32(received),
		fullSizeState: int32(received),
		database:      m.db,
		storage:       m.storage,
		recache:       true, // indicate it's a recache
	}

	return processingMedia, nil
}