// GoToSocial
// Copyright (C) GoToSocial Authors admin@gotosocial.org
// SPDX-License-Identifier: AGPL-3.0-or-later
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package dereferencing

import (
	"context"
	"errors"
	"io"
	"net/url"
	"time"

	"github.com/superseriousbusiness/activity/pub"
	"github.com/superseriousbusiness/gotosocial/internal/ap"
	"github.com/superseriousbusiness/gotosocial/internal/config"
	"github.com/superseriousbusiness/gotosocial/internal/db"
	"github.com/superseriousbusiness/gotosocial/internal/gtscontext"
	"github.com/superseriousbusiness/gotosocial/internal/gtserror"
	"github.com/superseriousbusiness/gotosocial/internal/gtsmodel"
	"github.com/superseriousbusiness/gotosocial/internal/id"
	"github.com/superseriousbusiness/gotosocial/internal/log"
	"github.com/superseriousbusiness/gotosocial/internal/media"
	"github.com/superseriousbusiness/gotosocial/internal/transport"
	"github.com/superseriousbusiness/gotosocial/internal/util"
)

// accountFresh returns true if the given account is
// still considered "fresh" according to the desired
// freshness window (falls back to default if nil).
//
// Local accounts will always be considered fresh because
// there's no remote state that could have changed.
//
// True is also returned for suspended accounts, since
// we'll never want to try to refresh one of these.
//
// Return value of false indicates that the account
// is not fresh and should be refreshed from remote.
func accountFresh(
	account *gtsmodel.Account,
	window *FreshnessWindow,
) bool {
	if window == nil {
		window = DefaultAccountFreshness
	}

	if account.IsLocal() {
		// Can't refresh
		// local accounts.
		return true
	}

	if account.IsSuspended() {
		// Can't/won't refresh
		// suspended accounts.
		return true
	}

	if account.IsInstance() &&
		!account.IsNew() {
		// Existing instance account.
		// No need for refresh.
		return true
	}

	// Moment when the account is
	// considered stale according to
	// desired freshness window.
	staleAt := account.FetchedAt.Add(
		time.Duration(*window),
	)

	// It's still fresh if the time now
	// is not past the point of staleness.
	return !time.Now().After(staleAt)
}
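
// A caller wanting a tighter staleness bound than DefaultAccountFreshness can
// pass its own window to RefreshAccount / RefreshAccountAsync. Illustrative
// sketch only, assuming FreshnessWindow is a time.Duration-based type (as the
// time.Duration(*window) conversion above implies):
//
//	window := FreshnessWindow(5 * time.Minute)
//	latest, apubAcc, err := d.RefreshAccount(ctx, requestUser, account, nil, &window)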

// GetAccountByURI will attempt to fetch an account by its URI, first checking the database. In the case of a newly-met remote model, or a remote model
// whose last_fetched date is beyond a certain interval, the account will be dereferenced. In the case of dereferencing, some low-priority account information
// may be enqueued for asynchronous fetching, e.g. featured account statuses (pins). A non-nil ActivityPub object indicates the account was dereferenced.
func (d *Dereferencer) GetAccountByURI(ctx context.Context, requestUser string, uri *url.URL) (*gtsmodel.Account, ap.Accountable, error) {
	// Fetch and dereference account if necessary.
	account, accountable, err := d.getAccountByURI(ctx,
		requestUser,
		uri,
	)
	if err != nil {
		return nil, nil, err
	}

	if accountable != nil {
		// This account was updated, enqueue re-dereference featured posts.
		d.state.Workers.Dereference.Queue.Push(func(ctx context.Context) {
			if err := d.dereferenceAccountFeatured(ctx, requestUser, account); err != nil {
				log.Errorf(ctx, "error fetching account featured collection: %v", err)
			}
		})
	}

	return account, accountable, nil
}
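
// Illustrative usage sketch: the returned ap.Accountable is non-nil only when
// a remote dereference actually took place, so callers can distinguish a
// database hit from a fresh fetch (the caller-side identifiers below are
// assumptions, not part of this package):
//
//	account, apubAcc, err := deref.GetAccountByURI(ctx, requestUser, uri)
//	if err != nil {
//		return err
//	}
//	if apubAcc != nil {
//		// Account was (re)dereferenced from remote just now.
//	}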

// getAccountByURI is a package internal form of .GetAccountByURI() that doesn't bother dereferencing featured posts on update.
func (d *Dereferencer) getAccountByURI(ctx context.Context, requestUser string, uri *url.URL) (*gtsmodel.Account, ap.Accountable, error) {
	var (
		account *gtsmodel.Account
		uriStr  = uri.String()
		err     error
	)

	// Search the database for existing account with URI.
	account, err = d.state.DB.GetAccountByURI(
		// request a barebones object, it may be in the
		// db but with related models not yet dereferenced.
		gtscontext.SetBarebones(ctx),
		uriStr,
	)
	if err != nil && !errors.Is(err, db.ErrNoEntries) {
		return nil, nil, gtserror.Newf("error checking database for account %s by uri: %w", uriStr, err)
	}

	if account == nil {
		// Else, search the database for existing by URL.
		account, err = d.state.DB.GetAccountByURL(
			gtscontext.SetBarebones(ctx),
			uriStr,
		)
		if err != nil && !errors.Is(err, db.ErrNoEntries) {
			return nil, nil, gtserror.Newf("error checking database for account %s by url: %w", uriStr, err)
		}
	}

	if account == nil {
		// Ensure that this isn't a search for a local account.
		if uri.Host == config.GetHost() || uri.Host == config.GetAccountDomain() {
			return nil, nil, gtserror.SetUnretrievable(err) // this will be db.ErrNoEntries
		}

		// Create and pass-through a new bare-bones model for dereferencing.
		return d.enrichAccountSafely(ctx, requestUser, uri, &gtsmodel.Account{
			ID:     id.NewULID(),
			Domain: uri.Host,
			URI:    uriStr,
		}, nil)
	}

	if accountFresh(account, nil) {
		// This is an existing account that is up-to-date,
		// before returning ensure it is fully populated.
		if err := d.state.DB.PopulateAccount(ctx, account); err != nil {
			log.Errorf(ctx, "error populating existing account: %v", err)
		}

		return account, nil, nil
	}

	// Try to update existing account model.
	latest, accountable, err := d.enrichAccountSafely(ctx,
		requestUser,
		uri,
		account,
		nil,
	)
	if err != nil {
		log.Errorf(ctx, "error enriching remote account: %v", err)

		// Fallback to existing.
		return account, nil, nil
	}

	return latest, accountable, nil
}

// GetAccountByUsernameDomain will attempt to fetch an account by its username@domain, first checking the database. In the case of a newly-met remote model,
// or a remote model whose last_fetched date is beyond a certain interval, the account will be dereferenced. In the case of dereferencing, some low-priority
// account information may be enqueued for asynchronous fetching, e.g. featured account statuses (pins). A non-nil ActivityPub object indicates the account was dereferenced.
func (d *Dereferencer) GetAccountByUsernameDomain(ctx context.Context, requestUser string, username string, domain string) (*gtsmodel.Account, ap.Accountable, error) {
	account, accountable, err := d.getAccountByUsernameDomain(
		ctx,
		requestUser,
		username,
		domain,
	)
	if err != nil {
		return nil, nil, err
	}

	if accountable != nil {
		// This account was updated, enqueue re-dereference featured posts.
		d.state.Workers.Dereference.Queue.Push(func(ctx context.Context) {
			if err := d.dereferenceAccountFeatured(ctx, requestUser, account); err != nil {
				log.Errorf(ctx, "error fetching account featured collection: %v", err)
			}
		})
	}

	return account, accountable, nil
}

// getAccountByUsernameDomain is a package internal form of
// GetAccountByUsernameDomain() that doesn't bother deref of featured posts.
func (d *Dereferencer) getAccountByUsernameDomain(
	ctx context.Context,
	requestUser string,
	username string,
	domain string,
) (*gtsmodel.Account, ap.Accountable, error) {
	if domain == config.GetHost() || domain == config.GetAccountDomain() {
		// We do local lookups using an empty domain,
		// else it will fail the db search below.
		domain = ""
	}

	// Search the database for existing account with USERNAME@DOMAIN.
	account, err := d.state.DB.GetAccountByUsernameDomain(
		// request a barebones object, it may be in the
		// db but with related models not yet dereferenced.
		gtscontext.SetBarebones(ctx),
		username, domain,
	)
	if err != nil && !errors.Is(err, db.ErrNoEntries) {
		return nil, nil, gtserror.Newf("error checking database for account %s@%s: %w", username, domain, err)
	}

	if account == nil {
		if domain == "" {
			// failed local lookup, will be db.ErrNoEntries.
			return nil, nil, gtserror.SetUnretrievable(err)
		}

		// Create and pass-through a new bare-bones model for dereferencing.
		account, accountable, err := d.enrichAccountSafely(ctx, requestUser, nil, &gtsmodel.Account{
			ID:       id.NewULID(),
			Domain:   domain,
			Username: username,
		}, nil)
		if err != nil {
			return nil, nil, err
		}

		return account, accountable, nil
	}

	// Try to update existing account model.
	latest, accountable, err := d.RefreshAccount(ctx,
		requestUser,
		account,
		nil,
		nil,
	)
	if err != nil {
		// Fallback to existing.
		return account, nil, nil //nolint
	}

	if accountable == nil {
		// This is an existing, up-to-date account, ensure it is populated.
		if err := d.state.DB.PopulateAccount(ctx, latest); err != nil {
			log.Errorf(ctx, "error populating existing account: %v", err)
		}
	}

	return latest, accountable, nil
}

// RefreshAccount updates the given account if it's a
// remote account, and considered stale / not fresh
// based on Account.FetchedAt and desired freshness.
//
// An updated account model is returned, but in the
// case of dereferencing, some low-priority account
// info may be enqueued for asynchronous fetching,
// e.g. featured account statuses (pins).
//
// A non-nil ActivityPub object indicates the account
// was dereferenced (i.e. updated).
func (d *Dereferencer) RefreshAccount(
	ctx context.Context,
	requestUser string,
	account *gtsmodel.Account,
	accountable ap.Accountable,
	window *FreshnessWindow,
) (*gtsmodel.Account, ap.Accountable, error) {
	// If no incoming data is provided,
	// check whether account needs refresh.
	if accountable == nil &&
		accountFresh(account, window) {
		return account, nil, nil
	}

	// Parse the URI from account.
	uri, err := url.Parse(account.URI)
	if err != nil {
		return nil, nil, gtserror.Newf("invalid account uri %q: %w", account.URI, err)
	}

	// Try to update + deref passed account model.
	latest, accountable, err := d.enrichAccountSafely(ctx,
		requestUser,
		uri,
		account,
		accountable,
	)
	if err != nil {
		log.Errorf(ctx, "error enriching remote account: %v", err)
		return nil, nil, gtserror.Newf("error enriching remote account: %w", err)
	}

	if accountable != nil {
		// This account was updated, enqueue re-dereference featured posts.
		d.state.Workers.Dereference.Queue.Push(func(ctx context.Context) {
			if err := d.dereferenceAccountFeatured(ctx, requestUser, latest); err != nil {
				log.Errorf(ctx, "error fetching account featured collection: %v", err)
			}
		})
	}

	return latest, accountable, nil
}

// RefreshAccountAsync enqueues the given account for
// an asynchronous update fetch, if it's a remote
// account, and considered stale / not fresh based on
// Account.FetchedAt and desired freshness.
//
// This is a more optimized form of manually enqueueing
// .UpdateAccount() to the federation worker, since it
// only enqueues an update if necessary.
func (d *Dereferencer) RefreshAccountAsync(
	ctx context.Context,
	requestUser string,
	account *gtsmodel.Account,
	accountable ap.Accountable,
	window *FreshnessWindow,
) {
	// If no incoming data is provided,
	// check whether account needs refresh.
	if accountable == nil &&
		accountFresh(account, window) {
		return
	}

	// Parse the URI from account.
	uri, err := url.Parse(account.URI)
	if err != nil {
		log.Errorf(ctx, "invalid account uri %q: %v", account.URI, err)
		return
	}

	// Enqueue a worker function to enrich this account async.
	d.state.Workers.Dereference.Queue.Push(func(ctx context.Context) {
		latest, accountable, err := d.enrichAccountSafely(ctx, requestUser, uri, account, accountable)
		if err != nil {
			log.Errorf(ctx, "error enriching remote account: %v", err)
			return
		}

		if accountable != nil {
			// This account was updated, re-dereference featured posts.
			if err := d.dereferenceAccountFeatured(ctx, requestUser, latest); err != nil {
				log.Errorf(ctx, "error fetching account featured collection: %v", err)
			}
		}
	})
}
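
// Illustrative sketch of choosing between the two refresh forms (caller-side
// names are assumptions): RefreshAccount blocks until the enriched model is
// available, while RefreshAccountAsync returns immediately and refreshes via
// the dereference worker queue:
//
//	// Blocking: wait for the enriched model.
//	latest, _, err := d.RefreshAccount(ctx, requestUser, account, nil, nil)
//
//	// Non-blocking: keep using the stored model, refresh in the background.
//	d.RefreshAccountAsync(ctx, requestUser, account, nil, nil)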

// enrichAccountSafely wraps enrichAccount() to perform
// it within the State{}.FedLocks mutexmap, which protects
// dereferencing actions with per-URI mutex locks.
func (d *Dereferencer) enrichAccountSafely(
	ctx context.Context,
	requestUser string,
	uri *url.URL,
	account *gtsmodel.Account,
	accountable ap.Accountable,
) (*gtsmodel.Account, ap.Accountable, error) {
	// Noop if account suspended;
	// we don't want to deref it.
	if account.IsSuspended() {
		return account, nil, nil
	}

	// By default use account.URI
	// as the per-URI deref lock.
	var uriStr string
	if account.URI != "" {
		uriStr = account.URI
	} else {
		// No URI is set yet, instead generate a faux-one from user+domain.
		uriStr = "https://" + account.Domain + "/users/" + account.Username
	}

	// Acquire per-URI deref lock, wrapping unlock
	// to safely defer in case of panic, while still
	// performing more granular unlocks when needed.
	unlock := d.state.FedLocks.Lock(uriStr)
	unlock = util.DoOnce(unlock)
	defer unlock()

	// Perform account enrichment with passed vars.
	latest, apubAcc, err := d.enrichAccount(ctx,
		requestUser,
		uri,
		account,
		accountable,
	)
	if gtserror.StatusCode(err) >= 400 {
		if account.IsNew() {
			// This was a new account enrich
			// attempt which failed before we
			// got to store it, so we can't
			// return anything useful.
			return nil, nil, err
		}

		// We had this account stored already
		// before this enrichment attempt.
		//
		// Update fetched_at to slow re-attempts
		// but don't return early. We can still
		// return the model we had stored already.
		account.FetchedAt = time.Now()
		if err := d.state.DB.UpdateAccount(ctx, account, "fetched_at"); err != nil {
			log.Errorf(ctx, "error updating %s fetched_at: %v", uriStr, err)
		}
	}

	// Unlock now
	// we're done.
	unlock()

	if errors.Is(err, db.ErrAlreadyExists) {
		// Ensure AP model isn't set,
		// otherwise this indicates WE
		// enriched the account.
		apubAcc = nil

		// DATA RACE! We likely lost out to another goroutine
		// in a call to db.Put(Account). Look again in DB by URI.
		latest, err = d.state.DB.GetAccountByURI(ctx, account.URI)
		if err != nil {
			err = gtserror.Newf("error getting account %s from database after race: %w", uriStr, err)
		}
	}

	return latest, apubAcc, err
}

// enrichAccount will enrich the given account, whether a
// new barebones model, or existing model from the database.
// It handles necessary dereferencing, webfingering etc.
func (d *Dereferencer) enrichAccount(
	ctx context.Context,
	requestUser string,
	uri *url.URL,
	account *gtsmodel.Account,
	apubAcc ap.Accountable,
) (*gtsmodel.Account, ap.Accountable, error) {
	// Pre-fetch a transport for requesting username, used by later deref procedures.
	tsport, err := d.transportController.NewTransportForUsername(ctx, requestUser)
	if err != nil {
		return nil, nil, gtserror.Newf("couldn't create transport: %w", err)
	}

	if account.Username != "" {
		// A username was provided so we can attempt a webfinger, this ensures up-to-date accountdomain info.
		accDomain, accURI, err := d.fingerRemoteAccount(ctx, tsport, account.Username, account.Domain)

		switch {
		case err != nil && account.URI == "":
			// This is a new account (to us) with username@domain
			// but failed webfinger, nothing more we can do.
			err := gtserror.Newf("error webfingering account: %w", err)
			return nil, nil, gtserror.SetUnretrievable(err)

		case err != nil:
			// Simply log this error and move on,
			// we already have an account URI.
			log.Errorf(ctx,
				"error webfingering[1] remote account %s@%s: %v",
				account.Username, account.Domain, err,
			)

		case err == nil && account.Domain != accDomain:
			// After webfinger, we now have correct account domain from which we can do a final DB check.
			alreadyAcc, err := d.state.DB.GetAccountByUsernameDomain(ctx, account.Username, accDomain)
			if err != nil && !errors.Is(err, db.ErrNoEntries) {
				return nil, nil, gtserror.Newf("db error getting account after webfinger: %w", err)
			}

			if alreadyAcc != nil {
				// We had this account stored
				// under discovered accountDomain.
				//
				// Proceed with this account.
				account = alreadyAcc
			}

			// Whether we had the account or not, we
			// now have webfinger info relevant to the
			// account, so fallthrough to set webfinger
			// info on either the account we just found,
			// or the stub account we were passed.
			fallthrough

		case err == nil:
			// Update account with latest info.
			account.URI = accURI.String()
			account.Domain = accDomain
			uri = accURI
		}
	}

	if uri == nil {
		// No URI provided / found,
		// must parse from account.
		uri, err = url.Parse(account.URI)
		if err != nil {
			return nil, nil, gtserror.Newf(
				"invalid uri %q: %w",
				account.URI, gtserror.SetUnretrievable(err),
			)
		}

		// Check URI scheme ahead of time for more useful errs.
		if uri.Scheme != "http" && uri.Scheme != "https" {
			err := errors.New("account URI scheme must be http or https")
			return nil, nil, gtserror.Newf(
				"invalid uri %q: %w",
				account.URI, gtserror.SetUnretrievable(err),
			)
		}
	}

	/*
		BY THIS POINT we must have an account URI set,
		either provided, pinned to the account, or
		obtained via webfinger call.
	*/

	// Check whether this account URI is a blocked domain / subdomain.
	if blocked, err := d.state.DB.IsDomainBlocked(ctx, uri.Host); err != nil {
		return nil, nil, gtserror.Newf("error checking blocked domain: %w", err)
	} else if blocked {
		return nil, nil, gtserror.Newf("%s is blocked", uri.Host)
	}

	// Mark deref+update handshake start.
	d.startHandshake(requestUser, uri)
	defer d.stopHandshake(requestUser, uri)

	if apubAcc == nil {
		// We were not given any (partial) ActivityPub
		// version of this account as a parameter.
		// Dereference latest version of the account.
		rsp, err := tsport.Dereference(ctx, uri)
		if err != nil {
			err := gtserror.Newf("error dereferencing %s: %w", uri, err)
			return nil, nil, gtserror.SetUnretrievable(err)
		}

		// Attempt to resolve ActivityPub acc from response.
		apubAcc, err = ap.ResolveAccountable(ctx, rsp.Body)

		// Tidy up now done.
		_ = rsp.Body.Close()

		if err != nil {
			// ResolveAccountable will set gtserror.WrongType
			// on the returned error, so we don't need to do it here.
			err = gtserror.Newf("error resolving accountable %s: %w", uri, err)
			return nil, nil, err
		}

		// Check whether input URI and final returned URI
		// have changed (i.e. we followed some redirects).
		if finalURIStr := rsp.Request.URL.String(); finalURIStr != uri.String() {
			// NOTE: this URI check + database call is performed
			// AFTER reading and closing response body, for performance.
			//
			// Check whether we have this account stored under *final* URI.
			alreadyAcc, err := d.state.DB.GetAccountByURI(ctx, finalURIStr)
			if err != nil && !errors.Is(err, db.ErrNoEntries) {
				return nil, nil, gtserror.Newf("db error getting account after redirects: %w", err)
			}

			if alreadyAcc != nil {
				// We had this account stored
				// under discovered final URI.
				//
				// Proceed with this account.
				account = alreadyAcc
			}

			// Update the input URI to
			// the final determined URI
			// for later URI checks.
			uri = rsp.Request.URL
		}
	}

	/*
		BY THIS POINT we must have the ActivityPub
		representation of the account, either provided,
		or obtained via a dereference call.
	*/

	// Convert the dereferenced AP account object to our GTS model.
	//
	// We put this in the variable latestAcc because we might want
	// to compare the provided account model with this fresh version,
	// in order to check if anything changed since we last saw it.
	latestAcc, err := d.converter.ASRepresentationToAccount(ctx,
		apubAcc,
		account.Domain,
	)
	if err != nil {
		// ASRepresentationToAccount will set Malformed on the
		// returned error, so we don't need to do it here.
		err = gtserror.Newf("error converting accountable to gts model for account %s: %w", uri, err)
		return nil, nil, err
	}

	if account.Username == "" {
		// Assume the host from the
		// ActivityPub representation.
		id := ap.GetJSONLDId(apubAcc)
		if id == nil {
			return nil, nil, gtserror.New("no id property found on person, or id was not an iri")
		}

		// Get IRI host value.
		accHost := id.Host

		// No username was provided, so no webfinger was attempted earlier.
		//
		// Now we have a username we can attempt again, to ensure up-to-date
		// accountDomain info. For this final attempt we should use the domain
		// of the ID of the dereffed account, rather than the URI we were given.
		//
		// This avoids cases where we were given a URI like
		// https://example.org/@someone@somewhere.else and we've been redirected
		// from example.org to somewhere.else: we want to take somewhere.else
		// as the accountDomain then, not the example.org we were redirected from.
		latestAcc.Domain, _, err = d.fingerRemoteAccount(ctx,
			tsport,
			latestAcc.Username,
			accHost,
		)
		if err != nil {
			// Webfingering account still failed, so we're not certain
			// what the accountDomain actually is. Exit here for safety.
			return nil, nil, gtserror.Newf(
				"error webfingering remote account %s@%s: %w",
				latestAcc.Username, accHost, err,
			)
		}
	}

	if latestAcc.Domain == "" {
		// Ensure we have a domain set by this point,
		// otherwise it gets stored as a local user!
		return nil, nil, gtserror.Newf("empty domain for %s", uri)
	}

	// Ensure the final parsed account URI / URL matches
	// the input URI we fetched (or received) it as.
	if expect := uri.String(); latestAcc.URI != expect &&
		latestAcc.URL != expect {
		return nil, nil, gtserror.Newf(
			"dereferenced account uri %s does not match %s",
			latestAcc.URI, expect,
		)
	}

	/*
		BY THIS POINT we have more or less a fully-formed
		representation of the target account, derived from
		a combination of webfinger lookups and dereferencing.
		Further fetching beyond this point is for peripheral
		things like account avatar, header, emojis, stats.
	*/

	// Ensure internal db ID is
	// set and update fetch time.
	latestAcc.ID = account.ID
	latestAcc.FetchedAt = time.Now()

	// Ensure the account's avatar media is populated, passing in the existing to check for changes.
	if err := d.fetchRemoteAccountAvatar(ctx, tsport, account, latestAcc); err != nil {
		log.Errorf(ctx, "error fetching remote avatar for account %s: %v", uri, err)
	}

	// Ensure the account's header media is populated, passing in the existing to check for changes.
	if err := d.fetchRemoteAccountHeader(ctx, tsport, account, latestAcc); err != nil {
		log.Errorf(ctx, "error fetching remote header for account %s: %v", uri, err)
	}

	// Fetch the latest remote account emoji IDs used in account display name/bio.
	if _, err = d.fetchRemoteAccountEmojis(ctx, latestAcc, requestUser); err != nil {
		log.Errorf(ctx, "error fetching remote emojis for account %s: %v", uri, err)
	}

	// Fetch followers/following count for this account.
	if err := d.fetchRemoteAccountStats(ctx, latestAcc, requestUser); err != nil {
		log.Errorf(ctx, "error fetching remote stats for account %s: %v", uri, err)
	}

	if account.IsNew() {
		// Prefer published/created time from
		// apubAcc, fall back to FetchedAt value.
		if latestAcc.CreatedAt.IsZero() {
			latestAcc.CreatedAt = latestAcc.FetchedAt
		}

		// Set time of update from the last-fetched date.
		latestAcc.UpdatedAt = latestAcc.FetchedAt

		// This is new, put it in the database.
		err := d.state.DB.PutAccount(ctx, latestAcc)
		if err != nil {
			return nil, nil, gtserror.Newf("error putting in database: %w", err)
		}
	} else {
		// Prefer published time from apubAcc,
		// fall back to previous stored value.
		if latestAcc.CreatedAt.IsZero() {
			latestAcc.CreatedAt = account.CreatedAt
		}

		// Set time of update from the last-fetched date.
		latestAcc.UpdatedAt = latestAcc.FetchedAt

		// This is an existing account, update the model in the database.
		if err := d.state.DB.UpdateAccount(ctx, latestAcc); err != nil {
			return nil, nil, gtserror.Newf("error updating database: %w", err)
		}
	}

	return latestAcc, apubAcc, nil
}

func (d *Dereferencer) fetchRemoteAccountAvatar(ctx context.Context, tsport transport.Transport, existing, latestAcc *gtsmodel.Account) error {
	if latestAcc.AvatarRemoteURL == "" {
		// No avatar set on newest model, leave
		// latest avatar attachment ID empty.
		return nil
	}

	// By default we keep the previous media attachment ID. This will only
	// be changed if and when we have the new media loaded into storage.
	latestAcc.AvatarMediaAttachmentID = existing.AvatarMediaAttachmentID

	// If we had a media attachment ID already, and the URL
	// of the attachment hasn't changed from existing -> latest,
	// then we may be able to just keep our existing attachment
	// without having to make any remote calls.
	if latestAcc.AvatarMediaAttachmentID != "" &&
		existing.AvatarRemoteURL == latestAcc.AvatarRemoteURL {
		// Ensure we have media attachment with the known ID.
		media, err := d.state.DB.GetAttachmentByID(ctx, existing.AvatarMediaAttachmentID)
		if err != nil && !errors.Is(err, db.ErrNoEntries) {
			return gtserror.Newf("error getting attachment %s: %w", existing.AvatarMediaAttachmentID, err)
		}

		// Ensure attachment has correct properties.
		if media != nil && media.RemoteURL == latestAcc.AvatarRemoteURL {
			// We already have the most up-to-date
			// media attachment, keep using it.
			return nil
		}
	}

	// If we reach here, we know we need to fetch the most
	// up-to-date version of the attachment from remote.

	// Parse and validate the newly provided media URL.
	avatarURI, err := url.Parse(latestAcc.AvatarRemoteURL)
	if err != nil {
		return gtserror.Newf("error parsing url %s: %w", latestAcc.AvatarRemoteURL, err)
	}

	// Acquire lock for derefs map.
	unlock := d.state.FedLocks.Lock(latestAcc.AvatarRemoteURL)
	unlock = util.DoOnce(unlock)
	defer unlock()

	// Look for an existing dereference in progress.
	processing, ok := d.derefAvatars[latestAcc.AvatarRemoteURL]

	if !ok {
		// Set the media data function to dereference avatar from URI.
		data := func(ctx context.Context) (io.ReadCloser, int64, error) {
			return tsport.DereferenceMedia(ctx, avatarURI)
		}

		// Create new media processing request from the media manager instance.
		processing = d.mediaManager.PreProcessMedia(data, latestAcc.ID, &media.AdditionalMediaInfo{
			Avatar:    func() *bool { v := true; return &v }(),
			RemoteURL: &latestAcc.AvatarRemoteURL,
		})

		// Store media in map to mark as processing.
		d.derefAvatars[latestAcc.AvatarRemoteURL] = processing

		defer func() {
			// On exit safely remove media from map.
			unlock := d.state.FedLocks.Lock(latestAcc.AvatarRemoteURL)
			delete(d.derefAvatars, latestAcc.AvatarRemoteURL)
			unlock()
		}()
	}

	// Unlock map.
	unlock()

	// Start media attachment loading (blocking call).
	if _, err := processing.LoadAttachment(ctx); err != nil {
		return gtserror.Newf("error loading attachment %s: %w", latestAcc.AvatarRemoteURL, err)
	}

	// Set the newly loaded avatar media attachment ID.
	latestAcc.AvatarMediaAttachmentID = processing.AttachmentID()

	return nil
}

func (d *Dereferencer) fetchRemoteAccountHeader(ctx context.Context, tsport transport.Transport, existing, latestAcc *gtsmodel.Account) error {
	if latestAcc.HeaderRemoteURL == "" {
		// No header set on newest model, leave
		// latest header attachment ID empty.
		return nil
	}

	// By default we keep the previous media attachment ID. This will only
	// be changed if and when we have the new media loaded into storage.
	latestAcc.HeaderMediaAttachmentID = existing.HeaderMediaAttachmentID

	// If we had a media attachment ID already, and the URL
	// of the attachment hasn't changed from existing -> latest,
	// then we may be able to just keep our existing attachment
	// without having to make any remote calls.
	if latestAcc.HeaderMediaAttachmentID != "" &&
		existing.HeaderRemoteURL == latestAcc.HeaderRemoteURL {
		// Ensure we have media attachment with the known ID.
		media, err := d.state.DB.GetAttachmentByID(ctx, existing.HeaderMediaAttachmentID)
		if err != nil && !errors.Is(err, db.ErrNoEntries) {
			return gtserror.Newf("error getting attachment %s: %w", existing.HeaderMediaAttachmentID, err)
		}

		// Ensure attachment has correct properties.
		if media != nil && media.RemoteURL == latestAcc.HeaderRemoteURL {
			// We already have the most up-to-date
			// media attachment, keep using it.
			return nil
		}
	}

	// If we reach here, we know we need to fetch the most
	// up-to-date version of the attachment from remote.

	// Parse and validate the newly provided media URL.
	headerURI, err := url.Parse(latestAcc.HeaderRemoteURL)
	if err != nil {
		return gtserror.Newf("error parsing url %s: %w", latestAcc.HeaderRemoteURL, err)
	}

	// Acquire lock for derefs map.
	unlock := d.state.FedLocks.Lock(latestAcc.HeaderRemoteURL)
	unlock = util.DoOnce(unlock)
	defer unlock()

	// Look for an existing dereference in progress.
	processing, ok := d.derefHeaders[latestAcc.HeaderRemoteURL]

	if !ok {
		// Set the media data function to dereference header from URI.
		data := func(ctx context.Context) (io.ReadCloser, int64, error) {
			return tsport.DereferenceMedia(ctx, headerURI)
		}

		// Create new media processing request from the media manager instance.
		processing = d.mediaManager.PreProcessMedia(data, latestAcc.ID, &media.AdditionalMediaInfo{
			Header:    func() *bool { v := true; return &v }(),
			RemoteURL: &latestAcc.HeaderRemoteURL,
		})

		// Store media in map to mark as processing.
		d.derefHeaders[latestAcc.HeaderRemoteURL] = processing

		defer func() {
			// On exit safely remove media from map.
			unlock := d.state.FedLocks.Lock(latestAcc.HeaderRemoteURL)
			delete(d.derefHeaders, latestAcc.HeaderRemoteURL)
			unlock()
		}()
	}

	// Unlock map.
	unlock()

	// Start media attachment loading (blocking call).
	if _, err := processing.LoadAttachment(ctx); err != nil {
		return gtserror.Newf("error loading attachment %s: %w", latestAcc.HeaderRemoteURL, err)
	}

	// Set the newly loaded header media attachment ID.
	latestAcc.HeaderMediaAttachmentID = processing.AttachmentID()

	return nil
}

func (d *Dereferencer) fetchRemoteAccountEmojis(ctx context.Context, targetAccount *gtsmodel.Account, requestingUsername string) (bool, error) {
	maybeEmojis := targetAccount.Emojis
	maybeEmojiIDs := targetAccount.EmojiIDs

	// It's possible that the account had emoji IDs set on it, but not Emojis
	// themselves, depending on how it was fetched before being passed to us.
	//
	// If we only have IDs, fetch the emojis from the db. We know they're in
	// there or else they wouldn't have IDs.
	if len(maybeEmojiIDs) > len(maybeEmojis) {
		maybeEmojis = make([]*gtsmodel.Emoji, 0, len(maybeEmojiIDs))
		for _, emojiID := range maybeEmojiIDs {
			maybeEmoji, err := d.state.DB.GetEmojiByID(ctx, emojiID)
			if err != nil {
				return false, err
			}
			maybeEmojis = append(maybeEmojis, maybeEmoji)
		}
	}

	// For all the maybe emojis we have, we either fetch them from the database
	// (if we haven't already), or dereference them from the remote instance.
	gotEmojis, err := d.populateEmojis(ctx, maybeEmojis, requestingUsername)
	if err != nil {
		return false, err
	}

	// Extract the ID of each fetched or dereferenced emoji, so we can attach
	// this to the account if necessary.
	gotEmojiIDs := make([]string, 0, len(gotEmojis))
	for _, e := range gotEmojis {
		gotEmojiIDs = append(gotEmojiIDs, e.ID)
	}

	var (
		changed  = false // have the emojis for this account changed?
		maybeLen = len(maybeEmojis)
		gotLen   = len(gotEmojis)
	)

	// if the length of everything is zero, this is simple:
	// nothing has changed and there's nothing to do
	if maybeLen == 0 && gotLen == 0 {
		return changed, nil
	}

	// if the *amount* of emojis on the account has changed, then the got emojis
	// are definitely different from the previous ones (if there were any) --
	// the account has either more or fewer emojis set on it now, so take the
	// discovered emojis as the new correct ones.
	if maybeLen != gotLen {
		changed = true
		targetAccount.Emojis = gotEmojis
		targetAccount.EmojiIDs = gotEmojiIDs
		return changed, nil
	}

	// if the lengths are the same but not all of the slices are
	// zero, something *might* have changed, so we have to check

	// 1. did we have emojis before that we don't have now?
	for _, maybeEmoji := range maybeEmojis {
		var stillPresent bool

		for _, gotEmoji := range gotEmojis {
			if maybeEmoji.URI == gotEmoji.URI {
				// the emoji we maybe had is still present now,
				// so we can stop checking gotEmojis
				stillPresent = true
				break
			}
		}

		if !stillPresent {
			// at least one maybeEmoji is no longer present in
			// the got emojis, so we can stop checking now
			changed = true
			targetAccount.Emojis = gotEmojis
			targetAccount.EmojiIDs = gotEmojiIDs
			return changed, nil
		}
	}

	// 2. do we have emojis now that we didn't have before?
	for _, gotEmoji := range gotEmojis {
		var wasPresent bool

		for _, maybeEmoji := range maybeEmojis {
			// check emoji IDs here as well, because unreferenced
			// maybe emojis we didn't already have would not have
			// had IDs set on them yet
			if gotEmoji.URI == maybeEmoji.URI && gotEmoji.ID == maybeEmoji.ID {
				// this got emoji was present already in the maybeEmoji,
				// so we can stop checking through maybeEmojis
				wasPresent = true
				break
			}
		}

		if !wasPresent {
			// at least one gotEmojis was not present in
			// the maybeEmojis, so we can stop checking now
			changed = true
			targetAccount.Emojis = gotEmojis
			targetAccount.EmojiIDs = gotEmojiIDs
			return changed, nil
		}
	}

	return changed, nil
}

func (d *Dereferencer) fetchRemoteAccountStats(ctx context.Context, account *gtsmodel.Account, requestUser string) error {
	// Ensure we have a stats model for this account.
	if account.Stats == nil {
		if err := d.state.DB.PopulateAccountStats(ctx, account); err != nil {
			return gtserror.Newf("db error getting account stats: %w", err)
		}
	}

	// We want to update stats by getting remote
	// followers/following/statuses counts for
	// this account.
	//
	// If we fail getting any particular stat,
	// it will just fall back to counting local.

	// Followers first.
	if count, err := d.countCollection(
		ctx,
		account.FollowersURI,
		requestUser,
	); err != nil {
		// Log this but don't bail.
		log.Warnf(ctx,
			"couldn't count followers for @%s@%s: %v",
			account.Username, account.Domain, err,
		)
	} else if count > 0 {
		// Positive integer is useful!
		account.Stats.FollowersCount = &count
	}

	// Now following.
	if count, err := d.countCollection(
		ctx,
		account.FollowingURI,
		requestUser,
	); err != nil {
		// Log this but don't bail.
		log.Warnf(ctx,
			"couldn't count following for @%s@%s: %v",
			account.Username, account.Domain, err,
		)
	} else if count > 0 {
		// Positive integer is useful!
		account.Stats.FollowingCount = &count
	}

	// Now statuses count.
	if count, err := d.countCollection(
		ctx,
		account.OutboxURI,
		requestUser,
	); err != nil {
		// Log this but don't bail.
		log.Warnf(ctx,
			"couldn't count statuses for @%s@%s: %v",
			account.Username, account.Domain, err,
		)
	} else if count > 0 {
		// Positive integer is useful!
		account.Stats.StatusesCount = &count
	}

	// Update stats now.
	if err := d.state.DB.UpdateAccountStats(
		ctx,
		account.Stats,
		"followers_count",
		"following_count",
		"statuses_count",
	); err != nil {
		return gtserror.Newf("db error updating account stats: %w", err)
	}

	return nil
}

// countCollection parses the given uriStr,
// dereferences the result as a collection
// type, and returns total items as 0, or
// a positive integer, or -1 if total items
// cannot be counted.
//
// An error will be returned for invalid non-empty
// URIs or dereferencing issues.
func (d *Dereferencer) countCollection(
	ctx context.Context,
	uriStr string,
	requestUser string,
) (int, error) {
	if uriStr == "" {
		return -1, nil
	}

	uri, err := url.Parse(uriStr)
	if err != nil {
		return -1, err
	}

	collect, err := d.dereferenceCollection(ctx, requestUser, uri)
	if err != nil {
		return -1, err
	}

	return collect.TotalItems(), nil
}

// dereferenceAccountFeatured dereferences an account's featuredCollectionURI (if not empty). For each discovered status, this status will
// be dereferenced (if necessary) and marked as pinned (if necessary). Then, old pins will be removed if they're not included in new pins.
func (d *Dereferencer) dereferenceAccountFeatured(ctx context.Context, requestUser string, account *gtsmodel.Account) error {
	uri, err := url.Parse(account.FeaturedCollectionURI)
	if err != nil {
		return err
	}

	collect, err := d.dereferenceCollection(ctx, requestUser, uri)
	if err != nil {
		return err
	}

	// Get previous pinned statuses (we'll need these later).
	wasPinned, err := d.state.DB.GetAccountPinnedStatuses(ctx, account.ID)
	if err != nil && !errors.Is(err, db.ErrNoEntries) {
		return gtserror.Newf("error getting account pinned statuses: %w", err)
	}

	var statusURIs []*url.URL

	for {
		// Get next collect item.
		item := collect.NextItem()
		if item == nil {
			break
		}

		// Check for available IRI.
		itemIRI, _ := pub.ToId(item)
		if itemIRI == nil {
			continue
		}

		if itemIRI.Host != uri.Host {
			// If this status doesn't share a host with its featured
			// collection URI, we shouldn't trust it. Just move on.
			continue
		}

		// Already append this status URI to our slice.
		// We do this here so that even if we can't get
		// the status in the next part for some reason,
		// we still know it was *meant* to be pinned.
		statusURIs = append(statusURIs, itemIRI)

		// Search for status by URI. Note this may return an existing model
		// we have stored with an error from attempted update, so check both.
		status, _, _, err := d.getStatusByURI(ctx, requestUser, itemIRI)
		if err != nil {
			log.Errorf(ctx, "error getting status from featured collection %s: %v", itemIRI, err)

			if status == nil {
				// This is only unactionable
				// if no status was returned.
				continue
			}
		}

		// If the status was already pinned,
		// we don't need to do anything.
		if !status.PinnedAt.IsZero() {
			continue
		}

		if status.AccountURI != account.URI {
			// Someone's pinned a status that doesn't
			// belong to them, this doesn't work for us.
			continue
		}

		if status.BoostOfID != "" {
			// Someone's pinned a boost. This
			// also doesn't work for us. (note
			// we check using BoostOfID since
			// BoostOfURI isn't *always* set).
			continue
		}

		// All conditions are met for this status to
		// be pinned, so we can finally update it.
		status.PinnedAt = time.Now()
		if err := d.state.DB.UpdateStatus(ctx, status, "pinned_at"); err != nil {
			log.Errorf(ctx, "error updating status in featured collection %s: %v", status.URI, err)
			continue
		}
	}

	// Now that we know which statuses are pinned, we should
	// *unpin* previous pinned statuses that aren't included.
outerLoop:
	for _, status := range wasPinned {
		for _, statusURI := range statusURIs {
			if status.URI == statusURI.String() {
				// This status is included in most recent
				// pinned uris. No need to keep checking.
				continue outerLoop
			}
		}

		// Status was pinned before, but is not included
		// in most recent pinned uris, so unpin it now.
		status.PinnedAt = time.Time{}
		if err := d.state.DB.UpdateStatus(ctx, status, "pinned_at"); err != nil {
			log.Errorf(ctx, "error unpinning status %s: %v", status.URI, err)
			continue
		}
	}

	return nil
}