Add an immutable tarball link to archive download headers for Nix
This allows `nix flake metadata` and Nix in general to lock a *branch* tarball link so that it fetches the correct commit even after the branch has been updated with a newer version.

For further context, Nix flakes are a feature that, among other things, allows "inputs" such as "github:someuser/somerepo", "https://some-tarball-service/some-tarball.tar.gz", "sourcehut:~meow/nya" or similar. This feature allows our users to fetch tarballs of Git-based inputs to their builds rather than using Git to fetch them, saving significant download time.

There is presently no Gitea- or Forgejo-specific fetcher in Nix, and we don't particularly wish to have one. Ideally (as a developer on a Nix implementation myself) we could just use the generic tarball fetcher and not add Forgejo-specific support, but to do so we need additional metadata to know which commit a given *branch* tarball represents, which is the purpose of the Link header added here.

The result of this patch is that a Nix user can specify `inputs.something.url = "https://forgejo-host/some/project/archive/main.tar.gz"` in flake.nix and get a link to a concrete tarball for the actual commit recorded in the lock file; when they run `nix flake update` in the future, they will get the latest commit on that branch.

Example of it working locally:

» nix flake metadata --refresh 'http://localhost:3000/api/v1/repos/jade/cats/archive/main.tar.gz?dir=configs/nix'
Resolved URL:  http://localhost:3000/api/v1/repos/jade/cats/archive/main.tar.gz?dir=configs/nix
Locked URL:    http://localhost:3000/api/v1/repos/jade/cats/archive/804ede182b6b66469b23ea4d21eece52766b7a06.tar.gz?dir=configs/nix&narHash=sha256-yP7KkDVfuixZzs0fsqhSETXFC0y8m6nmPLw2GrAMxKQ%3D
Description:   Computers with the nixos
Path:          /nix/store/s856c6yqghyan4v0zy6jj19ksv0q22nx-source
Revision:      804ede182b6b66469b23ea4d21eece52766b7a06
Last modified: 2024-05-02 00:48:32

For details on the header value, see:
https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
(cherry picked from commit 6631f56ebf)
Parent: 6d9fe1db67 · Commit: 900381d6e9
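To make the locking behaviour concrete, here is a minimal Go sketch (not part of this change; the host and repository are placeholders) of what a generic tarball fetcher does with the new header: request the branch archive URL once, then read the `rel="immutable"` entry from the `Link` header to obtain the commit-pinned URL that goes into the lock file.

// Minimal sketch of a generic tarball fetcher consuming the new header.
// The host and repository below are hypothetical placeholders.
package main

import (
	"fmt"
	"net/http"
	"regexp"
)

func main() {
	// Branch archive URL, as it would appear in flake.nix.
	branchURL := "https://my-forgejo/api/v1/repos/someuser/somerepo/archive/main.tar.gz"

	resp, err := http.Get(branchURL)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The immutable link points at a commit-hash archive instead of the branch.
	linkRe := regexp.MustCompile(`<(?P<url>[^>]+)>; rel="immutable"`)
	m := linkRe.FindStringSubmatch(resp.Header.Get("Link"))
	if m == nil {
		fmt.Println("no immutable link advertised")
		return
	}
	fmt.Println("locked URL:", m[1])
}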
@@ -0,0 +1,5 @@
+Support the [Nix tarball fetcher immutable link protocol](https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md) on archive URLs, so Forgejo-generated tarballs for branches will go into Nix's `flake.lock` as their respective commit URLs and `nix flake update` will just work. This allows natively fetching Forgejo repositories for Nix flake inputs as tarballs rather than as Git repositories, significantly improving fetch times and avoiding depending on Git at runtime.
+
+Concretely, Forgejo now returns a header of the following format from its archive URLs: `Link: <https://my-forgejo/api/v1/someuser/somerepo/archive/some-commit-hash.tar.gz?rev=some-commit-hash>; rel="immutable"`.
+
+Example usage: `inputs.meow.url = "https://my-forgejo/someuser/somerepo/archive/main.tar.gz";` in `flake.nix`. For a private repository, configure `netrc-file` in `nix.conf` and use `https://my-forgejo/api/v1/repos/someuser/somerepo/archive/main.tar.gz` as the URL instead, since the normal archive endpoint doesn't support tokens.
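Following up on the private-repository note in the release notes above, here is a hedged Go sketch (hypothetical host, repository, and token) of fetching the API archive endpoint with token authentication, which is the same endpoint Nix would reach via `netrc-file`; the `rel="immutable"` Link header is returned there as well, so locking still works.

// Hypothetical sketch: fetching a private repository archive through the API
// endpoint with an access token. Host, repository, and token are placeholders.
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	req, err := http.NewRequest("GET",
		"https://my-forgejo/api/v1/repos/someuser/somerepo/archive/main.tar.gz", nil)
	if err != nil {
		panic(err)
	}
	// Forgejo API token authentication; the token value is a placeholder.
	req.Header.Set("Authorization", "token 0123456789abcdef")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The rel="immutable" Link header is present here too, so the tarball
	// can still be pinned to a specific commit in flake.lock.
	fmt.Println("Link:", resp.Header.Get("Link"))

	data, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println("archive bytes:", len(data))
}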
@@ -326,6 +326,12 @@ func archiveDownload(ctx *context.APIContext) {
 func download(ctx *context.APIContext, archiveName string, archiver *repo_model.RepoArchiver) {
 	downloadName := ctx.Repo.Repository.Name + "-" + archiveName
 
+	// Add nix format link header so tarballs lock correctly:
+	// https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
+	ctx.Resp.Header().Add("Link", fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
+		ctx.Repo.Repository.APIURL(),
+		archiver.CommitID, archiver.CommitID))
+
 	rPath := archiver.RelativePath()
 	if setting.RepoArchive.Storage.MinioConfig.ServeDirect {
 		// If we have a signed url (S3, object storage), redirect to this directly.
@@ -480,6 +480,12 @@ func Download(ctx *context.Context) {
 func download(ctx *context.Context, archiveName string, archiver *repo_model.RepoArchiver) {
 	downloadName := ctx.Repo.Repository.Name + "-" + archiveName
 
+	// Add nix format link header so tarballs lock correctly:
+	// https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
+	ctx.Resp.Header().Add("Link", fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
+		ctx.Repo.Repository.APIURL(),
+		archiver.CommitID, archiver.CommitID))
+
 	rPath := archiver.RelativePath()
 	if setting.RepoArchive.Storage.MinioConfig.ServeDirect {
 		// If we have a signed url (S3, object storage), redirect to this directly.
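The two handlers above emit the same header. As a standalone illustration, the same format string evaluated with placeholder values (the API URL is made up; the commit hash reuses the one from the commit message example):

// Standalone sketch of the header value produced by the download handlers above.
package main

import "fmt"

func main() {
	apiURL := "https://my-forgejo/api/v1/repos/someuser/somerepo" // stands in for ctx.Repo.Repository.APIURL()
	commitID := "804ede182b6b66469b23ea4d21eece52766b7a06"        // stands in for archiver.CommitID

	// Same format string as in the handlers above.
	link := fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
		apiURL, commitID, commitID)

	// Prints:
	// <https://my-forgejo/api/v1/repos/someuser/somerepo/archive/804ede182b6b66469b23ea4d21eece52766b7a06.tar.gz?rev=804ede182b6b66469b23ea4d21eece52766b7a06>; rel="immutable"
	fmt.Println(link)
}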
@@ -8,6 +8,7 @@ import (
 	"io"
 	"net/http"
 	"net/url"
+	"regexp"
 	"testing"
 
 	auth_model "code.gitea.io/gitea/models/auth"
@@ -41,6 +42,16 @@ func TestAPIDownloadArchive(t *testing.T) {
 	assert.Len(t, bs, 266)
 	assert.EqualValues(t, "application/gzip", resp.Header().Get("Content-Type"))
 
+	// Must return a link to a commit ID as the "immutable" archive link
+	linkHeaderRe := regexp.MustCompile(`<(?P<url>https?://.*/api/v1/repos/user2/repo1/archive/[a-f0-9]+\.tar\.gz.*)>; rel="immutable"`)
+	m := linkHeaderRe.FindStringSubmatch(resp.Header().Get("Link"))
+	assert.NotEmpty(t, m[1])
+	resp = MakeRequest(t, NewRequest(t, "GET", m[1]).AddTokenAuth(token), http.StatusOK)
+	bs2, err := io.ReadAll(resp.Body)
+	assert.NoError(t, err)
+	// The locked URL should give the same bytes as the non-locked one
+	assert.EqualValues(t, bs, bs2)
+
 	link, _ = url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/archive/master.bundle", user2.Name, repo.Name))
 	resp = MakeRequest(t, NewRequest(t, "GET", link.String()).AddTokenAuth(token), http.StatusOK)
 	bs, err = io.ReadAll(resp.Body)