Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 31 additions & 1 deletion internal/handler/composer.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import (
"fmt"
"io"
"net/http"
"path"
"strings"
"time"

Expand Down Expand Up @@ -182,9 +183,10 @@ func expandMinifiedVersions(versionList []any) []any {
}

// Merge inherited fields into a new map, then overlay current fields.
// Deep copy values to avoid shared references between versions.
merged := make(map[string]any, len(inherited)+len(vmap))
for k, val := range inherited {
merged[k] = val
merged[k] = deepCopyValue(val)
}
for k, val := range vmap {
merged[k] = val
Expand All @@ -199,6 +201,26 @@ func expandMinifiedVersions(versionList []any) []any {
return expanded
}

// deepCopyValue returns a deep copy of JSON-like values (maps, slices, scalars).
func deepCopyValue(v any) any {
	switch typed := v.(type) {
	case map[string]any:
		clone := make(map[string]any, len(typed))
		for key, elem := range typed {
			clone[key] = deepCopyValue(elem)
		}
		return clone
	case []any:
		clone := make([]any, len(typed))
		for i := range typed {
			clone[i] = deepCopyValue(typed[i])
		}
		return clone
	default:
		// Scalars produced by encoding/json (string, float64, bool, nil)
		// are immutable, so they can be shared safely.
		return v
	}
}

// filterAndRewriteVersions applies cooldown filtering and rewrites dist URLs
// for a single package's version list.
func (h *ComposerHandler) filterAndRewriteVersions(packageName string, versionList []any) []any {
Expand Down Expand Up @@ -266,6 +288,14 @@ func (h *ComposerHandler) rewriteDistURL(vmap map[string]any, packageName, versi
filename = url[idx+1:]
}

// GitHub zipball URLs end with a bare commit hash (no extension).
// Append .zip so the archives library can detect the format.
if path.Ext(filename) == "" {
if distType, _ := dist["type"].(string); distType == "zip" {
filename += ".zip"
}
}

parts := strings.SplitN(packageName, "/", vendorPackageParts)
if len(parts) == vendorPackageParts {
newURL := fmt.Sprintf("%s/composer/files/%s/%s/%s/%s",
Expand Down
146 changes: 146 additions & 0 deletions internal/handler/composer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package handler
import (
"encoding/json"
"log/slog"
"strings"
"testing"
"time"

Expand Down Expand Up @@ -245,6 +246,151 @@ func TestComposerRewriteMetadataCooldownPreservesNames(t *testing.T) {
}
}

func TestComposerRewriteDistURLGitHubZipball(t *testing.T) {
	// GitHub zipball URLs end with a bare commit hash, no file extension.
	// The proxy must produce a filename with .zip extension so that the
	// archives library can detect the format when browsing source.
	handler := &ComposerHandler{
		proxy:    testProxy(),
		proxyURL: "http://localhost:8080",
	}

	version := map[string]any{
		"version": "v7.4.8",
		"dist": map[string]any{
			"url":       "https://api.github.com/repos/symfony/asset/zipball/d2e2f014ccd6ec9fae8dbe6336a4164346a2a856",
			"type":      "zip",
			"shasum":    "",
			"reference": "d2e2f014ccd6ec9fae8dbe6336a4164346a2a856",
		},
	}

	handler.rewriteDistURL(version, "symfony/asset", "v7.4.8")

	// The rewritten URL's filename must have a .zip extension
	rewritten := version["dist"].(map[string]any)["url"].(string)
	if !strings.HasSuffix(rewritten, ".zip") {
		t.Errorf("rewritten dist URL filename has no .zip extension: %s", rewritten)
	}
}

func TestComposerRewriteMetadataGitHubZipballFilenames(t *testing.T) {
	// End-to-end: metadata with GitHub zipball URLs should produce
	// download URLs that end in .zip so browse source can open them.
	handler := &ComposerHandler{
		proxy:    testProxy(),
		proxyURL: "http://localhost:8080",
	}

	metadata := `{
		"packages": {
			"symfony/config": [
				{
					"version": "v7.4.8",
					"dist": {
						"url": "https://api.github.com/repos/symfony/config/zipball/c7369cc1da250fcbfe0c5a9d109e419661549c39",
						"type": "zip",
						"reference": "c7369cc1da250fcbfe0c5a9d109e419661549c39"
					}
				}
			]
		}
	}`

	rewritten, err := handler.rewriteMetadata([]byte(metadata))
	if err != nil {
		t.Fatalf("rewriteMetadata failed: %v", err)
	}

	var decoded map[string]any
	if err := json.Unmarshal(rewritten, &decoded); err != nil {
		t.Fatalf("failed to parse output: %v", err)
	}

	versions := decoded["packages"].(map[string]any)["symfony/config"].([]any)
	first := versions[0].(map[string]any)
	distURL := first["dist"].(map[string]any)["url"].(string)

	if !strings.HasSuffix(distURL, ".zip") {
		t.Errorf("rewritten URL should end in .zip, got %s", distURL)
	}
}

func TestComposerExpandMinifiedSharedDistReferences(t *testing.T) {
	// When a minified version inherits the dist field from a previous version
	// (i.e. it doesn't include its own dist), expanding + rewriting must not
	// corrupt the dist URLs via shared map references.
	handler := &ComposerHandler{
		proxy:    testProxy(),
		proxyURL: "http://localhost:8080",
	}

	// In this minified payload, v5.3.0 does NOT include a dist field,
	// so it inherits v5.4.0's dist. After expansion and URL rewriting,
	// each version must have its own correct dist URL.
	payload := `{
		"minified": "composer/2.0",
		"packages": {
			"vendor/pkg": [
				{
					"name": "vendor/pkg",
					"version": "5.4.0",
					"dist": {
						"url": "https://api.github.com/repos/vendor/pkg/zipball/aaa111",
						"type": "zip",
						"reference": "aaa111"
					}
				},
				{
					"version": "5.3.0"
				}
			]
		}
	}`

	rewritten, err := handler.rewriteMetadata([]byte(payload))
	if err != nil {
		t.Fatalf("rewriteMetadata failed: %v", err)
	}

	var decoded map[string]any
	if err := json.Unmarshal(rewritten, &decoded); err != nil {
		t.Fatalf("failed to parse output: %v", err)
	}

	versions := decoded["packages"].(map[string]any)["vendor/pkg"].([]any)
	if len(versions) != 2 {
		t.Fatalf("expected 2 versions, got %d", len(versions))
	}

	first := versions[0].(map[string]any)
	second := versions[1].(map[string]any)

	url1 := first["dist"].(map[string]any)["url"].(string)
	url2 := second["dist"].(map[string]any)["url"].(string)

	// Each version must have its own URL with its own version in the path
	if !strings.Contains(url1, "/5.4.0/") {
		t.Errorf("v5.4.0 dist URL should contain /5.4.0/, got %s", url1)
	}
	if !strings.Contains(url2, "/5.3.0/") {
		t.Errorf("v5.3.0 dist URL should contain /5.3.0/, got %s", url2)
	}

	// The two URLs must be different
	if url1 == url2 {
		t.Errorf("both versions have the same dist URL (shared reference bug): %s", url1)
	}
}

func TestComposerRewriteMetadataCooldown(t *testing.T) {
now := time.Now()
old := now.Add(-10 * 24 * time.Hour).Format(time.RFC3339)
Expand Down
89 changes: 72 additions & 17 deletions internal/server/browse.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package server

import (
"bytes"
"encoding/json"
"fmt"
"io"
Expand All @@ -17,17 +18,75 @@ import (

const contentTypePlainText = "text/plain; charset=utf-8"

// getStripPrefix returns the path prefix to strip for a given ecosystem.
// npm packages wrap content in a "package/" directory.
func getStripPrefix(ecosystem string) string {
switch ecosystem {
case "npm":
return "package/"
default:
// archiveFilename returns a filename suitable for archive format detection.
// Some ecosystems (e.g. composer) store artifacts with bare hash filenames
// that have no extension. This adds .zip when the original has no extension
// and the content is likely a zip archive.
func archiveFilename(filename string) string {
	// A filename that already carries an extension is usable as-is.
	if path.Ext(filename) != "" {
		return filename
	}
	return filename + ".zip"
}

// detectSingleRootDir returns the single top-level directory name if all files
// in the archive live under one common directory (e.g. GitHub zipballs use
// "repo-hash/"). Returns "" if there's no single root or the archive is flat.
func detectSingleRootDir(reader archives.Reader) string {
	files, err := reader.List()
	if err != nil || len(files) == 0 {
		// Listing errors are treated as "no prefix"; the caller will open
		// the archive without stripping anything.
		return ""
	}

	var root string
	for _, f := range files {
		// An entry whose path contains no "/" is a file (or bare directory
		// entry) at the top level, so the archive is flat — there is no
		// common directory to strip. The previous SplitN-based check let a
		// single top-level file masquerade as a root directory.
		dir, _, found := strings.Cut(f.Path, "/")
		if !found {
			return ""
		}
		if root == "" {
			root = dir
		} else if dir != root {
			// Two different top-level directories: no single root.
			return ""
		}
	}

	// root can still be empty if every path started with "/" — treat that
	// as "no detectable root" rather than returning a bare "/".
	if root == "" {
		return ""
	}
	return root + "/"
}

// openArchive opens a cached artifact as an archive reader, auto-detecting
// and stripping a single top-level directory prefix (like GitHub zipballs).
// For npm, the hardcoded "package/" prefix takes precedence.
func openArchive(filename string, content io.Reader, ecosystem string) (archives.Reader, error) { //nolint:ireturn // wraps multiple archive implementations
	name := archiveFilename(filename)

	// npm always uses package/ prefix
	if ecosystem == "npm" {
		return archives.OpenWithPrefix(name, content, "package/")
	}

	// Buffer the artifact so it can be opened twice: once to probe for a
	// common root directory, and once more with that prefix stripped.
	payload, err := io.ReadAll(content)
	if err != nil {
		return nil, fmt.Errorf("reading artifact: %w", err)
	}

	probe, err := archives.Open(name, bytes.NewReader(payload))
	if err != nil {
		return nil, err
	}
	rootPrefix := detectSingleRootDir(probe)
	_ = probe.Close()

	return archives.OpenWithPrefix(name, bytes.NewReader(payload), rootPrefix)
}


// BrowseListResponse contains the file listing for a directory in an archives.
type BrowseListResponse struct {
Path string `json:"path"`
Expand Down Expand Up @@ -174,9 +233,8 @@ func (s *Server) browseList(w http.ResponseWriter, r *http.Request, ecosystem, n
}
defer func() { _ = artifactReader.Close() }()

// Open archive with appropriate prefix stripping
stripPrefix := getStripPrefix(ecosystem)
archiveReader, err := archives.OpenWithPrefix(cachedArtifact.Filename, artifactReader, stripPrefix)
// Open archive with auto-detected prefix stripping
archiveReader, err := openArchive(cachedArtifact.Filename, artifactReader, ecosystem)
if err != nil {
s.logger.Error("failed to open archive", "error", err, "filename", cachedArtifact.Filename)
http.Error(w, "failed to open archive", http.StatusInternalServerError)
Expand Down Expand Up @@ -269,9 +327,8 @@ func (s *Server) browseFile(w http.ResponseWriter, r *http.Request, ecosystem, n
}
defer func() { _ = artifactReader.Close() }()

// Open archive with appropriate prefix stripping
stripPrefix := getStripPrefix(ecosystem)
archiveReader, err := archives.OpenWithPrefix(cachedArtifact.Filename, artifactReader, stripPrefix)
// Open archive with auto-detected prefix stripping
archiveReader, err := openArchive(cachedArtifact.Filename, artifactReader, ecosystem)
if err != nil {
s.logger.Error("failed to open archive", "error", err, "filename", cachedArtifact.Filename)
http.Error(w, "failed to open archive", http.StatusInternalServerError)
Expand Down Expand Up @@ -484,17 +541,15 @@ func (s *Server) compareDiff(w http.ResponseWriter, r *http.Request, ecosystem,
}
defer func() { _ = toReader.Close() }()

stripPrefix := getStripPrefix(ecosystem)

fromArchive, err := archives.OpenWithPrefix(fromArtifact.Filename, fromReader, stripPrefix)
fromArchive, err := openArchive(fromArtifact.Filename, fromReader, ecosystem)
if err != nil {
s.logger.Error("failed to open from archive", "error", err)
http.Error(w, "failed to open from archive", http.StatusInternalServerError)
return
}
defer func() { _ = fromArchive.Close() }()

toArchive, err := archives.OpenWithPrefix(toArtifact.Filename, toReader, stripPrefix)
toArchive, err := openArchive(toArtifact.Filename, toReader, ecosystem)
if err != nil {
s.logger.Error("failed to open to archive", "error", err)
http.Error(w, "failed to open to archive", http.StatusInternalServerError)
Expand Down
Loading