Skip to content
471 changes: 467 additions & 4 deletions benchmarks/migration-status.json

Large diffs are not rendered by default.

79 changes: 79 additions & 0 deletions internal/cache/cachepaths/cachepaths.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
// Package cachepaths resolves the APM cache root and bucket paths.
package cachepaths

import (
"os"
"path/filepath"
"runtime"
"sync"
)

// Cache bucket path segments relative to the cache root.
// The _v1 suffix versions the on-disk layout so future format changes
// can move to a new bucket without invalidating tooling expectations.
const (
	// GitDBBucket holds git object databases.
	GitDBBucket = "git/db_v1"
	// GitCheckoutsBucket holds materialized git checkouts.
	GitCheckoutsBucket = "git/checkouts_v1"
	// HTTPBucket holds cached HTTP content.
	HTTPBucket = "http_v1"
)

var (
	// tempCacheMu guards tempCacheDir.
	tempCacheMu sync.Mutex
	// tempCacheDir memoizes the per-invocation temp directory created by
	// getTempCacheDir so repeated no-cache calls share one directory.
	tempCacheDir string
)

// GetCacheRoot resolves the cache root directory.
// If noCache is true or APM_NO_CACHE env is set, returns a per-invocation temp dir.
// Otherwise the APM_CACHE_DIR environment variable (made absolute) overrides
// the platform default. The chosen directory is created with mode 0700
// before being returned.
func GetCacheRoot(noCache bool) (string, error) {
	if noCache || isNoCacheEnv() {
		return getTempCacheDir()
	}

	root := os.Getenv("APM_CACHE_DIR")
	if root == "" {
		root = defaultCacheDir()
	} else {
		abs, err := filepath.Abs(root)
		if err != nil {
			return "", err
		}
		root = abs
	}
	return root, os.MkdirAll(root, 0o700)
}

// isNoCacheEnv reports whether APM_NO_CACHE is set to one of the
// recognized truthy values ("1", "true", "yes"; case-sensitive).
func isNoCacheEnv() bool {
	switch os.Getenv("APM_NO_CACHE") {
	case "1", "true", "yes":
		return true
	}
	return false
}

// getTempCacheDir lazily creates a single temporary cache directory for this
// process invocation and reuses it on subsequent calls. Safe for concurrent
// use; the mutex protects the memoized path.
func getTempCacheDir() (string, error) {
	tempCacheMu.Lock()
	defer tempCacheMu.Unlock()

	if tempCacheDir == "" {
		created, err := os.MkdirTemp("", "apm-cache-*")
		if err != nil {
			return "", err
		}
		tempCacheDir = created
	}
	return tempCacheDir, nil
}

func defaultCacheDir() string {
switch runtime.GOOS {
case "windows":
local := os.Getenv("LOCALAPPDATA")
if local == "" {
local = filepath.Join(os.Getenv("USERPROFILE"), "AppData", "Local")
}
return filepath.Join(local, "apm", "Cache")
case "darwin":
if xdg := os.Getenv("XDG_CACHE_HOME"); xdg != "" {
return filepath.Join(xdg, "apm")
}
home, _ := os.UserHomeDir()
return filepath.Join(home, "Library", "Caches", "apm")
default:
if xdg := os.Getenv("XDG_CACHE_HOME"); xdg != "" {
return filepath.Join(xdg, "apm")
}
home, _ := os.UserHomeDir()
return filepath.Join(home, ".cache", "apm")
}
}
77 changes: 77 additions & 0 deletions internal/cache/integrity/integrity.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
// Package integrity verifies cached git checkout integrity.
package integrity

import (
"os"
"path/filepath"
"strings"
)

// ReadHeadSHA returns the resolved 40-char SHA at HEAD, or empty string on failure.
func ReadHeadSHA(checkoutDir string) string {
gitPath := filepath.Join(checkoutDir, ".git")
info, err := os.Stat(gitPath)
if err != nil {
return ""
}

var gitDir string
if !info.IsDir() {
content, err := os.ReadFile(gitPath)
if err != nil {
return ""
}
line := strings.TrimSpace(string(content))
if !strings.HasPrefix(line, "gitdir:") {
return ""
}
target := strings.TrimSpace(line[len("gitdir:"):])
abs, err := filepath.Abs(filepath.Join(checkoutDir, target))
if err != nil {
return ""
}
gitDir = abs
} else {
gitDir = gitPath
}

headPath := filepath.Join(gitDir, "HEAD")
headContent, err := os.ReadFile(headPath)
if err != nil {
return ""
}
head := strings.TrimSpace(string(headContent))
if strings.HasPrefix(head, "ref: ") {
refName := strings.TrimPrefix(head, "ref: ")
refFile := filepath.Join(gitDir, refName)
data, err := os.ReadFile(refFile)
if err != nil {
// Try packed-refs
return resolvePackedRef(gitDir, refName)
}
return strings.TrimSpace(string(data))
}
return head
}

func resolvePackedRef(gitDir, refName string) string {
data, err := os.ReadFile(filepath.Join(gitDir, "packed-refs"))
if err != nil {
return ""
}
for _, line := range strings.Split(string(data), "\n") {
if strings.HasSuffix(line, " "+refName) {
parts := strings.Fields(line)
if len(parts) >= 1 {
return parts[0]
}
}
}
return ""
}

// VerifyCheckout reports whether the checkout's HEAD resolves to expectedSHA.
// An unreadable or unresolvable HEAD (empty SHA) never verifies.
func VerifyCheckout(checkoutDir, expectedSHA string) bool {
	sha := ReadHeadSHA(checkoutDir)
	if sha == "" {
		return false
	}
	return sha == expectedSHA
}
95 changes: 95 additions & 0 deletions internal/cache/urlnormalize/urlnormalize.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
// Package urlnormalize provides URL normalization for cache key derivation.
package urlnormalize

import (
"crypto/sha256"
"fmt"
"regexp"
"strings"
)

// scpLikeRe matches scp-style git addresses of the form user@host:path.
var scpLikeRe = regexp.MustCompile(`^(?P<user>[a-zA-Z0-9_][a-zA-Z0-9_.+-]*)@(?P<host>[^:/]+):(?P<path>.+)$`)

// defaultPorts maps URL schemes to the port implied when none is given.
var defaultPorts = map[string]string{
	"https": "443",
	"ssh":   "22",
	"http":  "80",
	"git":   "9418",
}

// NormalizeRepoURL normalizes a git repository URL for cache key derivation:
// a trailing ".git" is dropped, scp-style addresses become ssh:// URLs, the
// scheme and host are lowercased, passwords and scheme-default ports are
// stripped, and paths are lowercased for the major case-insensitive hosts.
func NormalizeRepoURL(url string) string {
	u := strings.TrimSuffix(strings.TrimSpace(url), ".git")

	// Rewrite scp-style addresses (user@host:path) as ssh:// URLs so the
	// generic parsing below applies uniformly.
	if m := scpLikeRe.FindStringSubmatch(u); m != nil {
		u = fmt.Sprintf("ssh://%s@%s/%s",
			m[scpLikeRe.SubexpIndex("user")],
			strings.ToLower(m[scpLikeRe.SubexpIndex("host")]),
			m[scpLikeRe.SubexpIndex("path")])
	}

	// Split off the scheme, if any.
	scheme, rest := "", u
	if head, tail, found := strings.Cut(u, "://"); found {
		scheme, rest = strings.ToLower(head), tail
	}

	// Split authority from path at the first slash.
	hostport, path := rest, ""
	if i := strings.Index(rest, "/"); i >= 0 {
		hostport, path = rest[:i], rest[i:]
	}

	// Peel userinfo off the authority and drop any password component.
	userinfo := ""
	if i := strings.LastIndex(hostport, "@"); i >= 0 {
		userinfo, hostport = hostport[:i], hostport[i+1:]
	}
	if i := strings.Index(userinfo, ":"); i >= 0 {
		userinfo = userinfo[:i]
	}

	// Lowercase the host and strip a port matching the scheme default.
	host := strings.ToLower(hostport)
	if i := strings.LastIndex(host, ":"); i >= 0 {
		if dp, ok := defaultPorts[scheme]; ok && host[i+1:] == dp {
			host = host[:i]
		}
	}

	// The major hosting providers treat repository paths case-insensitively.
	switch host {
	case "github.com", "gitlab.com", "bitbucket.org":
		path = strings.ToLower(path)
	}

	var b strings.Builder
	if scheme != "" {
		b.WriteString(scheme)
		b.WriteString("://")
	}
	if userinfo != "" {
		b.WriteString(userinfo)
		b.WriteString("@")
	}
	b.WriteString(host)
	b.WriteString(path)
	return b.String()
}

// CacheKey returns the first 16 hex chars of SHA256 of the normalized URL.
func CacheKey(url string) string {
	digest := sha256.Sum256([]byte(NormalizeRepoURL(url)))
	// The first 8 bytes render as exactly 16 hex characters.
	return fmt.Sprintf("%x", digest[:8])
}
82 changes: 82 additions & 0 deletions internal/compilation/agentformatter/agentformatter.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
// Package agentformatter provides CLAUDE.md and GEMINI.md formatters for APM compilation.
package agentformatter

import (
	"path/filepath"
	"strconv"
	"strings"
)

// ClaudePlacement holds the result of CLAUDE.md placement analysis.
type ClaudePlacement struct {
	// ClaudePath is the target path for the CLAUDE.md file — presumably
	// relative to the project root; confirm against the placement analyzer.
	ClaudePath string
	// InstructionFiles lists instruction sources contributing to this placement.
	InstructionFiles []string
	// AgentFiles lists agent definition files associated with this placement.
	AgentFiles []string
	// Dependencies lists dependencies this placement draws from.
	Dependencies []string
	// CoveragePatterns lists patterns describing what this placement covers.
	CoveragePatterns []string
	// SourceAttribution maps content back to its originating source —
	// NOTE(review): key/value semantics not visible here; verify at the producer.
	SourceAttribution map[string]string
}

// ClaudeCompilationResult holds the result of CLAUDE.md compilation.
type ClaudeCompilationResult struct {
	// Success is true when compilation produced usable output.
	Success bool
	// Placements are the individual placement analyses produced.
	Placements []ClaudePlacement
	// ContentMap maps output path -> compiled content.
	ContentMap map[string]string // path -> content
	// Warnings collects non-fatal issues encountered during compilation.
	Warnings []string
	// Errors collects fatal issues; expected to be non-empty when Success is false.
	Errors []string
}

// GeminiPlacement holds the result of GEMINI.md placement analysis.
type GeminiPlacement struct {
	// GeminiPath is the target path for the GEMINI.md file — presumably
	// relative to the project root; confirm against the placement analyzer.
	GeminiPath string
	// InstructionFiles lists instruction sources contributing to this placement.
	InstructionFiles []string
}

// GeminiCompilationResult holds the result of GEMINI.md compilation.
type GeminiCompilationResult struct {
	// Success is true when compilation produced usable output.
	Success bool
	// Placements are the individual placement analyses produced.
	Placements []GeminiPlacement
	// ContentMap maps output path -> compiled content.
	ContentMap map[string]string
	// Warnings collects non-fatal issues encountered during compilation.
	Warnings []string
	// Errors collects fatal issues; expected to be non-empty when Success is false.
	Errors []string
	// Stats holds named compilation metrics — NOTE(review): keys/units not
	// visible here; verify against the producer.
	Stats map[string]float64
}

// RenderGeminiStub generates the content for a GEMINI.md stub file.
func RenderGeminiStub(agentsPath string, version string) string {
rel := agentsPath
if rel == "" {
rel = "AGENTS.md"
}
var sb strings.Builder
sb.WriteString("<!-- Generated by APM CLI ")
sb.WriteString(version)
sb.WriteString(" -->\n")
sb.WriteString("<!-- Build ID: __BUILD_ID__ -->\n\n")
sb.WriteString("@")
sb.WriteString(filepath.ToSlash(rel))
sb.WriteString("\n")
return sb.String()
}

// RenderClaudeHeader returns the CLAUDE.md file header comment,
// terminated by a newline.
func RenderClaudeHeader() string {
	const header = "<!-- Generated by APM CLI -->\n"
	return header
}

// SummarizeClaudeResult returns a human-readable summary of the compilation result.
// Failures join all collected errors with "; "; successes report the number
// of placements produced. Uses strconv.Itoa rather than the hand-rolled
// local itoa helper (stdlib over hand-rolled conversion).
func SummarizeClaudeResult(r *ClaudeCompilationResult) string {
	if !r.Success {
		return "[x] CLAUDE.md compilation failed: " + strings.Join(r.Errors, "; ")
	}
	return "[+] CLAUDE.md compiled successfully (" + strconv.Itoa(len(r.Placements)) + " placement(s))"
}

func itoa(n int) string {
if n < 0 {
return "-" + itoa(-n)
}
if n < 10 {
return string(rune('0' + n))
}
return itoa(n/10) + string(rune('0'+n%10))
}
50 changes: 50 additions & 0 deletions internal/compilation/buildid/buildid.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
// Package buildid stabilizes build IDs in compiled outputs.
package buildid

import (
"crypto/sha256"
"fmt"
"strings"

"github.com/githubnext/apm/internal/compilation/compilationconst"
)

// StabilizeBuildID replaces BuildIDPlaceholder with a deterministic 12-char SHA256 hash.
// It is idempotent: returns content unchanged if no placeholder is present.
// The hash is computed over the document's lines with the placeholder line
// removed, so re-running on already-stabilized output is a no-op.
func StabilizeBuildID(content string) string {
	endsWithNL := strings.HasSuffix(content, "\n")
	lines := strings.Split(content, "\n")
	// A trailing newline makes Split produce a final empty element; drop it
	// so it is neither hashed nor counted as a line.
	if endsWithNL && len(lines) > 0 && lines[len(lines)-1] == "" {
		lines = lines[:len(lines)-1]
	}

	placeholderAt := -1
	for i := range lines {
		if lines[i] == compilationconst.BuildIDPlaceholder {
			placeholderAt = i
			break
		}
	}
	if placeholderAt == -1 {
		// Nothing to stabilize.
		return content
	}

	// Join every non-placeholder line with "\n" to form the hash input.
	var hashInput strings.Builder
	first := true
	for i, line := range lines {
		if i == placeholderAt {
			continue
		}
		if !first {
			hashInput.WriteString("\n")
		}
		hashInput.WriteString(line)
		first = false
	}

	digest := sha256.Sum256([]byte(hashInput.String()))
	// The first 6 bytes render as exactly 12 hex characters.
	lines[placeholderAt] = fmt.Sprintf("<!-- Build ID: %x -->", digest[:6])

	out := strings.Join(lines, "\n")
	if endsWithNL {
		out += "\n"
	}
	return out
}
Loading