diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..37459703e9 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# ran runic on the code base +a84228360d6cff568a55911733e830cdf1c492da diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..c558006ed1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 100 + labels: + - "dependencies" + - "github-actions" + - "ci" diff --git a/.github/workflows/backport-label-audit.yml b/.github/workflows/backport-label-audit.yml new file mode 100644 index 0000000000..cb90223830 --- /dev/null +++ b/.github/workflows/backport-label-audit.yml @@ -0,0 +1,62 @@ +name: Backport Label Audit +# Run this workflow manually to audit backport labels on pull requests +# and remove labels from PRs that have already been backported. +# Optionally specify a release version to limit the audit to that version + +on: + workflow_dispatch: + inputs: + version: + description: 'Release version to audit (e.g., 1.13). Leave empty to audit all versions.' 
+ required: false + type: string + dry_run: + description: 'Dry run (only report, do not modify)' + required: true + type: choice + options: + - 'true' + - 'false' + default: 'true' + +jobs: + audit-backport-labels: + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Checkout Backporter + uses: actions/checkout@v6 + with: + repository: KristofferC/Backporter + ref: master + path: backporter + + - name: Setup Julia + uses: julia-actions/setup-julia@v2 + with: + version: '1' + + - name: Cache Julia packages + uses: julia-actions/cache@v3 + with: + cache-name: backporter + + - name: Install dependencies + run: | + cd backporter + julia --project -e 'using Pkg; Pkg.instantiate()' + + - name: Run backport label audit + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + cd backporter + ARGS="--audit -r ${{ github.repository }}" + if [ -n "${{ inputs.version }}" ]; then + ARGS="$ARGS -v ${{ inputs.version }}" + fi + if [ "${{ inputs.dry_run }}" = "true" ]; then + ARGS="$ARGS --dry-run" + fi + julia --project backporter.jl $ARGS diff --git a/.github/workflows/backport-label-cleanup.yml b/.github/workflows/backport-label-cleanup.yml new file mode 100644 index 0000000000..4bc9930b24 --- /dev/null +++ b/.github/workflows/backport-label-cleanup.yml @@ -0,0 +1,64 @@ +name: Backport Label Cleanup +# Runs automatically when a pull request to a release branch is merged +# to remove backport labels from the merged PRs + +on: + pull_request: + types: [closed] + branches: + - 'release-*' + +jobs: + remove-backport-labels: + if: github.event.pull_request.merged == true + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Extract version from branch + id: extract + run: | + BRANCH="${{ github.event.pull_request.base.ref }}" + if [[ "$BRANCH" =~ ^release-([0-9]+\.[0-9]+)$ ]]; then + echo "version=${BASH_REMATCH[1]}" >> "$GITHUB_OUTPUT" + else + echo "Branch $BRANCH does not match release-X.Y pattern" + exit 0 + fi + + - 
name: Checkout Backporter + if: steps.extract.outputs.version != '' + uses: actions/checkout@v6 + with: + repository: KristofferC/Backporter + ref: master + path: backporter + + - name: Setup Julia + if: steps.extract.outputs.version != '' + uses: julia-actions/setup-julia@v2 + with: + version: '1' + + - name: Cache Julia packages + if: steps.extract.outputs.version != '' + uses: julia-actions/cache@v3 + with: + cache-name: backporter + + - name: Install dependencies + if: steps.extract.outputs.version != '' + run: | + cd backporter + julia --project -e 'using Pkg; Pkg.instantiate()' + + - name: Run backport label cleanup + if: steps.extract.outputs.version != '' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + cd backporter + julia --project backporter.jl --audit \ + -v ${{ steps.extract.outputs.version }} \ + -r ${{ github.repository }} \ + --cleanup-pr ${{ github.event.pull_request.number }} diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml new file mode 100644 index 0000000000..f555558d5c --- /dev/null +++ b/.github/workflows/check.yml @@ -0,0 +1,30 @@ +name: Code checks + +on: + pull_request: + push: + branches: ["master"] + +jobs: + + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 + env: + # Skip runic-pre-commit since we use runic-action below instead + SKIP: runic + + runic: + name: "Runic" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - uses: julia-actions/setup-julia@v2 + with: + version: '1.11' + - uses: julia-actions/cache@v3 + - uses: fredrikekre/runic-action@v1 + with: + version: "1.4" # Keep version in sync with .pre-commit-config.yaml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 11f1643502..0173d2722d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,9 +1,6 @@ name: Run tests on: pull_request: - branches: - - 'master' - - 'release-*' 
push: branches: - 'master' @@ -55,15 +52,15 @@ jobs: julia-version: 'nightly' pkg-server: "pkg.julialang.org" steps: - - name: Set git to use LF and fix TEMP on windows - if: matrix.os == 'windows-latest' + - name: Set git to use LF, fix TEMP, set JULIA_DEPOT_PATH (windows) + if: runner.os == 'Windows' run: | git config --global core.autocrlf false git config --global core.eol lf # See https://github.com/actions/virtual-environments/issues/712 echo "TMP=${USERPROFILE}\AppData\Local\Temp" >> ${GITHUB_ENV} echo "TEMP=${USERPROFILE}\AppData\Local\Temp" >> ${GITHUB_ENV} - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: julia-actions/setup-julia@v2 with: version: ${{ matrix.julia-version }} @@ -71,10 +68,11 @@ jobs: - uses: julia-actions/julia-runtest@v1 with: coverage: true + depwarn: error env: JULIA_PKG_SERVER: ${{ matrix.pkg-server }} JULIA_TEST_VERBOSE_LOGS_DIR: ${{ github.workspace }} - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v7 if: ${{ always() }} with: name: ${{ join(matrix.*, '-') }}_Pkg.log @@ -82,21 +80,29 @@ jobs: - uses: julia-actions/julia-processcoverage@v1 env: JULIA_PKG_SERVER: ${{ matrix.pkg-server }} - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v6 with: - file: lcov.info + files: lcov.info + token: ${{ secrets.CODECOV_TOKEN }} docs: runs-on: ubuntu-latest timeout-minutes: 60 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: julia-actions/setup-julia@v2 with: # version: '1.6' version: 'nightly' + - uses: julia-actions/cache@v3 - name: Generate docs run: | julia --project --color=yes -e 'using Pkg; Pkg.activate("docs"); Pkg.instantiate();' julia --project=docs --color=yes docs/make.jl pdf env: DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }} + - name: Upload documentation artifacts + uses: actions/upload-artifact@v7 + if: always() + with: + name: pkg-docs + path: docs/build/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 
index 0000000000..68066c2cc2 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +repos: + - repo: 'https://github.com/pre-commit/pre-commit-hooks' + rev: v5.0.0 + hooks: + - id: check-added-large-files + - id: check-case-conflict + # - id: check-toml # we have tomls with invalid syntax for tests + - id: check-yaml + - id: end-of-file-fixer + - id: mixed-line-ending + - id: trailing-whitespace + - repo: 'https://github.com/fredrikekre/runic-pre-commit' + rev: v2.0.1 + hooks: + - id: runic + additional_dependencies: + - 'Runic@1.4' # Keep version in sync with .github/workflows/Check.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 056a6f1f36..d6c8707f29 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,82 @@ +Pkg v1.14 Release Notes +======================= + +- During package source installation, Pkg now reports when a package has an Artifacts.toml but no artifacts match the + current platform. ([#4646]) + +Pkg v1.13 Release Notes +======================= + +- `Pkg.test` now respects the `--check-bounds` setting from the parent Julia session instead of forcing `--check-bounds=yes`. + +- Interactive precompilation now supports keyboard controls: `d`/`q`/`]` to detach (letting precompilation continue + silently in the background while returning to the REPL), `c` to cancel, `i` for a profile peek, `v` to toggle + verbose mode, `?`/`h` for help, and `Ctrl-C` to interrupt. After detaching, use `pkg> precompile --monitor` to + reattach, `--stop` to stop gracefully, or `--cancel` to cancel immediately. ([#4602]) +- Project.toml environments now support a `readonly` field to mark environments as read-only, preventing modifications. + ([#4284]) +- `Pkg.build` now supports an `allow_reresolve` keyword argument to control whether the build process can re-resolve + package versions, similar to the existing option for `Pkg.test`. ([#3329]) +- Packages are now automatically added to `[sources]` when they are added by url or devved. 
([#4225]) +- Packages added via URL now honor nested `[sources]` entries, allowing private dependency chains to resolve without registry metadata. ([#4366]) +- `update` now shows a helpful tip when trying to upgrade a specific package that can be upgraded but is held back + because it's part of a less optimal resolver solution ([#4266]) +- `Pkg.status` now displays yanked packages with a `[yanked]` indicator and shows a warning when yanked packages are + present. `Pkg.resolve` errors also display warnings about yanked packages that are not resolvable. ([#4310]) +- Added `pkg> compat --current` command to automatically populate missing compat entries with the currently resolved + package versions. Use `pkg> compat --current` for all packages or `pkg> compat Foo --current` for specific packages. + ([#3266]) +- Added `Pkg.precompile() do` block syntax to delay autoprecompilation until after multiple operations complete, + improving efficiency when performing several environment changes. ([#4262]) +- Added `Pkg.autoprecompilation_enabled(state::Bool)` to globally enable or disable automatic precompilation for Pkg + operations. ([#4262]) +- Implemented atomic TOML writes to prevent data corruption when Pkg operations are interrupted or multiple processes + write simultaneously. All TOML files are now written atomically using temporary files and atomic moves. ([#4293]) +- Implemented lazy loading for RegistryInstance to significantly improve startup performance for operations that don't + require full registry data. This reduces `Pkg.instantiate()` time by approximately 60% in many cases. ([#4304]) +- Added support for directly adding git submodules via `Pkg.add(path="/path/to/git-submodule.jl")`. ([#3344]) +- Enhanced REPL user experience by automatically detecting and stripping accidental leading `]` characters in commands. + ([#3122]) +- Improved tip messages to show REPL mode syntax when operating in REPL mode. 
([#3854]) +- Enhanced error handling with more descriptive error messages when operations fail on empty URLs during git repository + installation or registry discovery. ([#4282]) +- Improved error messages for invalid compat entries to provide better guidance for fixing them. ([#4302]) +- Added warnings when attempting to add local paths that contain dirty git repositories. ([#4309]) +- Enhanced package parsing to better handle complex URLs and paths with branch/tag/subdir specifiers. ([#4299]) +- Improved artifact download behavior to only attempt downloads from the Pkg server when the package is registered on + that server's registries. ([#4297]) +- Added comprehensive documentation page about depots, including depot layouts and configuration. ([#2245]) +- Enhanced error handling for packages missing from registries or manifests with more informative messages. ([#4303]) +- Added more robust error handling when packages have revisions but no source information. ([#4311]) +- Enhanced registry status reporting with more detailed information. ([#4300]) +- Fixed various edge cases in package resolution and manifest handling. ([#4307], [#4308], [#4312]) +- Improved handling of path separators across different operating systems. ([#4305]) +- Added better error messages when accessing private PackageSpec.repo field. ([#4170]) + Pkg v1.12 Release Notes ======================= - Pkg now has support for "workspaces" which is a way to resolve multiple project files into a single manifest. - The functions `Pkg.status`, `Pkg.why`, `Pkg.instantiate`, `Pkg.precompile` (and their REPL variants) have been updated - to take a `workspace` option. Read more about this feature in the manual about the TOML-files. + The functions `Pkg.status`, `Pkg.why`, `Pkg.instantiate`, `Pkg.precompile` (and their REPL variants) have been + updated to take a `workspace` option, with fixes for workspace path collection and package resolution in workspace + environments. 
Read more about this feature in the manual about the TOML-files. ([#3841], [#4229]) +- Pkg now supports "apps" which are Julia packages that can be run directly from the terminal after installation. + Apps can be defined in a package's Project.toml and installed via Pkg. Apps now support multiple apps per package + via submodules, allowing packages to define multiple command-line applications, with enhanced functionality including + update capabilities and better handling of already installed apps. ([#3772], [#4277], [#4263]) +- `status` now shows when different versions/sources of dependencies are loaded than that which is expected by the + manifest ([#4109]) +- When adding or developing a package that exists in the `[weakdeps]` section, it is now automatically removed from + weak dependencies and added as a regular dependency. ([#3865]) +- Enhanced fuzzy matching algorithm for package name suggestions with improved multi-factor scoring for better package + name suggestions. ([#4287]) +- The Pkg REPL now supports GitHub pull request URLs, allowing direct package installation from PRs via + `pkg> add https://github.com/Org/Package.jl/pull/123` ([#4295]) +- Improved git repository cloning performance by changing from `refs/*` to `refs/heads/*` to speed up operations on + repositories with many branches. ([#2330]) +- Improved REPL command parsing to handle leading whitespace with comma-separated packages. ([#4274]) +- Improved error messages when providing incorrect package UUIDs. ([#4270]) +- Added confirmation prompts before removing compat entries to prevent accidental deletions. ([#4254]) Pkg v1.11 Release Notes ======================= @@ -21,7 +94,7 @@ Pkg v1.10 Release Notes ======================= Pkg v1.9 Release Notes -======================= +====================== - New functionality: `Pkg.why` and `pkg> why` to show why a package is inside the environment (shows all "paths" to a package starting at the direct dependencies). 
- When code coverage tracking is enabled for `Pkg.test` the new path-specific code-coverage option is used to limit coverage @@ -83,6 +156,16 @@ Pkg v1.7 Release Notes - The `mode` keyword for `PackageSpec` has been removed ([#2454]). +[#4225]: https://github.com/JuliaLang/Pkg.jl/issues/4225 +[#4284]: https://github.com/JuliaLang/Pkg.jl/issues/4284 +[#3526]: https://github.com/JuliaLang/Pkg.jl/issues/3526 +[#3708]: https://github.com/JuliaLang/Pkg.jl/issues/3708 +[#3732]: https://github.com/JuliaLang/Pkg.jl/issues/3732 +[#3772]: https://github.com/JuliaLang/Pkg.jl/issues/3772 +[#3783]: https://github.com/JuliaLang/Pkg.jl/issues/3783 +[#3841]: https://github.com/JuliaLang/Pkg.jl/issues/3841 +[#3865]: https://github.com/JuliaLang/Pkg.jl/issues/3865 +[#4109]: https://github.com/JuliaLang/Pkg.jl/issues/4109 [#2284]: https://github.com/JuliaLang/Pkg.jl/issues/2284 [#2431]: https://github.com/JuliaLang/Pkg.jl/issues/2431 [#2432]: https://github.com/JuliaLang/Pkg.jl/issues/2432 @@ -101,3 +184,36 @@ Pkg v1.7 Release Notes [#2995]: https://github.com/JuliaLang/Pkg.jl/issues/2995 [#3002]: https://github.com/JuliaLang/Pkg.jl/issues/3002 [#3021]: https://github.com/JuliaLang/Pkg.jl/issues/3021 +[#3266]: https://github.com/JuliaLang/Pkg.jl/pull/3266 +[#4266]: https://github.com/JuliaLang/Pkg.jl/pull/4266 +[#4310]: https://github.com/JuliaLang/Pkg.jl/pull/4310 +[#3329]: https://github.com/JuliaLang/Pkg.jl/pull/3329 +[#4262]: https://github.com/JuliaLang/Pkg.jl/pull/4262 +[#4293]: https://github.com/JuliaLang/Pkg.jl/pull/4293 +[#4304]: https://github.com/JuliaLang/Pkg.jl/pull/4304 +[#3344]: https://github.com/JuliaLang/Pkg.jl/pull/3344 +[#2330]: https://github.com/JuliaLang/Pkg.jl/pull/2330 +[#3122]: https://github.com/JuliaLang/Pkg.jl/pull/3122 +[#3854]: https://github.com/JuliaLang/Pkg.jl/pull/3854 +[#4282]: https://github.com/JuliaLang/Pkg.jl/pull/4282 +[#4302]: https://github.com/JuliaLang/Pkg.jl/pull/4302 +[#4309]: https://github.com/JuliaLang/Pkg.jl/pull/4309 +[#4299]: 
https://github.com/JuliaLang/Pkg.jl/pull/4299 +[#4295]: https://github.com/JuliaLang/Pkg.jl/pull/4295 +[#4277]: https://github.com/JuliaLang/Pkg.jl/pull/4277 +[#4297]: https://github.com/JuliaLang/Pkg.jl/pull/4297 +[#2245]: https://github.com/JuliaLang/Pkg.jl/pull/2245 +[#4303]: https://github.com/JuliaLang/Pkg.jl/pull/4303 +[#4254]: https://github.com/JuliaLang/Pkg.jl/pull/4254 +[#4270]: https://github.com/JuliaLang/Pkg.jl/pull/4270 +[#4263]: https://github.com/JuliaLang/Pkg.jl/pull/4263 +[#4229]: https://github.com/JuliaLang/Pkg.jl/pull/4229 +[#4274]: https://github.com/JuliaLang/Pkg.jl/pull/4274 +[#4311]: https://github.com/JuliaLang/Pkg.jl/pull/4311 +[#4300]: https://github.com/JuliaLang/Pkg.jl/pull/4300 +[#4307]: https://github.com/JuliaLang/Pkg.jl/pull/4307 +[#4308]: https://github.com/JuliaLang/Pkg.jl/pull/4308 +[#4312]: https://github.com/JuliaLang/Pkg.jl/pull/4312 +[#4305]: https://github.com/JuliaLang/Pkg.jl/pull/4305 +[#4170]: https://github.com/JuliaLang/Pkg.jl/pull/4170 +[#4287]: https://github.com/JuliaLang/Pkg.jl/pull/4287 diff --git a/Project.toml b/Project.toml index 4ddbbefd00..4a2f27d233 100644 --- a/Project.toml +++ b/Project.toml @@ -3,7 +3,7 @@ uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" keywords = ["package management"] license = "MIT" desc = "The next-generation Julia package manager." 
-version = "1.12.0" +version = "1.14.0" [workspace] projects = ["test", "docs"] @@ -23,6 +23,7 @@ SHA = "ea8e919c-243c-51af-8825-aaa63cd721ce" TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76" Tar = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +Zstd_jll = "3161d3a3-bdf6-5164-811a-617609db77b4" p7zip_jll = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" [weakdeps] @@ -32,4 +33,21 @@ REPL = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" REPLExt = "REPL" [compat] +Artifacts = "1.11" +Dates = "1.11" +Downloads = "1.6" +FileWatching = "1.11" +LibGit2 = "1.11" +Libdl = "1.11" +Logging = "1.11" +Markdown = "1.11" +Printf = "1.11" +REPL = "1.11" +Random = "1.11" +SHA = "0.7, 1" +TOML = "1" +Tar = "1.10" +UUIDs = "1.11" +Zstd_jll = "1.5.7" julia = "1.12" +p7zip_jll = "17.5" diff --git a/README.md b/README.md index 5cc370c4c9..ad4ec9f25c 100644 --- a/README.md +++ b/README.md @@ -13,12 +13,29 @@ If you want to develop this package do the following steps: - Make a fork and then clone the repo locally on your computer - Change the current directory to the Pkg repo you just cloned and start julia with `julia --project`. - `import Pkg` will now load the files in the cloned repo instead of the Pkg stdlib. -- To test your changes, simply do `include("test/runtests.jl")`. +- To test your changes, simply do `Pkg.test()`. If you need to build Julia from source with a Git checkout of Pkg, then instead use `make DEPS_GIT=Pkg` when building Julia. The `Pkg` repo is in `stdlib/Pkg`, and created initially with a detached `HEAD`. If you're doing this from a pre-existing Julia repository, you may need to `make clean` beforehand. 
If you need to build Julia from source with Git checkouts of two or more stdlibs, please see the instructions in the [`Building Julia from source with a Git checkout of a stdlib`](https://github.com/JuliaLang/julia/blob/master/doc/src/devdocs/build/build.md#building-julia-from-source-with-a-git-checkout-of-a-stdlib) section of the [`doc/src/devdocs/build/build.md`](https://github.com/JuliaLang/julia/blob/master/doc/src/devdocs/build/build.md) file within the Julia devdocs. +## Pre-commit hooks + +This repository uses pre-commit hooks to automatically check and format code before commits. The hooks perform various checks including: + +- File size and case conflict validation +- YAML syntax checking +- Trailing whitespace removal and line ending fixes +- Julia code formatting with Runic + +To install and use the pre-commit hooks: + +1. Install pre-commit: `pip install pre-commit` (or use your system's package manager) +2. Install the hooks: `pre-commit install` from the root of the repository +3. Run on all files: `pre-commit run --all-files` from the root of the repository + +Once installed, the hooks will run automatically on each commit. You can also run them manually anytime with `pre-commit run`. + ## Synchronization with the Julia repo To check which commit julia master uses see [JuliaLang/julia/stdlib/Pkg.version](https://github.com/JuliaLang/julia/blob/master/stdlib/Pkg.version). 
diff --git a/contrib/list_missing_pkg_tags.jl b/contrib/list_missing_pkg_tags.jl new file mode 100644 index 0000000000..93309889aa --- /dev/null +++ b/contrib/list_missing_pkg_tags.jl @@ -0,0 +1,89 @@ +using LibGit2 + +const JULIA_REPO_URL = "https://github.com/JuliaLang/julia.git" +const JULIA_REPO_DIR = "julia" +const PKG_VERSION_PATH = "stdlib/Pkg.version" +const PKG_REPO_URL = "https://github.com/JuliaLang/Pkg.jl.git" +const PKG_REPO_DIR = "Pkg.jl" + +function checkout_or_update_repo(url, dir) + return if isdir(dir) + println("Updating existing repository: $dir") + repo = LibGit2.GitRepo(dir) + LibGit2.fetch(repo) + else + println("Cloning repository: $url") + LibGit2.clone(url, dir) + end +end + +function get_tags(repo) + refs = LibGit2.ref_list(repo) + tags = filter(ref -> startswith(ref, "refs/tags/"), refs) + return sort!(replace.(tags, "refs/tags/" => "")) +end + +function is_stable_v1_release(tag) + return occursin(r"^v\d+\.\d+\.\d+$", tag) && VersionNumber(tag) >= v"1.0.0" +end + +function extract_pkg_sha1(text::AbstractString) + m = match(r"PKG_SHA1\s*=\s*([a-f0-9]{40})", text) + return m !== nothing ? 
m[1] : nothing +end + +function get_commit_hash_for_pkg_version(repo, tag) + return try + tag_ref = LibGit2.GitReference(repo, "refs/tags/" * tag) + LibGit2.checkout!(repo, string(LibGit2.GitHash(LibGit2.peel(tag_ref)))) + version_file = joinpath(JULIA_REPO_DIR, PKG_VERSION_PATH) + if isfile(version_file) + return extract_pkg_sha1(readchomp(version_file)) + else + println("Warning: Pkg.version file missing for tag $tag") + return nothing + end + catch + println("Error processing tag $tag") + rethrow() + end +end + +tempdir = mktempdir() +cd(tempdir) do + # Update Julia repo + checkout_or_update_repo(JULIA_REPO_URL, JULIA_REPO_DIR) + julia_repo = LibGit2.GitRepo(JULIA_REPO_DIR) + + # Get Julia tags, filtering only stable releases + julia_tags = filter(is_stable_v1_release, get_tags(julia_repo)) + version_commit_map = Dict{String, String}() + + for tag in julia_tags + println("Processing Julia tag: $tag") + commit_hash = get_commit_hash_for_pkg_version(julia_repo, tag) + if commit_hash !== nothing + version_commit_map[tag] = commit_hash + end + end + + # Update Pkg.jl repo + checkout_or_update_repo(PKG_REPO_URL, PKG_REPO_DIR) + pkg_repo = LibGit2.GitRepo(PKG_REPO_DIR) + + # Get existing tags in Pkg.jl + pkg_tags = Set(get_tags(pkg_repo)) + + # Filter out versions that already exist + missing_versions = filter(v -> v ∉ pkg_tags, collect(keys(version_commit_map))) + + # Sort versions numerically + sort!(missing_versions, by = VersionNumber) + + # Generate `git tag` commands + println("\nGit tag commands for missing Pkg.jl versions:") + for version in missing_versions + commit = version_commit_map[version] + println("git tag $version $commit") + end +end diff --git a/docs/NEWS-update.jl b/docs/NEWS-update.jl index 3812e9e437..d0ca10d391 100644 --- a/docs/NEWS-update.jl +++ b/docs/NEWS-update.jl @@ -7,11 +7,11 @@ s = read(NEWS, String) m = match(r"\[#[0-9]+\]:", s) if m !== nothing - s = s[1:m.offset-1] + s = s[1:(m.offset - 1)] end footnote(n) = "[#$n]: 
https://github.com/JuliaLang/Pkg.jl/issues/$n" -N = map(m -> parse(Int,m.captures[1]), eachmatch(r"\[#([0-9]+)\]", s)) +N = map(m -> parse(Int, m.captures[1]), eachmatch(r"\[#([0-9]+)\]", s)) foots = join(map(footnote, sort!(unique(N))), "\n") open(NEWS, "w") do f diff --git a/docs/generate.jl b/docs/generate.jl index fa4af617ef..3d227f1374 100644 --- a/docs/generate.jl +++ b/docs/generate.jl @@ -4,38 +4,42 @@ function generate(io, command) cmd_nospace = replace(command, " " => "-") - println(io, """ - ```@raw html -
-
- - $(command) - - — - REPL command -
-
- ``` - ```@eval - using Pkg - Dict(Pkg.REPLMode.canonical_names())["$(command)"].help - ``` - ```@raw html -
-
- ``` - """) + return println( + io, """ + ```@raw html +
+
+ + $(command) + + — + REPL command +
+
+ ``` + ```@eval + using Pkg + Dict(Pkg.REPLMode.canonical_names())["$(command)"].help + ``` + ```@raw html +
+
+ ``` + """ + ) end function generate() io = IOBuffer() - println(io, """ + println( + io, """ # [**11.** REPL Mode Reference](@id REPL-Mode-Reference) This section describes available commands in the Pkg REPL. The Pkg REPL mode is mostly meant for interactive use, and for non-interactive use it is recommended to use the functional API, see [API Reference](@ref API-Reference). - """) + """ + ) # list commands println(io, "## `package` commands") foreach(command -> generate(io, command), ["add", "build", "compat", "develop", "free", "generate", "pin", "remove", "test", "update"]) diff --git a/docs/make.jl b/docs/make.jl index be6905de5a..6b38dad0d7 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -9,7 +9,7 @@ const formats = Any[ Documenter.HTML( prettyurls = get(ENV, "CI", nothing) == "true", canonical = "https://julialang.github.io/Pkg.jl/v1/", - assets = ["assets/custom.css"], + assets = ["assets/custom.css", "assets/favicon.ico"], ), ] if "pdf" in ARGS @@ -17,7 +17,7 @@ if "pdf" in ARGS end # setup for doctesting -DocMeta.setdocmeta!(Pkg.BinaryPlatforms, :DocTestSetup, :(using Base.BinaryPlatforms); recursive=true) +DocMeta.setdocmeta!(Pkg.BinaryPlatforms, :DocTestSetup, :(using Base.BinaryPlatforms); recursive = true) # Run doctests first and disable them in makedocs Documenter.doctest(joinpath(@__DIR__, "src"), [Pkg]) @@ -35,6 +35,7 @@ makedocs( "managing-packages.md", "environments.md", "creating-packages.md", + "apps.md", "compatibility.md", "registries.md", "artifacts.md", @@ -42,6 +43,8 @@ makedocs( "toml-files.md", "repl.md", "api.md", + "protocol.md", + "depots.md", ], ) diff --git a/docs/src/api.md b/docs/src/api.md index 61979453b9..ed3c15b20b 100644 --- a/docs/src/api.md +++ b/docs/src/api.md @@ -1,4 +1,4 @@ -# [**12.** API Reference](@id API-Reference) +# [**13.** API Reference](@id API-Reference) This section describes the functional API for interacting with Pkg.jl. 
It is recommended to use the functional API, rather than the Pkg REPL mode, @@ -39,6 +39,7 @@ Pkg.gc Pkg.status Pkg.compat Pkg.precompile +Pkg.autoprecompilation_enabled Pkg.offline Pkg.why Pkg.dependencies @@ -47,6 +48,7 @@ Pkg.project Pkg.undo Pkg.redo Pkg.setprotocol! +Pkg.readonly PackageSpec PackageMode UpgradeLevel @@ -79,3 +81,10 @@ Pkg.Artifacts.ensure_artifact_installed Pkg.Artifacts.ensure_all_artifacts_installed Pkg.Artifacts.archive_artifact ``` + +## [Package Server Authentication Hooks](@id Package-Server-Authentication-Hooks) + +```@docs +Pkg.PlatformEngines.register_auth_error_handler +Pkg.PlatformEngines.deregister_auth_error_handler +``` diff --git a/docs/src/apps.md b/docs/src/apps.md new file mode 100644 index 0000000000..0606f9db52 --- /dev/null +++ b/docs/src/apps.md @@ -0,0 +1,160 @@ +# [**6.** Apps](@id Apps) + +!!! note + The app support in Pkg is currently considered experimental and some functionality and API may change. + + Some inconveniences that can be encountered are: + - You need to manually make `~/.julia/bin` available on the PATH environment. + - The path to the julia executable used is the same as the one used to install the app. If this + julia installation gets removed, you might need to reinstall the app. + +Apps are Julia packages that are intended to be run as "standalone programs" (by e.g. typing the name of the app in the terminal possibly together with some arguments or flags/options). +This is in contrast to most Julia packages that are used as "libraries" and are loaded by other files or in the Julia REPL. + +## Creating a Julia app + +A Julia app is structured similar to a standard Julia library with the following additions: + +- A `@main` entry point in the package module (see the [Julia help on `@main`](https://docs.julialang.org/en/v1/manual/command-line-interface/#The-Main.main-entry-point) for details) +- An `[apps]` section in the `Project.toml` file listing the executable names that the package provides. 
+ +A very simple example of an app that prints the reversed input arguments would be: + +```julia +# src/MyReverseApp.jl +module MyReverseApp + +function (@main)(ARGS) + for arg in ARGS + print(stdout, reverse(arg), " ") + end + return +end + +end # module +``` + +```toml +# Project.toml + +# standard fields here + +[apps] +reverse = {} +``` +The empty table `{}` is to allow for giving metadata about the app. + +After installing this app one could run: + +``` +$ reverse some input string + emos tupni gnirts +``` + +directly in the terminal. + +## Multiple Apps per Package + +A single package can define multiple apps by using submodules. Each app can have its own entry point in a different submodule of the package. + +```julia +# src/MyMultiApp.jl +module MyMultiApp + +function (@main)(ARGS) + println("Main app: ", join(ARGS, " ")) +end + +include("CLI.jl") + +end # module +``` + +```julia +# src/CLI.jl +module CLI + +function (@main)(ARGS) + println("CLI submodule: ", join(ARGS, " ")) +end + +end # module CLI +``` + +```toml +# Project.toml + +# standard fields here + +[apps] +main-app = {} +cli-app = { submodule = "CLI" } +``` + +This will create two executables: +- `main-app` that runs `julia -m MyMultiApp` +- `cli-app` that runs `julia -m MyMultiApp.CLI` + +## Configuring Julia Flags + +Apps can specify default Julia command-line flags that will be passed to the Julia process when the app is run. This is useful for configuring performance settings, threading, or other Julia options specific to your application. 
+ +### Default Julia Flags + +You can specify default Julia flags in the `Project.toml` file using the `julia_flags` field: + +```toml +# Project.toml + +[apps] +myapp = { julia_flags = ["--threads=4", "--optimize=2"] } +performance-app = { julia_flags = ["--threads=auto", "--startup-file=yes", "--depwarn=no"] } +debug-app = { submodule = "Debug", julia_flags = ["--check-bounds=yes", "--optimize=0"] } +``` + +With this configuration: +- `myapp` will run with 4 threads and optimization level 2 +- `performance-app` will run with automatic thread detection, startup file enabled, and deprecation warnings disabled +- `debug-app` will run with bounds checking enabled and no optimization + +### Runtime Julia Flags + +You can override or add to the default Julia flags at runtime using the `--` separator. Everything before `--` will be passed as flags to Julia, and everything after `--` will be passed as arguments to your app: + +```bash +# Uses default flags from Project.toml +myapp input.txt output.txt + +# Override thread count, keep other defaults +myapp --threads=8 -- input.txt output.txt + +# Add additional flags +myapp --threads=2 --optimize=3 --check-bounds=yes -- input.txt output.txt + +# Only Julia flags, no app arguments +myapp --threads=1 -- +``` + +The final Julia command will combine: +1. Fixed flags (like `--startup-file=no` and `-m ModuleName`) +2. Default flags from `julia_flags` in Project.toml +3. Runtime flags specified before `--` +4. App arguments specified after `--` + +### Overriding the Julia Executable + +By default, apps run with the same Julia executable that was used to install them. 
You can override this globally using the `JULIA_APPS_JULIA_CMD` environment variable: + +```bash +# Use a different Julia version for all apps +export JULIA_APPS_JULIA_CMD=/path/to/different/julia +myapp input.txt + +# On Windows +set JULIA_APPS_JULIA_CMD=C:\path\to\different\julia.exe +myapp input.txt +``` + +## Installing Julia apps + +The installation of Julia apps is similar to [installing Julia libraries](@ref Managing-Packages) but instead of using e.g. `Pkg.add` or `pkg> add` one uses `Pkg.Apps.add` or `pkg> app add` (`develop` is also available). diff --git a/docs/src/artifacts.md b/docs/src/artifacts.md index 66a55f99f5..d5fe5f38b7 100644 --- a/docs/src/artifacts.md +++ b/docs/src/artifacts.md @@ -1,4 +1,4 @@ -# [**8.** Artifacts](@id Artifacts) +# [**9.** Artifacts](@id Artifacts) `Pkg` can install and manage containers of data that are not Julia packages. These containers can contain platform-specific binaries, datasets, text, or any other kind of data that would be convenient to place within an immutable, life-cycled datastore. These containers, (called "Artifacts") can be created locally, hosted anywhere, and automatically downloaded and unpacked upon installation of your Julia package. @@ -230,7 +230,7 @@ This is deduced automatically by the `artifacts""` string macro, however, if you !!! compat "Julia 1.7" Pkg's extended platform selection requires at least Julia 1.7, and is considered experimental. -New in Julia 1.6, `Platform` objects can have extended attributes applied to them, allowing artifacts to be tagged with things such as CUDA driver version compatibility, microarchitectural compatibility, julia version compatibility and more! +New in Julia 1.7, `Platform` objects can have extended attributes applied to them, allowing artifacts to be tagged with things such as CUDA driver version compatibility, microarchitectural compatibility, julia version compatibility and more! 
Note that this feature is considered experimental and may change in the future. If you as a package developer find yourself needing this feature, please get in contact with us so it can evolve for the benefit of the whole ecosystem. In order to support artifact selection at `Pkg.add()` time, `Pkg` will run the specially-named file `/.pkg/select_artifacts.jl`, passing the current platform triplet as the first argument. diff --git a/docs/src/assets/favicon.ico b/docs/src/assets/favicon.ico new file mode 100644 index 0000000000..eeb1edd944 Binary files /dev/null and b/docs/src/assets/favicon.ico differ diff --git a/docs/src/basedocs.md b/docs/src/basedocs.md index 7d51728ffe..9e07aa4ca9 100644 --- a/docs/src/basedocs.md +++ b/docs/src/basedocs.md @@ -4,7 +4,7 @@ EditURL = "https://github.com/JuliaLang/Pkg.jl/blob/master/docs/src/basedocs.md" # Pkg -Pkg is Julia's builtin package manager, and handles operations +Pkg is Julia's built-in package manager, and handles operations such as installing, updating and removing packages. !!! note diff --git a/docs/src/compatibility.md b/docs/src/compatibility.md index bc1c58e3e9..dee8b05841 100644 --- a/docs/src/compatibility.md +++ b/docs/src/compatibility.md @@ -1,4 +1,4 @@ -# [**6.** Compatibility](@id Compatibility) +# [**7.** Compatibility](@id Compatibility) Compatibility refers to the ability to restrict the versions of the dependencies that your project is compatible with. If the compatibility for a dependency is not given, the project is assumed to be compatible with all versions of that dependency. @@ -22,7 +22,7 @@ The format of the version specifier is described in detail below. The rules below apply to the `Project.toml` file; for registries, see [Registry Compat.toml](@ref). !!! info - Note that registration into Julia's General Registry requires each dependency to have a `[compat`] entry with an upper bound. 
+ Note that registration into Julia's General Registry requires each dependency to have a `[compat]` entry with an upper bound. ## Version specifier format @@ -97,7 +97,7 @@ PkgA = "~1.2.3" # [1.2.3, 1.3.0) PkgB = "~1.2" # [1.2.0, 1.3.0) PkgC = "~1" # [1.0.0, 2.0.0) PkgD = "~0.2.3" # [0.2.3, 0.3.0) -PkgE = "~0.0.3" # [0.0.3, 0.0.4) +PkgE = "~0.0.3" # [0.0.3, 0.1.0) PkgF = "~0.0" # [0.0.0, 0.1.0) PkgG = "~0" # [0.0.0, 1.0.0) ``` @@ -164,7 +164,7 @@ PkgA = "0.2 - 0" # 0.2.0 - 0.*.* = [0.2.0, 1.0.0) ``` -## Fixing conflicts +## [Fixing conflicts](@id Fixing-conflicts) Version conflicts were introduced previously with an [example](@ref conflicts) of a conflict arising in a package `D` used by two other packages, `B` and `C`. diff --git a/docs/src/creating-packages.md b/docs/src/creating-packages.md index 7bb72c2e91..21fedf7b1f 100644 --- a/docs/src/creating-packages.md +++ b/docs/src/creating-packages.md @@ -11,7 +11,7 @@ To generate the bare minimum files for a new package, use `pkg> generate`. ```julia-repl -(@v1.8) pkg> generate HelloWorld +(@v1.10) pkg> generate HelloWorld ``` This creates a new project `HelloWorld` in a subdirectory by the same name, with the following files (visualized with the external [`tree` command](https://linux.die.net/man/1/tree)): @@ -118,7 +118,7 @@ describe about public symbols. A public symbol is a symbol that is exported from package with the `export` keyword or marked as public with the `public` keyword. When you change the behavior of something that was previously public so that the new version no longer conforms to the specifications provided in the old version, you should -adjust your package version number according to [Julia's variant on SemVer](#Version-specifier-format). +adjust your package version number according to [Julia's variant on SemVer](@ref Version-specifier-format). 
If you would like to include a symbol in your public API without exporting it into the global namespace of folks who call `using YourPackage`, you should mark that symbol as public with `public that_symbol`. Symbols marked as public with the `public` keyword are @@ -127,7 +127,7 @@ just as public as those marked as public with the `export` keyword, but when fol `YourPackage.that_symbol`. Let's say we would like our `greet` function to be part of the public API, but not the -`greet_alien` function. We could the write the following and release it as version `1.0.0`. +`greet_alien` function. We could then write the following and release it as version `1.0.0`. ```julia module HelloWorld @@ -275,79 +275,159 @@ test-specific dependencies, are available, see below. ### Test-specific dependencies -There are two ways of adding test-specific dependencies (dependencies that are not dependencies of the package but will still be available to -load when the package is tested). +Test-specific dependencies are dependencies that are not dependencies of the package itself but are available when the package is tested. -#### `target` based test specific dependencies +#### Recommended approach: Using workspaces with `test/Project.toml` -Using this method of adding test-specific dependencies, the packages are added under an `[extras]` section and to a test target, -e.g. to add `Markdown` and `Test` as test dependencies, add the following to the `Project.toml` file: - -```toml -[extras] -Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a" -Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" - -[targets] -test = ["Markdown", "Test"] -``` - -Note that the only supported targets are `test` and `build`, the latter of which (not recommended) can be used -for any `deps/build.jl` scripts. - -#### Alternative approach: `test/Project.toml` file test specific dependencies +!!! compat + Workspaces require Julia 1.12+. For older Julia versions, see the legacy approaches below. -!!! 
note - The exact interaction between `Project.toml`, `test/Project.toml` and their corresponding - `Manifest.toml`s are not fully worked out and may be subject to change in future versions. - The older method of adding test-specific dependencies, described in the previous section, - will therefore be supported throughout all Julia 1.X releases. +The recommended way to add test-specific dependencies is to use workspaces. This is done by: -In Julia 1.2 and later test dependencies can be declared in `test/Project.toml`. When running -tests, Pkg will automatically merge this and the package Projects to create the test environment. +1. Adding a `[workspace]` section to your package's `Project.toml`: -!!! note - If no `test/Project.toml` exists Pkg will use the `target` based test specific dependencies. +```toml +[workspace] +projects = ["test"] +``` -To add a test-specific dependency, i.e. a dependency that is available only when testing, -it is thus enough to add this dependency to the `test/Project.toml` project. This can be -done from the Pkg REPL by activating this environment, and then use `add` as one normally -does. Let's add the `Test` standard library as a test dependency: +2. Creating a `test/Project.toml` file with your test dependencies: ```julia-repl (HelloWorld) pkg> activate ./test [ Info: activating environment at `~/HelloWorld/test/Project.toml`. -(test) pkg> add Test +(HelloWorld/test) pkg> dev . # add current package to test dependencies using its path + Resolving package versions... + Updating `~/HelloWorld/test/Project.toml` + [8dfed614] + HelloWorld v0.1.0 `..` + +(HelloWorld/test) pkg> add Test # add other test dependencies Resolving package versions... Updating `~/HelloWorld/test/Project.toml` [8dfed614] + Test - Updating `~/HelloWorld/test/Manifest.toml` - [...] 
``` -We can now use `Test` in the test script and we can see that it gets installed when testing: +When using workspaces, the package manager resolves dependencies for all projects in the workspace together, and creates a single `Manifest.toml` next to the base `Project.toml`. This provides better dependency resolution and makes it easier to manage test-specific dependencies. +Note that dependencies of `HelloWorld` itself are **not** automatically inherited. Any package used directly in tests must also be listed under `[deps]`. + +!!! info + Unlike some earlier test dependency workflows, this one explicitly requires adding `HelloWorld` (the parent package) to your `test/Project.toml`. + +The resulting `test/Project.toml` will look like: + +```toml +[deps] +HelloWorld = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" # UUID from HelloWorld's Project.toml +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[sources] +HelloWorld = {path = ".."} +``` + +You can now use `Test` in the test script: ```julia-repl julia> write("test/runtests.jl", """ - using Test + using HelloWorld, Test @test 1 == 1 """); -(test) pkg> activate . +(HelloWorld/test) pkg> activate . (HelloWorld) pkg> test Testing HelloWorld Resolving package versions... - Updating `/var/folders/64/76tk_g152sg6c6t0b4nkn1vw0000gn/T/tmpPzUPPw/Project.toml` - [d8327f2a] + HelloWorld v0.1.0 [`~/.julia/dev/Pkg/HelloWorld`] + Testing HelloWorld tests passed +``` + +Workspaces can also be used for other purposes, such as documentation or benchmarks, by adding additional projects to the workspace: + +```toml +[workspace] +projects = ["test", "docs", "benchmarks"] +``` + +See the section on [Workspaces](@ref) in the `Project.toml` documentation for more details. + +#### Alternative approach: Using `[sources]` with path-based dependencies + +An alternative to workspaces is to use the `[sources]` section in `test/Project.toml` to reference the parent package. 
The `[sources]` section allows you to specify custom locations (paths or URLs) for dependencies, overriding registry information. This approach creates a **separate manifest** in the `test/` directory (unlike workspaces which create a single shared manifest). + +To use this approach: + +1. Create a `test/Project.toml` file and add your test dependencies: + +```julia-repl +(HelloWorld) pkg> activate ./test +[ Info: activating environment at `~/HelloWorld/test/Project.toml`. + +(HelloWorld/test) pkg> add Test + Resolving package versions... + Updating `~/HelloWorld/test/Project.toml` [8dfed614] + Test - Updating `/var/folders/64/76tk_g152sg6c6t0b4nkn1vw0000gn/T/tmpPzUPPw/Manifest.toml` - [d8327f2a] + HelloWorld v0.1.0 [`~/.julia/dev/Pkg/HelloWorld`] - Testing HelloWorld tests passed``` ``` +2. Add the parent package as a dependency using `[sources]` with a relative path: + +```toml +# In test/Project.toml +[deps] +HelloWorld = "00000000-0000-0000-0000-000000000000" # Your package UUID +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[sources] +HelloWorld = {path = ".."} +``` + +The `[sources]` section tells Pkg to use the local path for `HelloWorld` instead of looking it up in a registry. This creates a separate `test/Manifest.toml` that tracks the resolved dependencies for your test environment independently from the main package manifest. You can now run tests directly: + +```julia-repl +$ julia --project=test +julia> using HelloWorld, Test + +julia> include("test/runtests.jl") +``` + +!!! note "Difference from workspaces" + The key difference from workspaces is that this approach uses a **separate manifest file** (`test/Manifest.toml`) for the test environment, while workspaces create a **single shared manifest** (`Manifest.toml`) that resolves all projects together. 
This means: + + - With `[sources]` + path: Dependencies are resolved independently for each environment + - With workspaces: Dependencies are resolved together, ensuring compatibility across all projects in the workspace + + For more details on `[sources]`, see the [`[sources]` section](@ref sources-section) in the Project.toml documentation. + +#### Legacy approach: `target` based test specific dependencies + +!!! warning + This approach is legacy and maintained for compatibility. New packages should use workspaces instead. + +Using this method, test-specific dependencies are added under an `[extras]` section and to a test target: + +```toml +[extras] +Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[targets] +test = ["Markdown", "Test"] +``` + +Note that the only supported targets are `test` and `build`, the latter of which (not recommended) can be used for any `deps/build.jl` scripts. + +#### Legacy approach: `test/Project.toml` without workspace + +!!! warning + This approach is legacy and maintained for compatibility. New packages should use workspaces instead. + +In Julia 1.2 and later, test dependencies can be declared in `test/Project.toml` without using a workspace. When running tests, Pkg will automatically merge the package and test projects to create the test environment. + +!!! note + If no `test/Project.toml` exists, Pkg will use the `target` based test specific dependencies. + +This approach works similarly to the workspace approach, but without the workspace declaration in the main `Project.toml`. + ## Compatibility on dependencies Every dependency should in general have a compatibility constraint on it. @@ -450,9 +530,7 @@ Extensions can have arbitrary names (here `ContourExt`), following the format of In `Pkg` output, extension names are always shown together with their parent package name. !!! 
compat - Often you will put the extension dependencies into the `test` target so they are loaded when running e.g. `Pkg.test()`. On earlier Julia versions - this requires you to also put the package in the `[extras]` section. This is unfortunate but the project verifier on older Julia versions will - complain if this is not done. + Often you will want to load extension dependencies when testing your package. The recommended approach is to use workspaces and add the extension dependencies to your `test/Project.toml` (see [Test-specific dependencies](@ref adding-tests-to-packages)). For older Julia versions that don't support workspaces, you can put the extension dependencies into the `test` target, which requires you to also put the package in the `[extras]` section. The project verifier on older Julia versions will complain if this is not done. !!! note If you use a manifest generated by a Julia version that does not know about extensions with a Julia version that does @@ -557,73 +635,18 @@ This is done by making the following changes (using the example above): In the case where one wants to use an extension (without worrying about the feature of the extension being available on older Julia versions) while still -supporting older Julia versions the packages under `[weakdeps]` should be +supporting older Julia versions without workspace support, the packages under `[weakdeps]` should be duplicated into `[extras]`. This is an unfortunate duplication, but without doing this the project verifier under older Julia versions will throw an error if it finds packages under `[compat]` that is not listed in `[extras]`. -## Package naming rules - -Package names should be sensible to most Julia users, *even to those who are not domain experts*. -The following rules apply to the `General` registry but may be useful for other package -registries as well. 
- -Since the `General` registry belongs to the entire community, people may have opinions about -your package name when you publish it, especially if it's ambiguous or can be confused with -something other than what it is. Usually, you will then get suggestions for a new name that -may fit your package better. - -1. Avoid jargon. In particular, avoid acronyms unless there is minimal possibility of confusion. - - * It's ok to say `USA` if you're talking about the USA. - * It's not ok to say `PMA`, even if you're talking about positive mental attitude. -2. Avoid using `Julia` in your package name or prefixing it with `Ju`. - - * It is usually clear from context and to your users that the package is a Julia package. - * Package names already have a `.jl` extension, which communicates to users that `Package.jl` is a Julia package. - * Having Julia in the name can imply that the package is connected to, or endorsed by, contributors - to the Julia language itself. -3. Packages that provide most of their functionality in association with a new type should have pluralized - names. - - * `DataFrames` provides the `DataFrame` type. - * `BloomFilters` provides the `BloomFilter` type. - * In contrast, `JuliaParser` provides no new type, but instead new functionality in the `JuliaParser.parse()` - function. -4. Err on the side of clarity, even if clarity seems long-winded to you. - - * `RandomMatrices` is a less ambiguous name than `RndMat` or `RMT`, even though the latter are shorter. -5. A less systematic name may suit a package that implements one of several possible approaches to - its domain. - - * Julia does not have a single comprehensive plotting package. Instead, `Gadfly`, `PyPlot`, `Winston` - and other packages each implement a unique approach based on a particular design philosophy. - * In contrast, `SortingAlgorithms` provides a consistent interface to use many well-established - sorting algorithms. -6. 
Packages that wrap external libraries or programs can be named after those libraries or programs. - - * `CPLEX.jl` wraps the `CPLEX` library, which can be identified easily in a web search. - * `MATLAB.jl` provides an interface to call the MATLAB engine from within Julia. - -7. Avoid naming a package closely to an existing package - * `Websocket` is too close to `WebSockets` and can be confusing to users. Rather use a new name such as `SimpleWebsockets`. - -8. Avoid using a distinctive name that is already in use in a well known, unrelated project. - * Don't use the names `Tkinter.jl`, `TkinterGUI.jl`, etc. for a package that is unrelated - to the popular `tkinter` python package, even if it provides bindings to Tcl/Tk. - A package name of `Tkinter.jl` would only be appropriate if the package used Python's - library to accomplish its work or was spearheaded by the same community of developers. - * It's okay to name a package `HTTP.jl` even though it is unrelated to the popular rust - crate `http` because in most usages the name "http" refers to the hypertext transfer - protocol, not to the `http` rust crate. - * It's okay to name a package `OpenSSL.jl` if it provides an interface to the OpenSSL - library, even without explicit affiliation with the creators of the OpenSSL (provided - there's no copyright or trademark infringement etc.) - -9. Packages should follow the [Stylistic Conventions](https://docs.julialang.org/en/v1/manual/variables/#Stylistic-Conventions). - * The package name begin with a capital letter and word separation is shown with upper camel case - * Packages that provide the functionality of a project from another language should use the Julia convention - * Packages that [provide pre-built libraries and executables](https://docs.binarybuilder.org/stable/jll/) can keep orignal name, but should get `_jll`as a suffix. For example `pandoc_jll` wraps pandoc. 
However, note that the generation and release of most JLL packages is handled by the [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil) system. +For Julia 1.12+, using workspaces is recommended and this duplication is not necessary. + +## Package naming guidelines + +The [package naming guidelines for the `General` registry](https://github.com/JuliaRegistries/General/blob/master/NAMING_GUIDELINES.md) establish clear rules that may be helpful for naming packages even if they are not submitted to the General registry. + +For the complete list of rules for automatic merging into the General registry, see [the AutoMerge guidelines](https://juliaregistries.github.io/RegistryCI.jl/stable/guidelines/). ## Registering packages @@ -631,6 +654,28 @@ Once a package is ready it can be registered with the [General Registry](https:/ Currently, packages are submitted via [`Registrator`](https://juliaregistrator.github.io/). In addition to `Registrator`, [`TagBot`](https://github.com/marketplace/actions/julia-tagbot) helps manage the process of tagging releases. +## Creating new package versions + +After registering your package, you'll want to release new versions as you add features and fix bugs. The typical workflow is: + +1. **Update the version number** in your `Project.toml` file according to [semantic versioning rules](@ref Version-specifier-format). For example: + - Increment the patch version (1.2.3 → 1.2.4) for bug fixes + - Increment the minor version (1.2.3 → 1.3.0) for new features that don't break existing functionality + - Increment the major version (1.2.3 → 2.0.0) for breaking changes + +2. **Commit your changes** to your package repository, including the updated version number. + +3. **Tag the release** using Registrator. Comment `@JuliaRegistrator register` on a commit or pull request in your GitHub repository. + +4.
**Automated tagging**: Once you've set up [`TagBot`](https://github.com/marketplace/actions/julia-tagbot), it will automatically create a git tag in your repository when a new version is registered. This keeps your repository tags synchronized with registered versions. + +The registration process typically takes a few minutes. Registrator will: +- Check that your package meets registry requirements (has tests, proper version bounds, etc.) +- Submit a pull request to the General registry +- Automated checks will run, and if everything passes, the PR will be automatically merged + +For private registries or more advanced workflows, see the documentation for [LocalRegistry.jl](https://github.com/GunnarFarneback/LocalRegistry.jl) and [RegistryCI.jl](https://github.com/JuliaRegistries/RegistryCI.jl). + ## Best Practices Packages should avoid mutating their own state (writing to files within their package directory). @@ -649,3 +694,10 @@ To support the various use cases in the Julia package ecosystem, the Pkg develop * [`Preferences.jl`](https://github.com/JuliaPackaging/Preferences.jl) allows packages to read and write preferences to the top-level `Project.toml`. These preferences can be read at runtime or compile-time, to enable or disable different aspects of package behavior. Packages previously would write out files to their own package directories to record options set by the user or environment, but this is highly discouraged now that `Preferences` is available. 
+ +## See Also + +- [Managing Packages](@ref Managing-Packages) - Learn how to add, update, and manage package dependencies +- [Working with Environments](@ref Working-with-Environments) - Understand environments and reproducible development +- [Compatibility](@ref Compatibility) - Specify version constraints for dependencies +- [API Reference](@ref) - Functional API for non-interactive package management diff --git a/docs/src/depots.md b/docs/src/depots.md new file mode 100644 index 0000000000..94eaf776a1 --- /dev/null +++ b/docs/src/depots.md @@ -0,0 +1,306 @@ +# **15.** Depots + +The packages installed for a particular environment, defined in the +files `Project.toml` and `Manifest.toml` within the directory +structure, are not actually installed within that directory but into a +"depot". The locations of the depots are set by the variable +[`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH). + +For details on the default depot locations and how they vary by installation method, +see the [`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH) documentation. + +Packages which are installed by a user go into the first depot, and the Julia +standard library is in the last depot. + +You should not need to manage the user depot directly. Pkg will automatically clean up +the depots when packages are removed after a delay. However, you may want to manually +remove old `.julia/compiled/` subdirectories left over from older Julia +versions that you no longer use (since those versions are no longer run, they cannot tidy themselves up). + +## Configuring the depot path with `JULIA_DEPOT_PATH` + +The depot path can be configured using the `JULIA_DEPOT_PATH` environment variable, +which is used to populate the global Julia [`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH) variable +at startup.
For complete details on the behavior of this environment variable, +see the [environment variables documentation](https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_DEPOT_PATH). + +### When to customize the depot path + +You may want to change your depot location in several scenarios: + +- **Corporate environments**: When your user folder synchronizes with a server (such as with + Active Directory roaming profiles), storing thousands of package files in the default depot + can cause significant slowdowns during login/logout. +- **Storage constraints**: When your user directory has limited quota or is on a slow network drive. +- **Shared computing**: When multiple users need access to the same packages on a shared system. +- **Custom organization**: When you prefer to organize Julia packages separately from your user directory. + +### Platform-specific configuration + +`JULIA_DEPOT_PATH` is an **operating system environment variable**, not a Julia REPL command. +The method for setting it varies by platform: + +#### Unix/Linux/macOS + +For temporary configuration (current shell session only): + +```bash +export JULIA_DEPOT_PATH="/custom/depot:" +``` + +For permanent configuration, add the export command to your shell configuration file +(e.g., `~/.bashrc`, `~/.zshrc`, or `~/.profile`). + +#### Windows + +For temporary configuration in **PowerShell** (current session only): + +```powershell +$env:JULIA_DEPOT_PATH = "C:\custom\depot;" +``` + +For temporary configuration in **Command Prompt** (current session only): + +```cmd +set JULIA_DEPOT_PATH=C:\custom\depot; +``` + +For permanent system-wide or user-level configuration: + +1. Press `Win+R` to open the Run dialog +2. Type `sysdm.cpl` and press Enter +3. Go to the "Advanced" tab +4. Click "Environment Variables" +5. Add a new user or system variable named `JULIA_DEPOT_PATH` with your desired path + (e.g., `C:\custom\depot;`) + +!!! 
note + The trailing path separator (`:` on Unix, `;` on Windows) is crucial for including + the default system depots, which contain the standard library and other bundled + resources. Without it, Julia will only use the specified depot and will have to precompile + standard library packages, which can be time-consuming and inefficient. + +### Alternative configuration methods + +Instead of setting an operating system environment variable, you can configure the depot +path using Julia's `startup.jl` file, which runs automatically when Julia starts: + +```julia +# In ~/.julia/config/startup.jl (Unix) or C:\Users\USERNAME\.julia\config\startup.jl (Windows) +empty!(DEPOT_PATH) +push!(DEPOT_PATH, "/custom/depot") +push!(DEPOT_PATH, joinpath(homedir(), ".julia")) # Include default depot as fallback +``` + +This approach provides per-user permanent configuration without requiring operating system +environment variable changes. However, setting `JULIA_DEPOT_PATH` is generally preferred +as it takes effect before Julia loads any code. + +!!! warning + Modifying `DEPOT_PATH` at runtime (in the REPL or in scripts) after Julia has started + is generally not recommended, as Julia may have already loaded packages from the + original depot locations. + +## Shared depots for distributed computing + +When using Julia in distributed computing environments, such as high-performance computing +(HPC) clusters, it's recommended to use a shared depot via `JULIA_DEPOT_PATH`. This allows +multiple Julia processes to share precompiled packages and reduces redundant compilation. + +Since Julia v1.10, multiple processes using the same depot coordinate via pidfile locks +to ensure only one process precompiles a package while others wait. However, due to +the caching of native code in pkgimages since v1.9, you may need to set the `JULIA_CPU_TARGET` +environment variable appropriately to ensure cache compatibility across different +worker nodes with varying CPU capabilities. 
+ +For more details, see the [FAQ section on distributed computing](https://docs.julialang.org/en/v1/manual/faq/#Computing-cluster) +and the [environment variables documentation](https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_CPU_TARGET). + +## Setting up shared depots for multi-user systems + +In multi-user environments such as JupyterHub deployments, university computing labs, or shared servers, +system administrators often want to provide a set of commonly-used packages that are available to all +users while still allowing individual users to install their own packages. This can be achieved by +setting up a layered depot structure with a read-only shared depot and user-specific writable depots. + +### Overview of the approach + +The key concept is to use `JULIA_DEPOT_PATH` to create a layered depot structure where: + +1. **User depot** (first in path): User-specific packages and modifications +2. **Shared depot** (middle in path): Common packages installed by administrators +3. **System depot** (last in path): Julia standard library and bundled resources + +When Julia searches for packages, it looks through depots in order. 
This allows users to: +- Access pre-installed packages from the shared depot +- Install additional packages into their own depot +- Override shared packages if needed by installing different versions in their user depot + +### Administrator setup + +#### Step 1: Create the shared depot + +As a system administrator, create a shared depot location accessible to all users: + +```bash +# Create shared depot directory +sudo mkdir -p /opt/julia/shared_depot + +# Create a shared user for managing the depot (optional but recommended) +sudo useradd -r -s /bin/bash -d /opt/julia/shared_depot julia-shared + +# Set ownership +sudo chown -R julia-shared:julia-shared /opt/julia/shared_depot +``` + +#### Step 2: Install shared packages + +Switch to the shared user account and configure Julia to use the shared depot: + +```bash +sudo su - julia-shared +export JULIA_DEPOT_PATH="/opt/julia/shared_depot:" +``` + +Then install commonly-used packages. You can do this interactively or by instantiating from a Project.toml: + +```bash +# Interactive installation +julia -e 'using Pkg; Pkg.add(["Plots", "DataFrames", "CSV", "LinearAlgebra"])' + +# Or from a Project.toml file +cd /opt/julia/shared_depot +# Create or copy your Project.toml and Manifest.toml files here +julia --project=. -e 'using Pkg; Pkg.instantiate()' +``` + +!!! tip + Using a `Project.toml` and `Manifest.toml` file to define the shared environment is + recommended as it provides reproducibility and version control. You can maintain these + files in a git repository for tracking changes. + +#### Step 3: Clean the shared depot (optional) + +To minimize the shared depot size, you can remove registries from the shared depot: + +```bash +rm -rf /opt/julia/shared_depot/registries +``` + +Since Pkg only writes to the first depot in `JULIA_DEPOT_PATH`, users will maintain their own +registries in their user depots anyway. Removing registries from the shared depot simply avoids +storing duplicate registry data. 
+ +#### Step 4: Set appropriate permissions + +Make the shared depot read-only for regular users: + +```bash +# Make shared depot readable by all users +sudo chmod -R a+rX /opt/julia/shared_depot + +# Ensure it's not writable by others +sudo chmod -R go-w /opt/julia/shared_depot +``` + +### User configuration + +Each user should configure their `JULIA_DEPOT_PATH` to include both their personal depot and +the shared depot. The exact syntax depends on where you want the user depot: + +#### Using default user depot location + +To use the default `~/.julia` as the user depot with the shared depot as a fallback: + +```bash +export JULIA_DEPOT_PATH="~/.julia:/opt/julia/shared_depot:" +``` + +The trailing `:` ensures the system depot (with standard library) is still included. + +#### Using a custom user depot location + +If you want users to have their depot in a different location (e.g., to avoid home directory quotas): + +```bash +export JULIA_DEPOT_PATH="/scratch/$USER/julia_depot:/opt/julia/shared_depot:" +``` + +#### System-wide configuration + +To configure this for all users automatically, add the export command to system-wide shell +configuration files: + +**On Linux:** +```bash +# In /etc/profile.d/julia.sh +export JULIA_DEPOT_PATH="~/.julia:/opt/julia/shared_depot:" +``` + +**On macOS:** +```bash +# In /etc/zshrc or /etc/bashrc +export JULIA_DEPOT_PATH="~/.julia:/opt/julia/shared_depot:" +``` + +Users can then further customize their individual depot paths if needed. + +### Pre-seeding user environments + +In some scenarios (e.g., for student lab computers or container images), you may want to +pre-seed individual user environments. This can be done by: + +1. Creating a template environment with a `Project.toml` and `Manifest.toml` +2. Copying these files to each user's Julia project directory +3. 
Having users (or a startup script) run `Pkg.instantiate()` on first use
+
+Since packages in the shared depot will be found automatically, `instantiate()` will only
+download packages that aren't already available in the shared depot.
+
+```bash
+# As administrator, create template
+mkdir -p /opt/julia/template_project
+# Create Project.toml with desired packages
+julia --project=/opt/julia/template_project -e 'using Pkg; Pkg.add("Example"); Pkg.add("Plots")'
+
+# Users copy the template and instantiate
+cp -r /opt/julia/template_project ~/my_project
+cd ~/my_project
+julia --project=. -e 'using Pkg; Pkg.instantiate()'
+```
+
+### Updating shared packages
+
+To update packages in the shared depot:
+
+1. Switch to the shared user account
+2. Set `JULIA_DEPOT_PATH` to point only to the shared depot
+3. Update packages as needed
+4. Optionally, clean up old package versions to save space
+
+```bash
+sudo su - julia-shared
+export JULIA_DEPOT_PATH="/opt/julia/shared_depot:"
+julia -e 'using Pkg; Pkg.update()'
+```
+
+!!! note
+    Updating packages in the shared depot adds new versions alongside existing ones. Users with
+    `Manifest.toml` files remain pinned to their specific versions and won't be affected. If you
+    explicitly clean up old package versions to save disk space, users who need those versions
+    can run `Pkg.instantiate()` to download them to their local depot.
+
+### Troubleshooting
+
+**Packages not found despite being in the shared depot:**
+Verify that `JULIA_DEPOT_PATH` is set correctly and includes the shared depot. Check that
+the trailing separator is present to include system depots. Use `DEPOT_PATH` in the Julia
+REPL to verify the depot search path.
+ +```julia +julia> DEPOT_PATH +3-element Vector{String}: + "/home/user/.julia" + "/opt/julia/shared_depot" + "/usr/local/share/julia" +``` diff --git a/docs/src/environments.md b/docs/src/environments.md index 54fa4e9fe9..12fb08641f 100644 --- a/docs/src/environments.md +++ b/docs/src/environments.md @@ -1,16 +1,16 @@ -# [**4.** Working with Environment](@id Working-with-Environments) +# [**4.** Working with Environments](@id Working-with-Environments) The following discusses Pkg's interaction with environments. For more on the role, environments play in code loading, including the "stack" of environments from which code can be loaded, see [this section in the Julia manual](https://docs.julialang.org/en/v1/manual/code-loading/#Environments-1). ## Creating your own environments -So far we have added packages to the default environment at `~/.julia/environments/v1.9`. It is however easy to create other, independent, projects. +So far we have added packages to the default environment at `~/.julia/environments/v1.10`. It is however easy to create other, independent, projects. This approach has the benefit of allowing you to check in a `Project.toml`, and even a `Manifest.toml` if you wish, into version control (e.g. git) alongside your code. It should be pointed out that when two projects use the same package at the same version, the content of this package is not duplicated. In order to create a new project, create a directory for it and then activate that directory to make it the "active project", which package operations manipulate: ```julia-repl -(@v1.9) pkg> activate MyProject +(@v1.10) pkg> activate MyProject Activating new environment at `~/MyProject/Project.toml` (MyProject) pkg> st @@ -28,7 +28,7 @@ false Installed Example ─ v0.5.3 Updating `~/MyProject/Project.toml` [7876af07] + Example v0.5.3 - Updating `~~/MyProject/Manifest.toml` + Updating `~/MyProject/Manifest.toml` [7876af07] + Example v0.5.3 Precompiling environment... 
1 dependency successfully precompiled in 2 seconds @@ -45,7 +45,7 @@ Example = "7876af07-990d-54b4-ab0e-23690620f79a" julia> print(read(joinpath("MyProject", "Manifest.toml"), String)) # This file is machine-generated - editing it directly is not advised -julia_version = "1.9.4" +julia_version = "1.10.0" manifest_format = "2.0" project_hash = "2ca1c6c58cb30e79e021fb54e5626c96d05d5fdc" @@ -66,7 +66,7 @@ shell> git clone https://github.com/JuliaLang/Example.jl.git Cloning into 'Example.jl'... ... -(@v1.12) pkg> activate Example.jl +(@v1.10) pkg> activate Example.jl Activating project at `~/Example.jl` (Example) pkg> instantiate @@ -82,7 +82,22 @@ If you only have a `Project.toml`, a `Manifest.toml` must be generated by "resol If you already have a resolved `Manifest.toml`, then you will still need to ensure that the packages are installed and with the correct versions. Again `instantiate` does this for you. -In short, `instantiate` is your friend to make sure an environment is ready to use. If there's nothing to do, `instantiate` does nothing. +In short, [`instantiate`](@ref Pkg.instantiate) is your friend to make sure an environment is ready to use. If there's nothing to do, `instantiate` does nothing. + +## Returning to the default environment + +To return to the default environment after working in a project environment, simply call `activate` with no arguments: + +```julia-repl +(MyProject) pkg> activate + Activating project at `~/.julia/environments/v1.10` + +(@v1.10) pkg> +``` + +This deactivates the current project and returns you to the default shared environment (typically `@v#.#`). +There is no separate "deactivate" command—calling `activate()` with no arguments is how you return to your +base package setup. This only affects the current Julia session; the change does not persist when you restart Julia. !!! 
note "Specifying project on startup" Instead of using `activate` from within Julia, you can specify the project on startup using @@ -103,7 +118,7 @@ also want a scratch space to try out a new package, or a sandbox to resolve vers between several incompatible packages. ```julia-repl -(@v1.9) pkg> activate --temp # requires Julia 1.5 or later +(@v1.10) pkg> activate --temp # requires Julia 1.5 or later Activating new environment at `/var/folders/34/km3mmt5930gc4pzq1d08jvjw0000gn/T/jl_a31egx/Project.toml` (jl_a31egx) pkg> add Example @@ -117,18 +132,18 @@ between several incompatible packages. ## Shared environments -A "shared" environment is simply an environment that exists in `~/.julia/environments`. The default `v1.9` environment is +A "shared" environment is simply an environment that exists in `~/.julia/environments`. The default `v1.10` environment is therefore a shared environment: ```julia-repl -(@v1.9) pkg> st -Status `~/.julia/environments/v1.9/Project.toml` +(@v1.10) pkg> st +Status `~/.julia/environments/v1.10/Project.toml` ``` Shared environments can be activated with the `--shared` flag to `activate`: ```julia-repl -(@v1.9) pkg> activate --shared mysharedenv +(@v1.10) pkg> activate --shared mysharedenv Activating project at `~/.julia/environments/mysharedenv` (@mysharedenv) pkg> @@ -151,7 +166,7 @@ or using Pkg's precompile option, which can precompile the entire environment, o which can be significantly faster than the code-load route above. ```julia-repl -(@v1.9) pkg> precompile +(@v1.10) pkg> precompile Precompiling environment... 23 dependencies successfully precompiled in 36 seconds ``` @@ -165,11 +180,11 @@ By default, any package that is added to a project or updated in a Pkg action wi with its dependencies. ```julia-repl -(@v1.9) pkg> add Images +(@v1.10) pkg> add Images Resolving package versions... 
- Updating `~/.julia/environments/v1.9/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [916415d5] + Images v0.25.2 - Updating `~/.julia/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` ... Precompiling environment... Progress [===================> ] 45/97 @@ -190,16 +205,73 @@ If a given package version errors during auto-precompilation, Pkg will remember automatically tries and will skip that package with a brief warning. Manual precompilation can be used to force these packages to be retried, as `pkg> precompile` will always retry all packages. -To disable the auto-precompilation, set `ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0`. - The indicators next to the package names displayed during precompilation -indicate the status of that package's precompilation. +indicate the status of that package's precompilation. - `[◐, ◓, ◑, ◒]` Animated "clock" characters indicate that the package is currently being precompiled. - `✓` A green checkmark indicates that the package has been successfully precompiled (after which that package will disappear from the list). If the checkmark is yellow it means that the package is currently loaded so the session will need to be restarted to access the version that was just precompiled. - `?` A question mark character indicates that a `PrecompilableError` was thrown, indicating that precompilation was disallowed, i.e. `__precompile__(false)` in that package. - `✗` A cross indicates that the package failed to precompile. +#### Keyboard Controls and Background Precompilation + +!!! compat "Julia 1.14" + Keyboard controls and background detach during precompilation are available in Julia 1.14 and later. + +In interactive sessions, precompilation displays live progress with keyboard controls available: + +- **`d`/`q`/`]`** — Detach. Returns to the REPL while precompilation continues silently in the background. +- **`c`** — Cancel. Kills all subprocesses; prompts for Enter to confirm. 
+- **`i`** — Info. Sends a profiling signal to subprocesses for a profile peek without interrupting compilation. +- **`v`** — Toggle verbose mode. Shows timing, worker PID, CPU%, and memory per compiling package. +- **`?`/`h`** — Show keyboard shortcut help. +- **Ctrl-C** — Interrupt. Sends SIGINT to subprocesses and displays their output. + +After pressing `d` to detach, you can manage the background precompilation using: + +- `pkg> precompile --monitor`: Reattach to see live progress. Press `d` again to detach. +- `pkg> precompile --stop`: Gracefully stop background precompilation (waits for active jobs to finish). +- `pkg> precompile --cancel`: Immediately cancel background precompilation (interrupts active jobs). + +#### Controlling Auto-precompilation + +Auto-precompilation can be controlled in several ways: + +- **Environment variable**: Set `ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0` to disable auto-precompilation globally. +- **Programmatically**: Use `Pkg.autoprecompilation_enabled(false)` to disable auto-precompilation for the current session, or `Pkg.autoprecompilation_enabled(true)` to re-enable it. +- **Scoped control**: Use `Pkg.precompile(f, args...; kwargs...)` to execute a function `f` with auto-precompilation temporarily disabled, then automatically trigger precompilation afterward if any packages were modified during the execution. + +!!! compat "Julia 1.13" + The `Pkg.autoprecompilation_enabled()` function and `Pkg.precompile()` do-block syntax require at least Julia 1.13. + +For example, to add multiple packages without triggering precompilation after each one: + +```julia-repl +julia> Pkg.precompile() do + Pkg.add("Example") + Pkg.dev("JSON") + Pkg.update("HTTP") + end + Resolving package versions... + ... +Precompiling environment... 
+ 14 dependencies successfully precompiled in 25 seconds +``` + +Or to temporarily disable auto-precompilation: + +```julia-repl +julia> Pkg.autoprecompilation_enabled(false) +false + +julia> Pkg.add("Example") # No precompilation happens + Resolving package versions... + ... + +julia> Pkg.autoprecompilation_enabled(true) +true +``` + ### Precompiling new versions of loaded packages If a package that has been updated is already loaded in the session, the precompilation process will go ahead and precompile diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index 58693bc583..124600049f 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -22,18 +22,18 @@ To get back to the Julia REPL, press `Ctrl+C` or backspace (when the REPL cursor Upon entering the Pkg REPL, you should see the following prompt: ```julia-repl -(@v1.9) pkg> +(@v1.10) pkg> ``` To add a package, use `add`: ```julia-repl -(@v1.9) pkg> add Example +(@v1.10) pkg> add Example Resolving package versions... Installed Example ─ v0.5.3 - Updating `~/.julia/environments/v1.9/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] + Example v0.5.3 - Updating `~/.julia/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [7876af07] + Example v0.5.3 ``` @@ -49,14 +49,14 @@ julia> Example.hello("friend") We can also specify multiple packages at once to install: ```julia-repl -(@v1.9) pkg> add JSON StaticArrays +(@v1.10) pkg> add JSON StaticArrays ``` The `status` command (or the shorter `st` command) can be used to see installed packages. 
```julia-repl -(@v1.9) pkg> st -Status `~/.julia/environments/v1.6/Project.toml` +(@v1.10) pkg> st +Status `~/.julia/environments/v1.10/Project.toml` [7876af07] Example v0.5.3 [682c06a0] JSON v0.21.3 [90137ffa] StaticArrays v1.5.9 @@ -68,13 +68,13 @@ Status `~/.julia/environments/v1.6/Project.toml` To remove packages, use `rm` (or `remove`): ```julia-repl -(@v1.9) pkg> rm JSON StaticArrays +(@v1.10) pkg> rm JSON StaticArrays ``` Use `up` (or `update`) to update the installed packages ```julia-repl -(@v1.9) pkg> up +(@v1.10) pkg> up ``` If you have been following this guide it is likely that the packages installed are at the latest version @@ -82,13 +82,13 @@ so `up` will not do anything. Below we show the status output in the case where an old version of the Example package and then upgrade it: ```julia-repl -(@v1.9) pkg> st -Status `~/.julia/environments/v1.9/Project.toml` +(@v1.10) pkg> st +Status `~/.julia/environments/v1.10/Project.toml` ⌃ [7876af07] Example v0.5.1 Info Packages marked with ⌃ have new versions available and may be upgradable. -(@v1.9) pkg> up - Updating `~/.julia/environments/v1.9/Project.toml` +(@v1.10) pkg> up + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ↑ Example v0.5.1 ⇒ v0.5.3 ``` @@ -101,8 +101,8 @@ For more information about managing packages, see the [Managing Packages](@ref M Up to this point, we have covered basic package management: adding, updating, and removing packages. -You may have noticed the `(@v1.9)` in the REPL prompt. -This lets us know that `v1.9` is the **active environment**. +You may have noticed the `(@v1.10)` in the REPL prompt. +This lets us know that `v1.10` is the **active environment**. Different environments can have totally different packages and versions installed from another environment. The active environment is the environment that will be modified by Pkg commands such as `add`, `rm` and `update`. @@ -110,7 +110,7 @@ Let's set up a new environment so we may experiment. 
To set the active environment, use `activate`: ```julia-repl -(@v1.9) pkg> activate tutorial +(@v1.10) pkg> activate tutorial [ Info: activating new environment at `~/tutorial/Project.toml`. ``` @@ -159,6 +159,17 @@ We can see that the `tutorial` environment now contains `Example` and `JSON`. Julia. Learning how to use environments effectively will improve your experience with Julia packages. +When you're done working in a specific environment and want to return to the default environment, use `activate` with no arguments: + +```julia-repl +(tutorial) pkg> activate + Activating project at `~/.julia/environments/v1.10` + +(@v1.10) pkg> +``` + +This returns you to the default `@v1.10` environment. There is no separate "deactivate" command—`activate` with no arguments serves this purpose. + For more information about environments, see the [Working with Environments](@ref Working-with-Environments) section of the documentation. ## Asking for Help @@ -166,16 +177,16 @@ For more information about environments, see the [Working with Environments](@re If you are ever stuck, you can ask `Pkg` for help: ```julia-repl -(@v1.9) pkg> ? +(@v1.10) pkg> ? ``` You should see a list of available commands along with short descriptions. You can ask for more detailed help by specifying a command: ```julia-repl -(@v1.9) pkg> ?develop +(@v1.10) pkg> ?develop ``` This guide should help you get started with `Pkg`. -`Pkg` has much more to offer in terms of powerful package management, -read the full manual to learn more! +`Pkg` has much more to offer in terms of powerful package management. +For more advanced topics, see [Managing Packages](@ref Managing-Packages), [Working with Environments](@ref Working-with-Environments), and [Creating Packages](@ref creating-packages-tutorial). 
diff --git a/docs/src/glossary.md b/docs/src/glossary.md index 60e0546039..44c394ef66 100644 --- a/docs/src/glossary.md +++ b/docs/src/glossary.md @@ -1,4 +1,4 @@ -# [**9.** Glossary](@id Glossary) +# [**10.** Glossary](@id Glossary) **Project:** a source tree with a standard layout, including a `src` directory for the main body of Julia code, a `test` directory for testing the project, @@ -14,8 +14,8 @@ may optionally have a manifest file: - **Manifest file:** a file in the root directory of a project, named `Manifest.toml` (or `JuliaManifest.toml`), describing a complete dependency graph and exact versions of each package and library used by a project. The file name may - also be suffixed by `-v{major}.{minor}.toml` which julia will prefer if the version - matches `VERSION`, allowing multiple environments to be maintained for different julia + also be suffixed by `-v{major}.{minor}.toml` which Julia will prefer if the version + matches `VERSION`, allowing multiple environments to be maintained for different Julia versions. **Package:** a project which provides reusable functionality that can be used by @@ -29,6 +29,15 @@ identify the package in projects that depend on it. to load a package without a project file or UUID from a project with them. Once you've loaded from a project file, everything needs a project file and UUID. +!!! note + **Packages vs. Modules:** A *package* is a source tree with a `Project.toml` file + and other components that Pkg can install and manage. A *module* is a Julia language + construct (created with the `module` keyword) that provides a namespace for code. + Typically, a package contains a module of the same name (e.g., the `DataFrames` package + contains a `DataFrames` module), but they are distinct concepts: the package is the + distributable unit that Pkg manages, while the module is the namespace that your code + interacts with using `import` or `using`. 
+ **Application:** a project which provides standalone functionality not intended to be reused by other Julia projects. For example a web application or a command-line utility, or simulation/analytics code accompanying a scientific paper. @@ -46,7 +55,7 @@ since that could conflict with the configuration of the main application. **Environment:** the combination of the top-level name map provided by a project file combined with the dependency graph and map from packages to their entry points -provided by a manifest file. For more detail see the manual section on code loading. +provided by a manifest file. For more detail see the [manual section on code loading](https://docs.julialang.org/en/v1/manual/code-loading/). - **Explicit environment:** an environment in the form of an explicit project file and an optional corresponding manifest file together in a directory. If the @@ -107,7 +116,7 @@ Julia's code loading mechanisms, look for registries, installed packages, named environments, repo clones, cached compiled package images, and configuration files. The depot path is controlled by the Julia `DEPOT_PATH` global variable which is populated at startup based on the value of the `JULIA_DEPOT_PATH` -environment variable. The first entry is the “user depot” and should be writable +environment variable. The first entry is the "user depot" and should be writable by and owned by the current user. The user depot is where: registries are cloned, new package versions are installed, named environments are created and updated, package repositories are cloned, newly compiled package image files are saved, @@ -115,3 +124,15 @@ log files are written, development packages are checked out by default, and global configuration data is saved. Later entries in the depot path are treated as read-only and are appropriate for registries, packages, etc. installed and managed by system administrators. 
+ +**Materialize:** the process of installing all packages and dependencies specified +in a manifest file to recreate an exact environment state. When you +`instantiate` a project, Pkg materializes its environment by downloading and +installing all the exact package versions recorded in the `Manifest.toml` file. +This ensures reproducibility across different machines and users. + +**Canonical:** refers to a single, authoritative location for each specific +version of a package. When the same package version is used by multiple +environments, Pkg stores it in one canonical location and all environments +reference that same location, rather than duplicating the package files. This +saves disk space and ensures consistency. diff --git a/docs/src/index.md b/docs/src/index.md index e51ffcec71..da1aa13c4f 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -4,6 +4,23 @@ Welcome to the documentation for Pkg, [Julia](https://julialang.org)'s package m The documentation covers many things, for example managing package installations, developing packages, working with package registries and more. +```@eval +import Markdown +# For Pkg, we need to determine the appropriate Julia version for the PDF +# Since Pkg docs are versioned by Julia version, we'll use a similar approach to Julia docs +julia_patch = if VERSION.prerelease == () + "v$(VERSION.major).$(VERSION.minor).$(VERSION.patch)" +elseif VERSION.prerelease[1] == "DEV" + "dev" +end +file = "Pkg.jl.pdf" +url = "https://raw.githubusercontent.com/JuliaLang/Pkg.jl/gh-pages-pdf/$(julia_patch)/$(file)" +Markdown.parse(""" +!!! note + The documentation is also available in PDF format: [$file]($url). +""") +``` + Throughout the manual the REPL interface to Pkg, the Pkg REPL mode, is used in the examples. There is also a functional API, which is preferred when not working interactively. This API is documented in the [API Reference](@ref) section. 
diff --git a/docs/src/managing-packages.md b/docs/src/managing-packages.md index b5889221cf..8dbc6fc9e9 100644 --- a/docs/src/managing-packages.md +++ b/docs/src/managing-packages.md @@ -10,25 +10,26 @@ The most frequently used is `add` and its usage is described first. In the Pkg REPL, packages can be added with the `add` command followed by the name of the package, for example: ```julia-repl -(@v1.8) pkg> add JSON - Installing known registries into `~/` +(@v1.13) pkg> add JSON Resolving package versions... - Installed Parsers ─ v2.4.0 - Installed JSON ──── v0.21.3 - Updating `~/.julia/environments/v1.8/Project.toml` - [682c06a0] + JSON v0.21.3 - Updating `~/environments/v1.9/Manifest.toml` - [682c06a0] + JSON v0.21.3 - [69de0a69] + Parsers v2.4.0 - [ade2ca70] + Dates - [a63ad114] + Mmap - [de0858da] + Printf - [4ec0a83e] + Unicode -Precompiling environment... - 2 dependencies successfully precompiled in 2 seconds -``` - -Here we added the package `JSON` to the current environment (which is the default `@v1.8` environment). + Updating `~/.julia/environments/v1.13/Project.toml` + [682c06a0] + JSON v0.21.4 + Updating `~/.julia/environments/v1.13/Manifest.toml` + [682c06a0] + JSON v0.21.4 + [69de0a69] + Parsers v2.8.3 + [aea7be01] + PrecompileTools v1.3.2 + [21216c6a] + Preferences v1.5.0 + [ade2ca70] + Dates v1.11.0 + [a63ad114] + Mmap v1.11.0 + [de0858da] + Printf v1.11.0 + [9a3f8284] + Random v1.11.0 + [ea8e919c] + SHA v0.7.0 + [fa267f1f] + TOML v1.0.3 + [cf7118a7] + UUIDs v1.11.0 + [4ec0a83e] + Unicode v1.11.0 +``` + +Here we added the package `JSON` to the current environment (which is the default `@v1.10` environment). In this example, we are using a fresh Julia installation, and this is our first time adding a package using Pkg. By default, Pkg installs the General registry and uses this registry to look up packages requested for inclusion in the current environment. 
@@ -40,42 +41,48 @@ It is possible to add multiple packages in one command as `pkg> add A B C`. The status output contains the packages you have added yourself, in this case, `JSON`: ```julia-repl -(@v1.11) pkg> st - Status `~/.julia/environments/v1.8/Project.toml` - [682c06a0] JSON v0.21.3 +(@v1.13) pkg> st +Status `~/.julia/environments/v1.13/Project.toml` + [682c06a0] JSON v0.21.4 ``` The manifest status shows all the packages in the environment, including recursive dependencies: ```julia-repl -(@v1.11) pkg> st -m -Status `~/environments/v1.9/Manifest.toml` - [682c06a0] JSON v0.21.3 - [69de0a69] Parsers v2.4.0 - [ade2ca70] Dates - [a63ad114] Mmap - [de0858da] Printf - [4ec0a83e] Unicode -``` - -Since standard libraries (e.g. ` Dates`) are shipped with Julia, they do not have a version. +(@v1.13) pkg> st -m +Status `~/.julia/environments/v1.13/Manifest.toml` + [682c06a0] JSON v0.21.4 + [69de0a69] Parsers v2.8.3 + [aea7be01] PrecompileTools v1.3.2 + [21216c6a] Preferences v1.5.0 + [ade2ca70] Dates v1.11.0 + [a63ad114] Mmap v1.11.0 + [de0858da] Printf v1.11.0 + [9a3f8284] Random v1.11.0 + [ea8e919c] SHA v0.7.0 + [fa267f1f] TOML v1.0.3 + [cf7118a7] UUIDs v1.11.0 + [4ec0a83e] Unicode v1.11.0 +``` + +Note that before 1.11 standard libraries (e.g. ` Dates`) did not have dedicated version numbers. To specify that you want a particular version (or set of versions) of a package, use the `compat` command. For example, to require any patch release of the v0.21 series of JSON after v0.21.4, call `compat JSON 0.21.4`: ```julia-repl -(@1.11) pkg> compat JSON 0.21.4 +(@v1.10) pkg> compat JSON 0.21.4 Compat entry set: JSON = "0.21.4" Resolve checking for compliance with the new compat rules... Error empty intersection between JSON@0.21.3 and project compatibility 0.21.4 - 0.21 Suggestion Call `update` to attempt to meet the compatibility requirements. 
-(@1.11) pkg> update +(@v1.10) pkg> update Updating registry at `~/.julia/registries/General.toml` - Updating `~/.julia/environments/1.11/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [682c06a0] ↑ JSON v0.21.3 ⇒ v0.21.4 - Updating `~/.julia/environments/1.11/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [682c06a0] ↑ JSON v0.21.3 ⇒ v0.21.4 ``` @@ -96,11 +103,11 @@ julia> JSON.json(Dict("foo" => [1, "bar"])) |> print A specific version of a package can be installed by appending a version after a `@` symbol to the package name: ```julia-repl -(@v1.8) pkg> add JSON@0.21.1 +(@v1.10) pkg> add JSON@0.21.1 Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` ⌃ [682c06a0] + JSON v0.21.1 - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` ⌃ [682c06a0] + JSON v0.21.1 ⌅ [69de0a69] + Parsers v1.1.2 [ade2ca70] + Dates @@ -118,12 +125,12 @@ If a branch (or a certain commit) of `Example` has a hotfix that is not yet incl we can explicitly track that branch (or commit) by appending `#branchname` (or `#commitSHA1`) to the package name: ```julia-repl -(@v1.8) pkg> add Example#master +(@v1.10) pkg> add Example#master Cloning git-repo `https://github.com/JuliaLang/Example.jl.git` Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] + Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [7876af07] + Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ``` @@ -139,12 +146,12 @@ When updating packages, updates are pulled from that branch. 
To go back to tracking the registry version of `Example`, the command `free` is used: ```julia-repl -(@v1.8) pkg> free Example +(@v1.10) pkg> free Example Resolving package versions... Installed Example ─ v0.5.3 - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ~ Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v0.5.3 - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [7876af07] ~ Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v0.5.3 ``` @@ -153,12 +160,12 @@ To go back to tracking the registry version of `Example`, the command `free` is If a package is not in a registry, it can be added by specifying a URL to the Git repository: ```julia-repl -(@v1.8) pkg> add https://github.com/fredrikekre/ImportMacros.jl +(@v1.10) pkg> add https://github.com/fredrikekre/ImportMacros.jl Cloning git-repo `https://github.com/fredrikekre/ImportMacros.jl` Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [92a963f6] + ImportMacros v1.0.0 `https://github.com/fredrikekre/ImportMacros.jl#master` - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [92a963f6] + ImportMacros v1.0.0 `https://github.com/fredrikekre/ImportMacros.jl#master` ``` @@ -167,7 +174,7 @@ For unregistered packages, we could have given a branch name (or commit SHA1) to If you want to add a package using the SSH-based `git` protocol, you have to use quotes because the URL contains a `@`. For example, ```julia-repl -(@v1.8) pkg> add "git@github.com:fredrikekre/ImportMacros.jl.git" +(@v1.10) pkg> add "git@github.com:fredrikekre/ImportMacros.jl.git" Cloning git-repo `git@github.com:fredrikekre/ImportMacros.jl.git` Updating registry at `~/.julia/registries/General` Resolving package versions... 
@@ -188,7 +195,7 @@ repository: pkg> add https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore Cloning git-repo `https://github.com/timholy/SnoopCompile.jl.git` Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [e2b509da] + SnoopCompileCore v2.9.0 `https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore#master` Updating `~/.julia/environments/v1.8/Manifest.toml` [e2b509da] + SnoopCompileCore v2.9.0 `https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore#master` @@ -214,15 +221,15 @@ from that local repo are pulled when packages are updated. By only using `add` your environment always has a "reproducible state", in other words, as long as the repositories and registries used are still accessible it is possible to retrieve the exact state of all the dependencies in the environment. This has the advantage that you can send your environment (`Project.toml` and `Manifest.toml`) to someone else and they can [`Pkg.instantiate`](@ref) that environment in the same state as you had it locally. -However, when you are developing a package, it is more convenient to load packages at their current state at some path. For this reason, the `dev` command exists. +However, when you are [developing a package](@ref developing), it is more convenient to load packages at their current state at some path. For this reason, the `dev` command exists. Let's try to `dev` a registered package: ```julia-repl -(@v1.8) pkg> dev Example +(@v1.10) pkg> dev Example Updating git-repo `https://github.com/JuliaLang/Example.jl.git` Resolving package versions... 
- Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] + Example v0.5.4 `~/.julia/dev/Example` Updating `~/.julia/environments/v1.8/Manifest.toml` [7876af07] + Example v0.5.4 `~/.julia/dev/Example` @@ -263,9 +270,9 @@ julia> Example.plusone(1) To stop tracking a path and use the registered version again, use `free`: ```julia-repl -(@v1.8) pkg> free Example +(@v1.10) pkg> free Example Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ~ Example v0.5.4 `~/.julia/dev/Example` ⇒ v0.5.3 Updating `~/.julia/environments/v1.8/Manifest.toml` [7876af07] ~ Example v0.5.4 `~/.julia/dev/Example` ⇒ v0.5.3 @@ -300,29 +307,29 @@ When new versions of packages are released, it is a good idea to update. Simply to the latest compatible version. Sometimes this is not what you want. You can specify a subset of the dependencies to upgrade by giving them as arguments to `up`, e.g: ```julia-repl -(@v1.8) pkg> up Example +(@v1.10) pkg> up Example ``` This will only allow Example do upgrade. If you also want to allow dependencies of Example to upgrade (with the exception of packages that are in the project) you can pass the `--preserve=direct` flag. ```julia-repl -(@v1.8) pkg> up --preserve=direct Example +(@v1.10) pkg> up --preserve=direct Example ``` And if you also want to allow dependencies of Example that are also in the project to upgrade, you can use `--preserve=none`: ```julia-repl -(@v1.8) pkg> up --preserve=none Example +(@v1.10) pkg> up --preserve=none Example ``` ## Pinning a package A pinned package will never be updated. A package can be pinned using `pin`, for example: ```julia-repl -(@v1.8) pkg> pin Example +(@v1.10) pkg> pin Example Resolving package versions... 
- Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ~ Example v0.5.3 ⇒ v0.5.3 ⚲ Updating `~/.julia/environments/v1.8/Manifest.toml` [7876af07] ~ Example v0.5.3 ⇒ v0.5.3 ⚲ @@ -331,8 +338,8 @@ A pinned package will never be updated. A package can be pinned using `pin`, for Note the pin symbol `⚲` showing that the package is pinned. Removing the pin is done using `free` ```julia-repl -(@v1.8) pkg> free Example - Updating `~/.julia/environments/v1.8/Project.toml` +(@v1.10) pkg> free Example + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ~ Example v0.5.3 ⚲ ⇒ v0.5.3 Updating `~/.julia/environments/v1.8/Manifest.toml` [7876af07] ~ Example v0.5.3 ⚲ ⇒ v0.5.3 @@ -343,7 +350,7 @@ Note the pin symbol `⚲` showing that the package is pinned. Removing the pin i The tests for a package can be run using `test` command: ```julia-repl -(@v1.8) pkg> test Example +(@v1.10) pkg> test Example ... Testing Example Testing Example tests passed @@ -356,7 +363,7 @@ The output of the build process is directed to a file. To explicitly run the build step for a package, the `build` command is used: ```julia-repl -(@v1.8) pkg> build IJulia +(@v1.10) pkg> build IJulia Building Conda ─→ `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/6e47d11ea2776bc5627421d59cdcc1296c058071/build.log` Building IJulia → `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/98ab633acb0fe071b671f6c1785c46cd70bb86bd/build.log` @@ -486,7 +493,31 @@ To fix such errors, you have a number of options: - remove either `A` or `B` from your environment. Perhaps `B` is left over from something you were previously working on, and you don't need it anymore. If you don't need `A` and `B` at the same time, this is the easiest way to fix the problem. - try reporting your conflict. In this case, we were able to deduce that `B` requires an outdated version of `D`. 
You could thus report an issue in the development repository of `B.jl` asking for an updated version. - try fixing the problem yourself. - This becomes easier once you understand `Project.toml` files and how they declare their compatibility requirements. We'll return to this example in [Fixing conflicts](@ref). + This becomes easier once you understand `Project.toml` files and how they declare their compatibility requirements. We'll return to this example in [Fixing conflicts](@ref Fixing-conflicts). + +## Yanked packages + +Package registries can mark specific versions of packages as "yanked". A yanked package version +is one that should no longer be used, typically because it contains serious bugs, security +vulnerabilities, or other critical issues. When a package version is yanked, it becomes +unavailable for new installations but remains accessible (i.e. via `instantiate`) to maintain reproducibility +of existing environments. + +When you run `pkg> status`, yanked packages are clearly marked with a warning symbol: + +```julia-repl +(@v1.13) pkg> status + Status `~/.julia/environments/v1.13/Project.toml` + [682c06a0] JSON v0.21.3 + [f4259836] Example v1.2.0 [yanked] +``` + +The `[yanked]` annotation indicates that version `v1.2.0` of the `Example` package +has been yanked and should be updated or replaced. + +When resolving dependencies, Pkg will warn you if yanked packages are present and may provide +guidance on how to resolve the situation. It's important to address yanked packages promptly +to ensure the security and stability of your Julia environment. ## Garbage collecting old, unused packages @@ -502,7 +533,7 @@ If you are short on disk space and want to clean out as many unused packages and To run a typical garbage collection with default arguments, simply use the `gc` command at the `pkg>` REPL: ```julia-repl -(@v1.8) pkg> gc +(@v1.10) pkg> gc Active manifests at: `~/BinaryProvider/Manifest.toml` ...
diff --git a/docs/src/protocol.md b/docs/src/protocol.md new file mode 100644 index 0000000000..c3503c2d96 --- /dev/null +++ b/docs/src/protocol.md @@ -0,0 +1,203 @@ +# [**14.** Package and Storage Server Protocol Reference](@id Pkg-Server-Protocols) + +The Julia Package Server Protocol (Pkg Protocol) and the Package Storage Server Protocol (Storage Protocol) define how Julia's package manager, Pkg, obtains and manages packages and their associated resources. They aim to enhance the Julia package ecosystem, making it more efficient, reliable, and user-friendly, avoiding potential points of failure, and ensuring the permanent availability of package versions and artifacts, which is paramount for the stability and reproducibility of Julia projects. + +The Pkg client, by default, gets all resources over HTTPS from a single open source service run by the Julia community. This service for serving packages is additionally backed by multiple independent storage services which interface with proprietary origin services (GitHub, etc.) and guarantee persistent availability of resources into the future. + +The protocols also aim to address some of the limitations that existed prior to its introduction. + +- **Vanishing Resources.** It is possible for authors to delete code repositories of registered Julia packages. Without some kind of package server, no one can install a package which has been deleted. If someone happens to have a current fork of a deleted package, that can be made the new official repository for the package, but the chances of them having no or outdated forks are high. An even worse situation could happen for artifacts since they tend not to be kept in version control and are much more likely to be served from "random" web servers at a fixed URL with content changing over time. 
Artifact publishers are unlikely to retain all past versions of artifacts, so old versions of packages that depend on specific artifact content will not be reproducible in the future unless we do something to ensure that they are kept around after the publisher has stopped hosting them. By storing all package versions and artifacts in a single place, we can ensure that they are available forever. +- **Usage Insights.** It is valuable for the Julia community to know how many people are using Julia or what the relative popularity of different packages and operating systems is. Julia uses GitHub to host its ecosystem. GitHub - a commercial, proprietary service - has this information but does not make it available to the Julia community. We are of course using GitHub for free, so we can't complain, but it seems unfortunate that a commercial entity has this valuable information while the open source community remains in the dark. The Julia community really could use insight into who is using Julia and how, so that we can prioritize packages and platforms, and give real numbers when people ask "how many people are using Julia?" +- **Decoupling from Git and GitHub.** Prior to this, Julia package ecosystem was very deeply coupled to git and was even specialized on GitHub specifically in many ways. The Pkg and Storage Protocols allowed us to decouple ourselves from git as the primary mechanism for getting packages. Now Julia continues to support using git, but does not require it just to install packages from the default public registry anymore. This decoupling also paves the way for supporting other version control systems in the future, making git no longer so special. Special treatment of GitHub will also go away since we get the benefits of specializing for GitHub (fast tarball downloads) directly from the Pkg protocols. 
+- **Firewall problems.** Prior to this, Pkg's need to connect to arbitrary servers using a miscellany of protocols caused several problems with firewalls. A large set of protocols and an unbounded list of servers needed to be whitelisted just to support default Pkg operation. If Pkg only needed to talk to a single service over a single, secure protocol (i.e. HTTPS), then whitelisting Pkg for standard use would be dead simple. + +## Protocols & Services + +1. **Pkg Protocol:** what Julia Pkg Clients speak to Pkg Servers. The Pkg Server serves all resources that Pkg Clients need to install and use registered packages, including registry data, packages and artifacts. It is designed to be easily horizontally scalable and not to have any hard operational requirements: if service is slow, just start more servers; if a Pkg Server crashes, forget it and boot up a new one. +2. **Storage Protocol:** what Pkg Servers speak to get resources from Storage Services. Julia clients do not interact with Storage services directly and multiple independent Storage Services can symmetrically (all are treated equally) provide their service to a given Pkg Server. Since Pkg Servers cache what they serve to Clients and handle convenient content presentation, Storage Services can expose a much simpler protocol: all they do is serve up complete versions of registries, packages and artifacts, while guaranteeing persistence and completeness. Persistence means: once a version of a resource has been served, that version can be served forever. Completeness means: if the service serves a registry, it can serve all package versions referenced by that registry; if it serves a package version, it can serve all artifacts used by that package. + +Both protocols work over HTTPS, using only GET and HEAD requests. As is normal for HTTP, HEAD requests are used to get information about a resource, including whether it would be served, without actually downloading it. 
As described in what follows, the Pkg Protocol is client-to-server and may be unauthenticated, use basic auth, or OpenID; the Storage Protocol is server-to-server only and uses mutual authentication with TLS certificates. + +The following diagram shows how these services interact with each other and with external services such as GitHub, GitLab and BitBucket for source control, and S3 and HDFS for long-term persistence: + + ┌───────────┐ + + │ Amazon S3 │ + + │ Storage │ + + └───────────┘ + + ▲ + + ║ + + ▼ + + Storage ╔═══════════╗ ┌───────────┐ + + Pkg Protocol ║ Storage ║ ┌──▶│ GitHub │ + + Protocol ┌──▶║ Service A ║───┤ └───────────┘ + + ┏━━━━━━━━━━━━┓ ┏━━━━━━━━━━━━┓ │ ╚═══════════╝ │ ┌───────────┐ + + ┃ Pkg Client ┃────▶┃ Pkg Server ┃───┤ ╔═══════════╗ ├──▶│ GitLab │ + + ┗━━━━━━━━━━━━┛ ┗━━━━━━━━━━━━┛ │ ║ Storage ║ │ └───────────┘ + + └──▶║ Service B ║───┤ ┌───────────┐ + + ╚═══════════╝ └──▶│ BitBucket │ + + ▲ └───────────┘ + + ║ + + ▼ + + ┌───────────┐ + + │ HDFS │ + + │ Cluster │ + + └───────────┘ + +Each Julia Pkg Client is configured to talk to a Pkg Server. By default, they talk to `pkg.julialang.org`, a public, unauthenticated Pkg Server. If the environment variable `JULIA_PKG_SERVER` is set, the Pkg Client connects to that host instead. For example, if `JULIA_PKG_SERVER` is set to `pkg.company.com` then the Pkg Client will connect to `https://pkg.company.com`. So in typical operation, a Pkg Client will no longer rely on `libgit2` or a git command-line client, both of which have been an ongoing headache, especially behind firewalls and on Windows. If fact, git will only be necessary when working with git-hosted registries and unregistered packages - those will continue to work as they have previously, fetched using git. + +While the default Pkg Server at `pkg.julialang.org` is unauthenticated, other parties may host Pkg Server instances elsewhere, authenticated or unauthenticated, public or private, as they wish. 
People can connect to those servers by setting the `JULIA_PKG_SERVER` variable. There will be a configuration file for providing authentication information to Pkg Servers using either basic auth or OpenID. The Pkg Server implementation will be open source and have minimal operational requirements. Specifically, it needs: + +1. The ability to accept incoming connections on port 443; +2. The ability to connect to a configurable set of Storage Services; +3. Temporary disk storage for caching resources (registries, packages, artifacts). + +A Pkg Service may be backed by more than one actual server, as is typical for web services. The Pkg Service is stateless, so this kind of horizontal scaling is straightforward. Each Pkg Server serves registry, package and artifact resources to Pkg Clients and caches whatever it serves. Each Pkg Server, in turn, gets those resources from one or more Storage Services. Storage services are responsible for fetching resources from code hosting sites like GitHub, GitLab and BitBucket, and for persisting everything that they have ever served to long-term storage systems like Amazon S3, hosted HDFS clusters - or whatever an implementor wants to use. If the original copies of resources vanish, Pkg Servers must always serve up all previously served versions of resources. + +The Storage Protocol is designed to be extremely simple so that multiple independent implementations can coexist, and each Pkg Server may be symmetrically backed by multiple different Storage Services, providing both redundant backup and ensuring that no single implementation has a "choke hold" on the ecosystem - anyone can implement a new Storage Service and add it to the set of services backing the default Pkg Server at `pkg.julialang.org`. The simplest possible version of a Storage Service is a static HTTPS site serving files generated from a snapshot of a registry. 
Although this does not provide adequate long-term backup capabilities, and would need to be regenerated whenever a registry changes, it may be sufficient for some private uses. Having multiple independently operated Storage Services helps ensure that even if one Storage Service becomes unavailable or unreliable - for technical, financial, or political reasons - others will keep operating and so will the Pkg ecosystem. + +## The Pkg Protocol + +This section describes the protocol used by Pkg Clients to get resources from Pkg Servers, including the latest versions of registries, package source trees, and artifacts. There is also a standard system for asking for diffs of all of these from previous versions, to minimize how much data the client needs to download in order to update itself. There is additionally a bundle mechanism for requesting and receiving a set of resources in a single request. + +### Authentication + +The authentication scheme between a Pkg client and server will be HTTP authorization with bearer tokens, as standardized in RFC6750. This means that authenticated access is accomplished by the client by making an HTTPS request including a `Authorization: Bearer $access_token` header. + +The format of the token, its contents and validation mechanism are not specified by the Pkg Protocol. They are left to the server to define. The server is expected to validate the token and determine whether the client is authorized to access the requested resource. Similarly at the client side, the implementation of the token acquisition is not specified by the Pkg Protocol. However Pkg provides [hooks](#Authentication-Hooks) that can be implemented at the client side to trigger the token acquisition process. Tokens thus acquired are expected to be stored in a local file, the format of which is specified by the Pkg Protocol. Pkg will be able to read the token from this file and include it in the request to the server. 
Pkg can also, optionally, detect when the token is about to expire and trigger a refresh. The Pkg client also supports automatic token refresh, since bearer tokens are recommended to be short-lived (no more than a day). + +The authorization information is saved locally in `$(DEPOT_PATH[1])/servers/$server/auth.toml` which is a TOML file with the following fields: + +- `access_token` (REQUIRED): the bearer token used to authorize normal requests +- `expires_at` (OPTIONAL): an absolute expiration time +- `expires_in` (OPTIONAL): a relative expiration time +- `refresh_token` (OPTIONAL): bearer token used to authorize refresh requests +- `refresh_url` (OPTIONAL): URL to fetch a new token from + +The `auth.toml` file may contain other fields (e.g. user name, user email), but they are ignored by Pkg. The two other fields mentioned in RFC6750 are `token_type` and `scope`: these are omitted since only tokens of type `Bearer` are supported currently and the scope is always implicitly to provide access to Pkg protocol URLs. Pkg servers should, however, not send `auth.toml` files with `token_type` or `scope` fields, as these names may be used in the future, e.g. to support other kinds of tokens or to limit the scope of an authorization to a subset of Pkg protocol URLs. + +Initially, the user or user agent (IDE) must acquire an `auth.toml` file and save it to the correct location. After that, Pkg will determine whether the access token needs to be refreshed by examining the `expires_at` and/or `expires_in` fields of the auth file. The expiration time is the minimum of `expires_at` and `mtime(auth_file) + expires_in`. When the Pkg client downloads a new `auth.toml` file, if there is a relative `expires_in` field, an absolute `expires_at` value is computed based on the client's current clock time. This combination of policies allows expiration to work gracefully even in the presence of clock skew between the server and the client. 
+ +If the access token is expired and there are `refresh_token` and `refresh_url` fields in `auth.toml`, a new auth file is requested by making a request to `refresh_url` with an `Authorization: Bearer $refresh_token` header. Pkg will refuse to make a refresh request unless `refresh_url` is an HTTPS URL. Note that `refresh_url` need not be a URL on the Pkg server: token refresh can be handled by a separate server. If the request is successful and the returned `auth.toml` file is a well-formed TOML file with at least an `access_token` field, it is saved to `$(DEPOT_PATH[1])/servers/$server/auth.toml`. + +Checking for access token expiry and refreshing `auth.toml` is done before each Pkg client request to a Pkg server, and if the auth file is updated the new access token is used, so the token should in theory always be up to date. Practice is different from theory, of course, and if the Pkg server considers the access token expired, it may return an HTTP 401 Unauthorized response, and the Pkg client should attempt to refresh the auth token. If, after attempting to refresh the access token, the server still returns HTTP 401 Unauthorized, the Pkg client will present the body of the error response to the user or user agent (IDE). + +### Authentication Hooks +A mechanism to register a hook at the client is provided to allow the user agent to handle an auth failure. It can, for example, present a login page and take the user through the necessary authentication flow to get a new auth token and store it in `auth.toml`. + +- A handler can be registered using [`register_auth_error_handler`](@ref Pkg.PlatformEngines.register_auth_error_handler). It returns a function that can be called to deregister the handler. +- A handler can also be deregistered using [`deregister_auth_error_handler`](@ref Pkg.PlatformEngines.deregister_auth_error_handler). 
+ +Example: + +```julia +# register a handler +dispose = Pkg.PlatformEngines.register_auth_error_handler((url, svr, err) -> begin + PkgAuth.authenticate(svr*"/auth") + return true, true +end) + +# ... client code ... + +# deregister the handler +dispose() +# or +Pkg.PlatformEngines.deregister_auth_error_handler(url, svr) +``` + +### Resources + +The client can make GET or HEAD requests to the following resources: + +- `/registries`: map of registry uuids at this server to their current tree hashes, each line of the response data is of the form `/registry/$uuid/$hash` representing a resource pointing to particular version of a registry +- `/registry/$uuid/$hash`: tarball of registry uuid at the given tree hash +- `/package/$uuid/$hash`: tarball of package uuid at the given tree hash +- `/artifact/$hash`: tarball of an artifact with the given tree hash + +Only the `/registries` changes - all other resources can be cached forever and the server will indicate this with the appropriate HTTP headers. + +### Compression Negotiation + +The Pkg protocol supports multiple compression formats. + +- **Zstd compression** (current): Modern clients send `Accept-Encoding: zstd, gzip` to request Zstandard-compressed resources with gzip as a fallback. +- **Gzip compression** (legacy): Older clients that only support gzip send `Accept-Encoding: gzip` or omit the header entirely. + +Clients verify the actual compression format by reading file magic bytes after download: + +- **Zstd format**: Magic bytes `0x28 0xB5 0x2F 0xFD` (4 bytes) - decompressed with `zstd` (significantly faster) +- **Gzip format**: Magic bytes `0x1F 0x8B` (2 bytes) - decompressed with 7z + + +### Reference Implementation + +A reference implementation of the Pkg Server protocol is available at [PkgServer.jl](https://github.com/JuliaPackaging/PkgServer.jl). 
+ +## The Storage Protocol + +This section describes the protocol used by Pkg Servers to get resources from Storage Servers, including the latest versions of registries, package source trees, and artifacts. The Pkg Server requests each type of resource when it needs it and caches it for as long as it can, so Storage Services should not have to serve the same resources to the same Pkg Server instance many times. + +### Authentication + +Since the Storage protocol is a server-to-server protocol, it uses certificate-based mutual authentication: each side of the connection presents certificates of identity to the other. The operator of a Storage Service must issue a client certificate to the operator of a Pkg Service certifying that it is authorized to use the Storage Service. + +### Resources + +The Storage Protocol is similar to the Pkg Protocol: + +- `/registries`: map of registry uuids at this server to their current tree hashes +- `/registry/$uuid/$hash`: tarball of registry uuid at the given tree hash +- `/package/$uuid/$hash`: tarball of package uuid at the given tree hash +- `/artifact/$hash`: tarball of an artifact with the given tree hash + +As is the case with the Pkg Server protocol, only the `/registries` resource changes over time—all other resources are permanently cacheable and Pkg Servers are expected to cache resources indefinitely, only deleting them if they need to reclaim storage space. + +### Interaction + +Fetching resources from a single Storage Server is straightforward: the Pkg Server asks for a version of a registry by UUID and hash and the Storage Server returns a tarball of that registry tree if it knows about that registry and version, or an HTTP 404 error if it doesn't. + +Each Pkg Server may use multiple Storage Services for availability and depth of backup. 
For a given resource, the Pkg Server makes a HEAD request to each Storage Service requesting the resource, and then makes a GET request for the resource to the first Storage Server that replies to the HEAD request with a 200 OK. If no Storage Service responds with a 200 OK in enough time, the Pkg Server should respond to the request for the corresponding resource with a 404 error. Each Storage Service which responds with a 200 OK must behave as if it had served the resource, regardless of whether it does so or not - i.e. persist the resource to long-term storage. + +One subtlety is how the Pkg Server determines what the latest version of each registry is. It can get a map from registry UUIDs to version hashes from each Storage Server, but hashes are unordered - if multiple Storage Servers reply with different hashes, which one should the Pkg Server use? When Storage Servers disagree on the latest hash of a registry, the Pkg Server should ask each Storage Server about the hashes that the other servers returned: if Service A knows about Service B's hash but B doesn't know about A's hash, then A's hash is more recent and should be used. If each server doesn't know about the other's hash, then neither hash is strictly newer than the other one and either could be used. The Pkg Server can break the tie any way it wants, e.g. randomly or by using the lexicographically earlier hash. + +### Guarantees + +The primary guarantee that a Storage Server makes is that if it has ever successfully served a resource—registry tree, package source tree, artifact tree — it must be able to serve that same resource version forever. + +It's tempting to also require it to guarantee that if a Storage Server serves a registry tree, it can also serve every package source tree referred to within that registry tree. Similarly, it is tempting to require that if a Storage Server can serve a package source tree that it should be able to serve any artifacts referenced by that version of the package. 
However, this could fail for reasons entirely beyond the control of the server: what if the registry is published with wrong package hashes? What if someone registers a package version, doesn't git tag it, then force pushes the branch that the version was on? In both of these cases, the Storage Server may not be able to fetch a version of a package through no fault of its own. Similarly, artifact hashes in packages might be incorrect or vanish before the Storage Server can retrieve them. + +Therefore, we don't strictly require that Storage Servers guarantee this kind of closure under resource references. We do, however, recommend that Storage Servers proactively fetch resources referred to by other resources as soon as possible. When a new version of a registry is available, the Storage Server should fetch all the new package versions in the registry immediately. When a package version is fetched—for any reason, whether because it was included in a new registry snapshot or because an upstream Pkg Server requested it by hash—all artifacts that it references should be fetched immediately. + +## Verification + +Since all resources are content addressed, the Pkg Clients and Pkg Server can and should verify that resources that they receive from upstream have the correct content hash. If a resource does not have the right hash, it should not be used and not be served further downstream. Pkg Servers should try to fetch the resource from other Storage Services and serve one that has the correct content. Pkg Clients should error if they get a resource with an incorrect content hash. + +Git uses SHA1 for content hashing. There is a pure Julia implementation of git's content hashing algorithm, which is being used to verify artifacts in Julia 1.3 (among other things). The SHA1 hashing algorithm is considered to be cryptographically compromised at this point, and while it's not completely broken, git is already starting to plan how to move away from using SHA1 hashes. 
To that end, we should consider getting ahead of this problem by using a stronger hash like SHA3-256 in these protocols. Having control over these protocols actually makes this considerably easier than if we were continuing to rely on git for resource acquisition. + +The first step to using SHA3-256 instead of SHA1 is to populate registries with additional hashes for package versions. Currently each package version is identified by a git-tree-sha1 entry. We would add git-tree-sha3-256 entries that give the SHA3-256 hashes computed using the same git tree hashing logic. From this origin, the Pkg Client, Pkg Server and Storage Servers all just need to use SHA3-256 hashes rather than SHA1 hashes. + +## References + +1. Pkg & Storage Protocols [https://github.com/JuliaLang/Pkg.jl/issues/1377](https://github.com/JuliaLang/Pkg.jl/issues/1377) +2. Authenticated Pkg Client Support: [https://github.com/JuliaLang/Pkg.jl/pull/1538](https://github.com/JuliaLang/Pkg.jl/pull/1538) +3. Authentication Hooks: [https://github.com/JuliaLang/Pkg.jl/pull/1630](https://github.com/JuliaLang/Pkg.jl/pull/1630) diff --git a/docs/src/registries.md b/docs/src/registries.md index 7c50727204..85a2777dc7 100644 --- a/docs/src/registries.md +++ b/docs/src/registries.md @@ -1,4 +1,4 @@ -# **7.** Registries +# **8.** Registries Registries contain information about packages, such as available releases and dependencies, and where they can be downloaded. @@ -99,6 +99,39 @@ are the following files: `Compat.toml`, `Deps.toml`, `Package.toml`, and `Versions.toml`. The formats of these files are described below. +### Registry Package.toml + +The `Package.toml` file contains basic metadata about the package, such as its name, UUID, repository URL, and optional metadata. + +#### Package metadata + +The `[metadata]` table in `Package.toml` provides a location for metadata about the package that doesn't fit into the other registry files. This is an extensible framework for adding package-level metadata. 
+ +#### Deprecated packages + +One use of the `[metadata]` table is to mark packages as deprecated using `[metadata.deprecated]`. Deprecated packages will: +- Show as `[deprecated]` in package status output +- Be excluded from tab-completion suggestions +- Still be installable and usable + +The `[metadata.deprecated]` table can contain arbitrary metadata fields. Two special fields are recognized by Pkg and displayed when using `pkg> status --deprecated`: +- `reason`: A string explaining why the package is deprecated +- `alternative`: A string suggesting a replacement package + +Example: + +```toml +name = "MyPackage" +uuid = "..." +repo = "..." + +[metadata.deprecated] +reason = "This package is no longer maintained" +alternative = "ReplacementPackage" +``` + +Other fields can be added to `[metadata.deprecated]` for use by registries or other tools. + ### Registry Compat.toml The `Compat.toml` file has a series of blocks specifying version @@ -127,6 +160,55 @@ together in these blocks. The interpretation of these ranges is given by the com So for this package, versions `[0.8.0, 0.8.3]` depend on versions `[0.4.0, 0.6.0)` of `DependencyA` and version `[0.3.0, 0.6.0)` of `DependencyB`. Meanwhile, it is also true that versions `[0.8.2, 0.8.5]` require specific versions of `DependencyC` (so that all three are required for versions `0.8.2` and `0.8.3`). +### Registry formats + +!!! compat "Julia 1.7" + Compressed registry formats are available starting with Julia 1.7. + +Registries can be installed in several different formats, each with different tradeoffs: + +#### Compressed registries (preferred) + +When using a package server (the default), registries are downloaded as compressed tarballs. 
This is the preferred format for the General registry because it is: +- **Fast for the initial download**: Only a single compressed file needs to be transferred +- **Fast to use**: Pkg reads data directly from the packed tarball, avoiding many small filesystem reads +- **Low disk usage**: The registry can be read directly from the compressed file without extraction + +You can check if a registry is compressed by running `Pkg.Registry.status()`, which will describe it as a "packed registry" when it remains in its tarball and an "unpacked registry" when the files have been extracted to disk. + +#### Git registries + +Registries can also be installed as git clones. This format: +- **Provides immediate updates**: Running `Pkg.Registry.update()` fetches the latest changes directly from the git repository +- **Uses more disk space**: The full git history is stored locally +- **May be slower**: Cloning and updating can take longer than downloading a compressed tarball +- **Integrates with local tooling**: All registry files are present on disk, so you can inspect or customize them using familiar editors and git workflows + +To install a registry as a git clone, use: + +```julia +Pkg.Registry.add(url = "https://github.com/JuliaRegistries/General.git") +``` + +#### Converting between formats + +To convert an existing registry from git to compressed (or vice versa), remove and re-add it: + +```julia-repl +# Convert to compressed (uses package server if available) +pkg> registry rm General + +pkg> registry add General + +# Convert to git +pkg> registry rm General + +pkg> registry add https://github.com/JuliaRegistries/General.git +``` + +!!! note + The environment variable `JULIA_PKG_SERVER` controls whether package servers are used. Setting it to an empty string (`JULIA_PKG_SERVER=""`) disables package server usage and forces git clones. To force unpacking even when using a package server, set `JULIA_PKG_UNPACK_REGISTRY=true`. 
+ ### Registry flavors The default Pkg Server (`pkg.julialang.org`) offers two different "flavors" of registry. diff --git a/docs/src/toml-files.md b/docs/src/toml-files.md index 79496e0321..f928e7be61 100644 --- a/docs/src/toml-files.md +++ b/docs/src/toml-files.md @@ -1,4 +1,4 @@ -# [**10.** `Project.toml` and `Manifest.toml`](@id Project-and-Manifest) +# [**11.** `Project.toml` and `Manifest.toml`](@id Project-and-Manifest) Two files that are central to Pkg are `Project.toml` and `Manifest.toml`. `Project.toml` and `Manifest.toml` are written in [TOML](https://github.com/toml-lang/toml) (hence the @@ -22,13 +22,38 @@ are described below. ### The `authors` field -For a package, the optional `authors` field is a list of strings describing the -package authors, in the form `NAME `. For example: +For a package, the optional `authors` field is a TOML array describing the package authors. +Entries in the array can either be a string in the form `"NAME"` or `"NAME "`, or a table with keys following the [Citation File Format schema](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md) for either a +[`person`](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md#definitionsperson) or an [`entity`](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md#definitionsentity).
+ +For example: ```toml -authors = ["Some One ", - "Foo Bar "] +authors = [ + "Some One ", + "Foo Bar ", + {given-names = "Baz", family-names = "Qux", email = "bazqux@example.com", orcid = "https://orcid.org/0000-0000-0000-0000", website = "https://github.com/bazqux"}, +] ``` +If all authors are specified by tables, it is possible to use [the TOML Array of Tables syntax](https://toml.io/en/v1.0.0#array-of-tables) +```toml +[[authors]] +given-names = "Some" +family-names = "One" +email = "someone@email.com" + +[[authors]] +given-names = "Foo" +family-names = "Bar" +email = "foo@bar.com" + +[[authors]] +given-names = "Baz" +family-names = "Qux" +email = "bazqux@example.com" +orcid = "https://orcid.org/0000-0000-0000-0000" +website = "https://github.com/bazqux" +``` ### The `name` field @@ -39,7 +64,7 @@ name = "Example" The name must be a valid [identifier](https://docs.julialang.org/en/v1/base/base/#Base.isidentifier) (a sequence of Unicode characters that does not start with a number and is neither `true` nor `false`). For packages, it is recommended to follow the -[package naming rules](@ref Package-naming-rules). The `name` field is mandatory +[package naming rules](@ref Package-naming-guidelines). The `name` field is mandatory for packages. @@ -55,6 +80,13 @@ The `uuid` field is mandatory for packages. !!! note It is recommended that `UUIDs.uuid4()` is used to generate random UUIDs. +#### Why UUIDs are important + +UUIDs serve several critical purposes in the Julia package ecosystem: + +- **Unique identification**: UUIDs uniquely identify packages across all registries and repositories, preventing naming conflicts. Two different packages can have the same name (e.g., in different registries), but their UUIDs will always be different. +- **Multiple registries**: UUIDs enable the use of multiple package registries (including private registries) without conflicts, as each package is uniquely identified by its UUID regardless of which registry it comes from. 
+ ### The `version` field @@ -77,6 +109,33 @@ Note that Pkg.jl deviates from the SemVer specification when it comes to version the section on [pre-1.0 behavior](@ref compat-pre-1.0) for more details. +### The `readonly` field + +The `readonly` field is a boolean that, when set to `true`, marks the environment as read-only. This prevents any modifications to the environment, including adding, removing, or updating packages. For example: + +```toml +readonly = true +``` + +When an environment is marked as readonly, Pkg will throw an error if any operation that would modify the environment is attempted. +If the `readonly` field is not present or set to `false` (the default), the environment can be modified normally. + +You can also programmatically check and modify the readonly state using the [`Pkg.readonly`](@ref) function: + +```julia +# Check if current environment is readonly +is_readonly = Pkg.readonly() + +# Enable readonly mode +previous_state = Pkg.readonly(true) + +# Disable readonly mode +Pkg.readonly(false) +``` + +When readonly mode is enabled, the status display will show `(readonly)` next to the project name to indicate the environment is protected from modifications. + + ### The `[deps]` section All dependencies of the package/project are listed in the `[deps]` section. Each dependency @@ -91,24 +150,101 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Typically it is not needed to manually add entries to the `[deps]` section; this is instead handled by Pkg operations such as `add`. -### The `[sources]` section +### [The `[sources]` section](@id sources-section) -Specifiying a path or repo (+ branch) for a dependency is done in the `[sources]` section. +Specifying a path or repo (+ branch) for a dependency is done in the `[sources]` section. These are especially useful for controlling unregistered dependencies without having to bundle a corresponding manifest file. 
+Each entry in the `[sources]` section supports the following keys: + +- **`url`**: The URL of the Git repository. Cannot be used with `path`. +- **`rev`**: The Git revision (branch name, tag, or commit hash) to use. Only valid with `url`. +- **`subdir`**: A subdirectory within the repository containing the package. +- **`path`**: A local filesystem path to the package. Cannot be used with `url` or `rev`. This will `dev` the package. + +This might in practice look something like: + ```toml [sources] Example = {url = "https://github.com/JuliaLang/Example.jl", rev = "custom_branch"} +WithinMonorepo = {url = "https://github.org/author/BigProject", subdir = "SubPackage"} SomeDependency = {path = "deps/SomeDependency.jl"} ``` -Note that this information is only used when this environment is active, i.e. it is not used if this project is a package that is being used as a dependency. +#### When `[sources]` entries are used + +Sources are read and applied in the following situations: + +1. **Active environment**: When resolving dependencies for the currently active environment, sources from the environment's `Project.toml` override registry information for direct dependencies. + +2. **Automatic addition**: When you add a package by URL (e.g., `pkg> add https://github.com/...`) or develop a package (e.g., `pkg> dev Example`), Pkg automatically adds an entry to `[sources]` for that package in your active environment's `Project.toml`. + +3. **Recursive collection**: When a package is added by URL or path, Pkg recursively collects `[sources]` entries from that package's dependencies. This allows private dependency chains to resolve without registry metadata. 
For example: + - If you `add` Package A by URL, and Package A has a `[sources]` entry for Package B + - And Package B (also specified by URL in A's sources) has a `[sources]` entry for Package C + - Then all three packages' source information will be collected and used during resolution + +This recursive behavior is particularly useful for managing chains of unregistered or private packages. + +!!! note "Scope of sources" + Sources are only used when the environment containing them is the active environment being resolved. If a package is used as a dependency in another project, its `[sources]` section is **not** consulted (except when that package itself was added by URL or path, in which case recursive collection applies as described above). + +!!! tip "Test-specific dependencies" + A use case for `[sources]` with `path` is in `test/Project.toml` to reference the parent package using `path = ".."`. This allows test dependencies to be managed independently with their own manifest file. See [Test-specific dependencies](@ref) for more details on this and other approaches. + +!!! compat + Specifying sources requires Julia 1.11+. + +### The `[weakdeps]` section + +Weak dependencies are optional dependencies that will not automatically install when the package is installed, +but for which you can still specify compatibility constraints. Weak dependencies are typically used in conjunction +with package extensions (see [`[extensions]`](@ref extensions-section) below), which allow conditional loading of code +when the weak dependency is available in the environment. + +Example: +```toml +[weakdeps] +SomePackage = "b3785f31-9d33-4cdf-bc73-f646780f1739" + +[compat] +SomePackage = "1.2" +``` + +For more details on using weak dependencies and extensions, see the +[Weak dependencies](@ref Weak-dependencies) section in the Creating Packages guide. + +!!! compat + Weak dependencies require Julia 1.9+. 
+ +### [The `[extensions]` section](@id extensions-section) + +Extensions allow packages to provide optional functionality that is only loaded when certain other packages +(typically listed in `[weakdeps]`) are available. Each entry in the `[extensions]` section maps an extension +name to one or more package dependencies required to load that extension. + +Example: +```toml +[weakdeps] +Contour = "d38c429a-6771-53c6-b99e-75d170b6e991" + +[extensions] +ContourExt = "Contour" +``` + +The extension code itself should be placed in an `ext/` directory at the package root, with the file name +matching the extension name (e.g., `ext/ContourExt.jl`). For more details on creating and using extensions, +see the [Conditional loading of code in packages (Extensions)](@ref Conditional-loading-of-code-in-packages-(Extensions)) section in the Creating Packages guide. + +!!! compat + Extensions require Julia 1.9+. ### The `[compat]` section -Compatibility constraints for the dependencies listed under `[deps]` can be listed in the -`[compat]` section. +Compatibility constraints for dependencies can be listed in the `[compat]` section. This applies to +packages listed under `[deps]`, `[weakdeps]`, and `[extras]`. + Example: ```toml @@ -128,13 +264,17 @@ constraints in detail. It is also possible to list constraints on `julia` itself julia = "1.1" ``` -### The `[workspace]` section +### [The `[workspace]` section](@id Workspaces) A project file can define a workspace by giving a set of projects that is part of that workspace. Each project in a workspace can include their own dependencies, compatibility information, and even function as full packages. When the package manager resolves dependencies, it considers the requirements of all the projects in the workspace. The compatible versions identified during this process are recorded in a single manifest file located next to the base project file. 
+Note that dependencies of the root package are **not** automatically available in child projects. +Each child must declare its own `[deps]`. The parent package itself can be included in a child project +via a `[sources]` path entry. See [Test-specific dependencies](@ref adding-tests-to-packages) for more information. + A workspace is defined in the base project by giving a list of the projects in it: ```toml @@ -142,11 +282,54 @@ A workspace is defined in the base project by giving a list of the projects in i projects = ["test", "docs", "benchmarks", "PrivatePackage"] ``` -This structure is particularly beneficial for developers using a monorepo approach, where a large number of unregistered packages may be involved. It's also useful for adding documentation or benchmarks to a package by including additional dependencies beyond those of the package itself. +This structure is particularly beneficial for developers using a monorepo approach, where a large number of unregistered packages may be involved. It's also useful for adding test-specific dependencies to a package by including a `test` project in the workspace (see [Test-specific dependencies](@ref adding-tests-to-packages)), or for adding documentation or benchmarks with their own dependencies. -Workspace can be nested: a project that itself defines a workspace can also be part of another workspace. +Workspaces can be nested: a project that itself defines a workspace can also be part of another workspace. In this case, the workspaces are "merged" with a single manifest being stored alongside the "root project" (the project that doesn't have another workspace including it). +### The `[extras]` section (legacy) + +!!! warning + The `[extras]` section is a legacy feature maintained for compatibility. For Julia 1.13+, + using [workspaces](@ref Workspaces) is the recommended approach for managing test-specific + and other optional dependencies. 
+ +The `[extras]` section lists additional dependencies that are not regular dependencies of the package, +but may be used in specific contexts like testing. These are typically used in conjunction with the +`[targets]` section. + +Example: +```toml +[extras] +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a" +``` + +For more information, see the [Test-specific dependencies](@ref adding-tests-to-packages) section. + +### The `[targets]` section (legacy) + +!!! warning + The `[targets]` section is a legacy feature maintained for compatibility. For Julia 1.13+, + using [workspaces](@ref Workspaces) is the recommended approach for managing test-specific + and build dependencies. + +The `[targets]` section specifies which packages from `[extras]` should be available in specific +contexts. The only supported targets are `test` (for test dependencies) and `build` (for build-time +dependencies used by `deps/build.jl` scripts). + +Example: +```toml +[extras] +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[targets] +test = ["Test", "Markdown"] +``` + +For more information, see the [Test-specific dependencies](@ref adding-tests-to-packages) section. + ## `Manifest.toml` The manifest file is an absolute record of the state of the packages in the environment. @@ -161,7 +344,7 @@ For the details, see [`Pkg.instantiate`](@ref). ### Different Manifests for Different Julia versions -Starting from Julia v1.11, there is an option to name manifest files in the format `Manifest-v{major}.{minor}.toml`. +Starting from Julia v1.10.8, there is an option to name manifest files in the format `Manifest-v{major}.{minor}.toml`. Julia will then preferentially use the version-specific manifest file if available. For example, if both `Manifest-v1.11.toml` and `Manifest.toml` exist, Julia 1.11 will prioritize using `Manifest-v1.11.toml`. 
However, Julia versions 1.10, 1.12, and all others will default to using `Manifest.toml`. @@ -185,6 +368,39 @@ This shows the Julia version the manifest was created on, the "format" of the ma and a hash of the project file, so that it is possible to see when the manifest is stale compared to the project file. +#### Manifest format versions + +The `manifest_format` field indicates the structure version of the manifest file: +- `"2.0"`: The standard format for Julia 1.7+ +- `"2.1"`: The current format (requires Julia 1.13+). This format introduced registry tracking in the `[registries]` section. + +### The `[registries]` section + +!!! compat + Registry tracking in manifests requires Julia 1.13+ and manifest format `"2.1"`. + +Starting with manifest format `2.1`, the manifest can include a `[registries]` section that tracks +metadata about the registries from which packages were obtained. This ensures that the exact source +of each package version can be identified, which is particularly important when using multiple +registries or private registries. + +Each registry entry in the manifest looks like this: + +```toml +[registries.General] +uuid = "23338594-aafe-5451-b93e-139f81909106" +url = "https://github.com/JuliaRegistries/General.git" +``` + +The registry entries include: +* **`uuid`** (required): The unique identifier for the registry. +* **`url`** (optional): The URL where the registry can be found. This enables automatic installation + of registries when instantiating an environment on a new machine. + +The section key (e.g., `General` in the example above) is the registry name. + +### Package entries + Each dependency has its own section in the manifest file, and its content varies depending on how the dependency was added to the environment. 
Every dependency section includes a combination of the following entries: @@ -201,6 +417,11 @@ dependency section includes a combination of the following entries: or a commit `repo-rev = "66607a62a83cb07ab18c0b35c038fcd62987c9b1"`. * `git-tree-sha1`: a content hash of the source tree, for example `git-tree-sha1 = "ca3820cc4e66f473467d912c4b2b3ae5dc968444"`. +* `registries`: a reference to the registry IDs from which this package version was obtained. This can be either + a single string (e.g., `registries = "General"`) or a vector of strings if the package is available in multiple + registries (e.g., `registries = ["General", "MyRegistry"]`). All registries containing this package version + are recorded. This field is only present in manifest format `2.1` or later, and only for packages that were + added from a registry (not for developed or git-tracked packages). #### Added package @@ -215,10 +436,12 @@ deps = ["DependencyA", "DependencyB"] git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8" uuid = "7876af07-990d-54b4-ab0e-23690620f79a" version = "1.2.3" +registries = "General" ``` Note, in particular, that no `repo-url` is present, since that information is included in -the registry where this package was found. +the registry where this package was found. The `registries` field (present in manifest format `2.1`+) +references an entry in the `[registries]` section that contains the registry metadata. #### Added package by branch diff --git a/ext/REPLExt/REPLExt.jl b/ext/REPLExt/REPLExt.jl index 92e636cd2f..faf7f6cb0c 100644 --- a/ext/REPLExt/REPLExt.jl +++ b/ext/REPLExt/REPLExt.jl @@ -1,18 +1,25 @@ module REPLExt +if Base.get_bool_env("JULIA_PKG_DISALLOW_PKG_PRECOMPILATION", false) == true + error("Precompililing Pkg extension REPLExt is disallowed. 
JULIA_PKG_DISALLOW_PKG_PRECOMPILATION=$(ENV["JULIA_PKG_DISALLOW_PKG_PRECOMPILATION"])") +end + using Markdown, UUIDs, Dates import REPL import .REPL: LineEdit, REPLCompletions, TerminalMenus import Pkg -import .Pkg: linewrap, pathrepr, compat, can_fancyprint, printpkgstyle, PKGMODE_PROJECT +import .Pkg: linewrap, pathrepr, can_fancyprint, printpkgstyle, PKGMODE_PROJECT using .Pkg: Types, Operations, API, Registry, Resolve, REPLMode, safe_realpath using .REPLMode: Statement, CommandSpec, Command, prepare_cmd, tokenize, core_parse, SPECS, api_options, parse_option, api_options, is_opt, wrap_option using .Types: Context, PkgError, pkgerror, EnvCache +using .API: set_current_compat +import .API: _compat + include("completions.jl") include("compat.jl") @@ -21,13 +28,32 @@ include("compat.jl") # REPL mode creation # ###################### +const BRACKET_INSERT_SUPPORTED = hasfield(REPL.Options, :auto_insert_closing_bracket) + struct PkgCompletionProvider <: LineEdit.CompletionProvider end -function LineEdit.complete_line(c::PkgCompletionProvider, s; hint::Bool=false) +function LineEdit.complete_line(c::PkgCompletionProvider, s; hint::Bool = false) partial = REPL.beforecursor(s.input_buffer) full = LineEdit.input_string(s) ret, range, should_complete = completions(full, lastindex(partial); hint) - return ret, partial[range], should_complete + # Convert to new completion interface format + named_completions = map(LineEdit.NamedCompletion, ret) + # Convert UnitRange to Region (Pair{Int,Int}) to match new completion interface + # range represents character positions in partial string, convert to 0-based byte positions + if length(range) == 0 && first(range) > last(range) + # Empty backward range like 4:3 means insert at cursor position + # The cursor is at position last(range), so insert after it + pos = thisind(partial, last(range) + 1) - 1 + region = pos => pos + elseif isempty(range) + region = 0 => 0 + else + # Convert 1-based character positions to 0-based byte 
positions + start_pos = thisind(full, first(range)) - 1 + end_pos = thisind(full, last(range)) + region = start_pos => end_pos + end + return named_completions, region, should_complete end prev_project_file = nothing @@ -113,23 +139,27 @@ function on_done(s, buf, ok, repl) do_cmds(repl, input) REPL.prepare_next(repl) REPL.reset_state(s) - s.current_mode.sticky || REPL.transition(s, main) + return s.current_mode.sticky || REPL.transition(s, main) end # Set up the repl Pkg REPLMode function create_mode(repl::REPL.AbstractREPL, main::LineEdit.Prompt) - pkg_mode = LineEdit.Prompt(promptf; + pkg_mode = LineEdit.Prompt( + promptf; prompt_prefix = repl.options.hascolor ? Base.text_colors[:blue] : "", prompt_suffix = "", complete = PkgCompletionProvider(), - sticky = true) + sticky = true + ) pkg_mode.repl = repl hp = main.hist hp.mode_mapping[:pkg] = pkg_mode pkg_mode.hist = hp - search_prompt, skeymap = LineEdit.setup_search_keymap(hp) + skeymap = if !isdefined(REPL, :History) + last(LineEdit.setup_search_keymap(hp)) # TODO: Remove + end prefix_prompt, prefix_keymap = LineEdit.setup_prefix_keymap(hp, pkg_mode) pkg_mode.on_done = (s, buf, ok) -> Base.@invokelatest(on_done(s, buf, ok, repl)) @@ -145,25 +175,30 @@ function create_mode(repl::REPL.AbstractREPL, main::LineEdit.Prompt) repl_keymap = Dict() if shell_mode !== nothing - let shell_mode=shell_mode - repl_keymap[';'] = function (s,o...) - if isempty(s) || position(LineEdit.buffer(s)) == 0 + let shell_mode = shell_mode + repl_keymap[';'] = function (s, o...) 
+ return if isempty(s) || position(LineEdit.buffer(s)) == 0 buf = copy(LineEdit.buffer(s)) LineEdit.transition(s, shell_mode) do LineEdit.state(s, shell_mode).input_buffer = buf end else LineEdit.edit_insert(s, ';') - LineEdit.check_for_hint(s) && LineEdit.refresh_line(s) + LineEdit.check_show_hint(s) end end end end - b = Dict{Any,Any}[ - skeymap, repl_keymap, mk, prefix_keymap, LineEdit.history_keymap, - LineEdit.default_keymap, LineEdit.escape_defaults - ] + b = Dict{Any, Any}[] + if !isdefined(REPL, :History) + push!(b, skeymap) + end + push!(b, repl_keymap) + if BRACKET_INSERT_SUPPORTED && repl.options.auto_insert_closing_bracket + push!(b, LineEdit.bracket_insert_keymap) + end + push!(b, mk, prefix_keymap, LineEdit.history_keymap, LineEdit.default_keymap, LineEdit.escape_defaults) pkg_mode.keymap_dict = LineEdit.keymap(b) return pkg_mode end @@ -172,16 +207,20 @@ function repl_init(repl::REPL.LineEditREPL) main_mode = repl.interface.modes[1] pkg_mode = create_mode(repl, main_mode) push!(repl.interface.modes, pkg_mode) - keymap = Dict{Any,Any}( - ']' => function (s,args...) + keymap = Dict{Any, Any}( + ']' => function (s, args...) if isempty(s) || position(LineEdit.buffer(s)) == 0 buf = copy(LineEdit.buffer(s)) - LineEdit.transition(s, pkg_mode) do + return LineEdit.transition(s, pkg_mode) do LineEdit.state(s, pkg_mode).input_buffer = buf end else - LineEdit.edit_insert(s, ']') - LineEdit.check_for_hint(s) && LineEdit.refresh_line(s) + if BRACKET_INSERT_SUPPORTED && repl.options.auto_insert_closing_bracket + return LineEdit.bracket_insert_keymap[']'](s, args...) 
+ else + LineEdit.edit_insert(s, ']') + return LineEdit.check_show_hint(s) + end end end ) @@ -201,9 +240,9 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) end if isempty(ctx.registries) if !REG_WARNED[] - printstyled(ctx.io, " │ "; color=:green) + printstyled(ctx.io, " │ "; color = :green) printstyled(ctx.io, "Attempted to find missing packages in package registries but no registries are installed.\n") - printstyled(ctx.io, " └ "; color=:green) + printstyled(ctx.io, " └ "; color = :green) printstyled(ctx.io, "Use package mode to install a registry. `pkg> registry add` will install the default registries.\n\n") REG_WARNED[] = true end @@ -223,22 +262,22 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) available_pkg_list = length(available_pkgs) == 1 ? String(available_pkgs[1]) : "[$(join(available_pkgs, ", "))]" msg1 = "Package$(plural1) $(missing_pkg_list) not found, but $(plural2) named $(available_pkg_list) $(plural3) available from a registry." for line in linewrap(msg1, io = ctx.io, padding = length(" │ ")) - printstyled(ctx.io, " │ "; color=:green) + printstyled(ctx.io, " │ "; color = :green) println(ctx.io, line) end - printstyled(ctx.io, " │ "; color=:green) + printstyled(ctx.io, " │ "; color = :green) println(ctx.io, "Install package$(plural4)?") msg2 = string("add ", join(available_pkgs, ' ')) for (i, line) in pairs(linewrap(msg2; io = ctx.io, padding = length(string(" | ", promptf())))) - printstyled(ctx.io, " │ "; color=:green) + printstyled(ctx.io, " │ "; color = :green) if i == 1 - printstyled(ctx.io, promptf(); color=:blue) + printstyled(ctx.io, promptf(); color = :blue) else print(ctx.io, " "^length(promptf())) end println(ctx.io, line) end - printstyled(ctx.io, " └ "; color=:green) + printstyled(ctx.io, " └ "; color = :green) Base.prompt(stdin, ctx.io, "(y/n/o)", default = "y") catch err if err isa InterruptException # if ^C is entered @@ -254,7 +293,7 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) resp = strip(resp) lower_resp = 
lowercase(resp) if lower_resp in ["y", "yes"] - API.add(string.(available_pkgs); allow_autoprecomp=false) + API.add(string.(available_pkgs); allow_autoprecomp = false) elseif lower_resp in ["o"] editable_envs = filter(v -> v != "@stdlib", LOAD_PATH) option_list = String[] @@ -273,16 +312,16 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) push!(keybindings, only("$n")) push!(shown_envs, expanded_env) end - menu = TerminalMenus.RadioMenu(option_list, keybindings=keybindings, pagesize=length(option_list)) + menu = TerminalMenus.RadioMenu(option_list; keybindings = keybindings, pagesize = length(option_list), charset = :ascii) default = something( # select the first non-default env by default, if possible findfirst(!=(Base.active_project()), shown_envs), 1 ) print(ctx.io, "\e[1A\e[1G\e[0J") # go up one line, to the start, and clear it - printstyled(ctx.io, " └ "; color=:green) + printstyled(ctx.io, " └ "; color = :green) choice = try - TerminalMenus.request("Select environment:", menu, cursor=default) + TerminalMenus.request("Select environment:", menu, cursor = default) catch err if err isa InterruptException # if ^C is entered println(ctx.io) @@ -292,7 +331,7 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) end choice == -1 && return false API.activate(shown_envs[choice]) do - API.add(string.(available_pkgs); allow_autoprecomp=false) + API.add(string.(available_pkgs); allow_autoprecomp = false) end elseif (lower_resp in ["n"]) return false @@ -308,7 +347,6 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) end - function __init__() if isdefined(Base, :active_repl) if Base.active_repl isa REPL.LineEditREPL @@ -326,7 +364,7 @@ function __init__() end end end - if !in(try_prompt_pkg_add, REPL.install_packages_hooks) + return if !in(try_prompt_pkg_add, REPL.install_packages_hooks) push!(REPL.install_packages_hooks, try_prompt_pkg_add) end end diff --git a/ext/REPLExt/compat.jl b/ext/REPLExt/compat.jl index a9a537cf0f..f51e6b877c 100644 --- 
a/ext/REPLExt/compat.jl +++ b/ext/REPLExt/compat.jl @@ -1,7 +1,9 @@ # TODO: Overload -function compat(ctx::Context; io = nothing) +function _compat(ctx::Context; io = nothing, input_io = stdin) io = something(io, ctx.io) - can_fancyprint(io) || pkgerror("Pkg.compat cannot be run interactively in this terminal") + if input_io isa Base.TTY # testing uses IOBuffer + can_fancyprint(io) || pkgerror("Pkg.compat cannot be run interactively in this terminal") + end printpkgstyle(io, :Compat, pathrepr(ctx.env.project_file)) longest_dep_len = max(5, length.(collect(keys(ctx.env.project.deps)))...) opt_strs = String[] @@ -9,14 +11,14 @@ function compat(ctx::Context; io = nothing) compat_str = Operations.get_compat_str(ctx.env.project, "julia") push!(opt_strs, Operations.compat_line(io, "julia", nothing, compat_str, longest_dep_len, indent = "")) push!(opt_pkgs, "julia") - for (dep, uuid) in sort(collect(ctx.env.project.deps); by = x->x.first) + for (dep, uuid) in sort(collect(ctx.env.project.deps); by = x -> x.first) compat_str = Operations.get_compat_str(ctx.env.project, dep) push!(opt_strs, Operations.compat_line(io, dep, uuid, compat_str, longest_dep_len, indent = "")) push!(opt_pkgs, dep) end - menu = TerminalMenus.RadioMenu(opt_strs, pagesize=length(opt_strs)) + menu = TerminalMenus.RadioMenu(opt_strs; pagesize = length(opt_strs), charset = :ascii) choice = try - TerminalMenus.request(" Select an entry to edit:", menu) + TerminalMenus.request(TerminalMenus.default_terminal(in = input_io, out = io), " Select an entry to edit:", menu) catch err if err isa InterruptException # if ^C is entered println(io) @@ -35,10 +37,12 @@ function compat(ctx::Context; io = nothing) start_pos = length(prompt) + 2 move_start = "\e[$(start_pos)G" clear_to_end = "\e[0J" - ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid},Int32), stdin.handle, true) + if input_io isa Base.TTY + ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid}, Int32), input_io.handle, true) + end while true print(io, move_start, 
clear_to_end, buffer, "\e[$(start_pos + cursor)G") - inp = TerminalMenus._readkey(stdin) + inp = TerminalMenus._readkey(input_io) if inp == '\r' # Carriage return println(io) break @@ -54,7 +58,7 @@ function compat(ctx::Context; io = nothing) elseif inp == TerminalMenus.END_KEY cursor = length(buffer) elseif inp == TerminalMenus.DEL_KEY - if cursor == 0 + if cursor == 0 && !isempty(buffer) buffer = buffer[2:end] elseif cursor < length(buffer) buffer = buffer[1:cursor] * buffer[(cursor + 2):end] @@ -62,16 +66,18 @@ function compat(ctx::Context; io = nothing) elseif inp isa TerminalMenus.Key # ignore all other escaped (multi-byte) keys elseif inp == '\x7f' # backspace - if cursor == 1 - buffer = buffer[2:end] - elseif cursor == length(buffer) - buffer = buffer[1:end - 1] - elseif cursor > 0 - buffer = buffer[1:(cursor-1)] * buffer[(cursor + 1):end] + if cursor > 0 + if cursor == 1 + buffer = buffer[2:end] + elseif cursor == length(buffer) + buffer = buffer[1:(end - 1)] + else + buffer = buffer[1:(cursor - 1)] * buffer[(cursor + 1):end] + end + cursor -= 1 else continue end - cursor -= 1 else if cursor == 0 buffer = inp * buffer @@ -85,9 +91,11 @@ function compat(ctx::Context; io = nothing) end buffer finally - ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid},Int32), stdin.handle, false) + if input_io isa Base.TTY + ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid}, Int32), input_io.handle, false) + end end new_entry = strip(resp) - compat(ctx, dep, string(new_entry)) + API._compat(ctx, dep, string(new_entry)) return end diff --git a/ext/REPLExt/completions.jl b/ext/REPLExt/completions.jl index eca5e11218..60646bad78 100644 --- a/ext/REPLExt/completions.jl +++ b/ext/REPLExt/completions.jl @@ -11,7 +11,7 @@ function _shared_envs() return possible end -function complete_activate(options, partial, i1, i2; hint::Bool) +function complete_activate(options, partial, i1, i2; hint::Bool, arguments = []) shared = get(options, :shared, false) if shared return _shared_envs() @@ -34,11 +34,10 
@@ function complete_local_dir(s, i1, i2) end function complete_expanded_local_dir(s, i1, i2, expanded_user, oldi2) - cmp = REPL.REPLCompletions.complete_path(s, i2, shell_escape=true) - cmp2 = cmp[2] - completions = [REPL.REPLCompletions.completion_text(p) for p in cmp[1]] - completions = filter!(completions) do x - Base.isaccessibledir(s[1:prevind(s, first(cmp2)-i1+1)]*x) + paths, dir, success = REPL.REPLCompletions.complete_path(s; cmd_escape = true) + completions = [REPL.REPLCompletions.completion_text(p) for p in paths] + filter!(completions) do x + Base.isaccessibledir(joinpath(dir, x)) end if expanded_user if length(completions) == 1 && endswith(joinpath(homedir(), ""), first(completions)) @@ -48,83 +47,110 @@ function complete_expanded_local_dir(s, i1, i2, expanded_user, oldi2) end return completions, i1:oldi2, true end - - return completions, cmp[2], !isempty(completions) + prefix = splitdir(s)[2] + startpos = i2 - lastindex(prefix) + 1 + return completions, startpos:i2, !isempty(completions) end const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e") -function complete_remote_package(partial; hint::Bool) - found_match = false - isempty(partial) && return String[] + +# Helper function to extract already-specified package names from arguments +# Used for deduplicating completion suggestions (issue #4098) +function extract_specified_names(arguments) + specified_names = Set{String}() + # Exclude the last argument, which is the one currently being completed + for i in 1:(length(arguments) - 1) + arg = arguments[i] + arg_str = arg isa String ? 
arg : arg.raw + # Extract package name (before any @, #, =, or : specifiers) + pkg_name = first(split(arg_str, ['@', '#', '=', ':'])) + push!(specified_names, pkg_name) + end + return specified_names +end +function complete_remote_package!(comps, partial; hint::Bool) + isempty(partial) && return true # true means returned early + found_match = !isempty(comps) cmp = Set{String}() for reg in Registry.reachable_registries() for (uuid, regpkg) in reg name = regpkg.name name in cmp && continue if startswith(regpkg.name, partial) - pkg = Registry.registry_info(regpkg) - compat_info = Registry.compat_info(pkg) - # Filter versions - for (v, uncompressed_compat) in compat_info + pkg = Registry.registry_info(reg, regpkg) + Registry.isdeprecated(pkg) && continue + # Check if any non-yanked version is compatible with current Julia + found_compatible_version = false + for v in keys(pkg.version_info) Registry.isyanked(pkg, v) && continue # TODO: Filter based on offline mode - is_julia_compat = nothing - for (pkg_uuid, vspec) in uncompressed_compat - if pkg_uuid == JULIA_UUID - is_julia_compat = VERSION in vspec - is_julia_compat && continue - end - end - # Found a compatible version or compat on julia at all => compatible - if is_julia_compat === nothing || is_julia_compat - push!(cmp, name) - # In hint mode the result is only used if there is a single matching entry - # so we abort the search - if hint && found_match - return sort!(collect(cmp)) - end - found_match = true + # Query compressed compat for this version (optimized: only fetch Julia compat) + julia_vspec = Pkg.Registry.query_compat_for_version(pkg, v, JULIA_UUID) + # Found a compatible version or no julia compat at all => compatible + if julia_vspec === nothing || VERSION in julia_vspec + found_compatible_version = true break end end + if found_compatible_version + push!(cmp, name) + # In hint mode the result is only used if there is a single matching entry + # so we can return no matches in case of more than one 
match + if hint && found_match + return true # true means returned early + end + found_match = true + end end end end - return sort!(collect(cmp)) + append!(comps, sort!(collect(cmp))) + return false # false means performed full search end -function complete_help(options, partial; hint::Bool) +function complete_help(options, partial; hint::Bool, arguments = []) names = String[] for cmds in values(SPECS) - append!(names, [spec.canonical_name for spec in values(cmds)]) + append!(names, [spec.canonical_name for spec in values(cmds)]) end return sort!(unique!(append!(names, collect(keys(SPECS))))) end -function complete_installed_packages(options, partial; hint::Bool) - env = try EnvCache() +function complete_installed_packages(options, partial; hint::Bool, arguments = []) + env = try + EnvCache() catch err err isa PkgError || rethrow() return String[] end mode = get(options, :mode, PKGMODE_PROJECT) - return mode == PKGMODE_PROJECT ? + packages = mode == PKGMODE_PROJECT ? collect(keys(env.project.deps)) : unique!([entry.name for (uuid, entry) in env.manifest]) + + # Filter out already-specified packages + specified_names = extract_specified_names(arguments) + return filter(pkg -> !(pkg in specified_names), packages) end -function complete_all_installed_packages(options, partial; hint::Bool) - env = try EnvCache() +function complete_all_installed_packages(options, partial; hint::Bool, arguments = []) + env = try + EnvCache() catch err err isa PkgError || rethrow() return String[] end - return unique!([entry.name for (uuid, entry) in env.manifest]) + packages = unique!([entry.name for (uuid, entry) in env.manifest]) + + # Filter out already-specified packages + specified_names = extract_specified_names(arguments) + return filter(pkg -> !(pkg in specified_names), packages) end -function complete_installed_packages_and_compat(options, partial; hint::Bool) - env = try EnvCache() +function complete_installed_packages_and_compat(options, partial; hint::Bool, arguments = []) + 
env = try + EnvCache() catch err err isa PkgError || rethrow() return String[] @@ -135,27 +161,59 @@ function complete_installed_packages_and_compat(options, partial; hint::Bool) end end -function complete_fixed_packages(options, partial; hint::Bool) - env = try EnvCache() +function complete_fixed_packages(options, partial; hint::Bool, arguments = []) + env = try + EnvCache() catch err err isa PkgError || rethrow() return String[] end - return unique!([entry.name for (uuid, entry) in env.manifest.deps if Operations.isfixed(entry)]) + packages = unique!([entry.name for (uuid, entry) in env.manifest.deps if Operations.isfixed(entry)]) + + # Filter out already-specified packages + specified_names = extract_specified_names(arguments) + return filter(pkg -> !(pkg in specified_names), packages) end -function complete_add_dev(options, partial, i1, i2; hint::Bool) +function complete_add_dev(options, partial, i1, i2; hint::Bool, arguments = []) comps, idx, _ = complete_local_dir(partial, i1, i2) if occursin(Base.Filesystem.path_separator_re, partial) return comps, idx, !isempty(comps) end - comps = vcat(comps, sort(complete_remote_package(partial; hint))) - if !isempty(partial) + returned_early = complete_remote_package!(comps, partial; hint) + # returning early means that no further search should be done here + if !returned_early append!(comps, filter!(startswith(partial), [info.name for info in values(Types.stdlib_infos())])) end + + # Filter out already-specified packages + specified_names = extract_specified_names(arguments) + filter!(pkg -> !(pkg in specified_names), comps) + return comps, idx, !isempty(comps) end +# TODO: Move +import Pkg: Operations, Types, Apps +function complete_installed_apps(options, partial; hint, arguments = []) + manifest = try + Types.read_manifest(joinpath(Apps.app_env_folder(), "AppManifest.toml")) + catch err + err isa PkgError || rethrow() + return String[] + end + apps = String[] + for (uuid, entry) in manifest.deps + append!(apps, 
keys(entry.apps)) + push!(apps, entry.name) + end + apps = unique!(apps) + + # Filter out already-specified packages + specified_names = extract_specified_names(arguments) + return filter(app -> !(app in specified_names), apps) +end + ######################## # COMPLETION INTERFACE # ######################## @@ -179,13 +237,23 @@ function complete_command(statement::Statement, final::Bool, on_sub::Bool) end complete_opt(opt_specs) = - unique(sort(map(wrap_option, - map(x -> getproperty(x, :name), - collect(values(opt_specs)))))) + unique( + sort( + map( + wrap_option, + map( + x -> getproperty(x, :name), + collect(values(opt_specs)) + ) + ) + ) +) -function complete_argument(spec::CommandSpec, options::Vector{String}, - partial::AbstractString, offset::Int, - index::Int; hint::Bool) +function complete_argument( + spec::CommandSpec, options::Vector{String}, arguments::Vector, + partial::AbstractString, offset::Int, + index::Int; hint::Bool + ) if spec.completions isa Symbol # if completions is a symbol, it is a function in REPLExt that needs to be forwarded # to REPLMode (couldn't be linked there because REPLExt is not a dependency of REPLMode) @@ -195,11 +263,16 @@ function complete_argument(spec::CommandSpec, options::Vector{String}, @error "REPLMode indicates a completion function called :$(spec.completions) that cannot be found in REPLExt" rethrow() end - spec.completions = function(opts, partial, offset, index; hint::Bool) - applicable(completions, opts, partial, offset, index) ? - completions(opts, partial, offset, index; hint) : - completions(opts, partial; hint) + spec.completions = function (opts, partial, offset, index; hint::Bool, arguments = []) + # Wrapper that normalizes completion function calls. 
+ if applicable(completions, opts, partial, offset, index) + # Function takes 4 positional args: (opts, partial, offset, index; hint, arguments) + return completions(opts, partial, offset, index; hint, arguments) + else + # Function takes 2 positional args: (opts, partial; hint, arguments) + return completions(opts, partial; hint, arguments) end + end end spec.completions === nothing && return String[] # finish parsing opts @@ -210,18 +283,19 @@ function complete_argument(spec::CommandSpec, options::Vector{String}, e isa PkgError && return String[] rethrow() end - return spec.completions(opts, partial, offset, index; hint) + return spec.completions(opts, partial, offset, index; hint, arguments) end function _completions(input, final, offset, index; hint::Bool) statement, word_count, partial = nothing, nothing, nothing try - words = tokenize(input)[end] + words = tokenize(input; rm_leading_bracket = false)[end] word_count = length(words) statement, partial = core_parse(words) if final partial = "" # last token is finalized -> no partial end + partial = something(partial, "") catch return String[], 0:-1, false end @@ -236,11 +310,11 @@ function _completions(input, final, offset, index; hint::Bool) command_is_focused() && return String[], 0:-1, false if final # complete arg by default - x = complete_argument(statement.spec, statement.options, partial, offset, index; hint) + x = complete_argument(statement.spec, statement.options, statement.arguments, partial, offset, index; hint) else # complete arg or opt depending on last token x = is_opt(partial) ? 
complete_opt(statement.spec.option_specs) : - complete_argument(statement.spec, statement.options, partial, offset, index; hint) + complete_argument(statement.spec, statement.options, statement.arguments, partial, offset, index; hint) end end @@ -255,7 +329,7 @@ function _completions(input, final, offset, index; hint::Bool) end end -function completions(full, index; hint::Bool=false)::Tuple{Vector{String},UnitRange{Int},Bool} +function completions(full, index; hint::Bool = false)::Tuple{Vector{String}, UnitRange{Int}, Bool} pre = full[1:index] isempty(pre) && return default_commands(), 0:-1, false # empty input -> complete commands offset_adjust = 0 @@ -264,8 +338,8 @@ function completions(full, index; hint::Bool=false)::Tuple{Vector{String},UnitRa pre = string(pre[1], " ", pre[2:end]) offset_adjust = -1 end - last = split(pre, ' ', keepempty=true)[end] - offset = isempty(last) ? index+1+offset_adjust : last.offset+1+offset_adjust - final = isempty(last) # is the cursor still attached to the final token? + last = split(pre, ' ', keepempty = true)[end] + offset = isempty(last) ? index + 1 + offset_adjust : last.offset + 1 + offset_adjust + final = isempty(last) # is the cursor still attached to the final token? 
return _completions(pre, final, offset, index; hint) end diff --git a/ext/REPLExt/precompile.jl b/ext/REPLExt/precompile.jl index 2deb9b84f0..796c746eba 100644 --- a/ext/REPLExt/precompile.jl +++ b/ext/REPLExt/precompile.jl @@ -14,29 +14,35 @@ let original_load_path = copy(LOAD_PATH) __init__() Pkg.UPDATED_REGISTRY_THIS_SESSION[] = true - Pkg.DEFAULT_IO[] = Pkg.unstableio(devnull) - withenv("JULIA_PKG_SERVER" => nothing, "JULIA_PKG_UNPACK_REGISTRY" => nothing) do - tmp = Pkg._run_precompilation_script_setup() - cd(tmp) do - try_prompt_pkg_add(Symbol[:notapackage]) - promptf() - term = FakeTerminal() - repl = REPL.LineEditREPL(term, true) - REPL.run_repl(repl) - repl_init(repl) + Base.ScopedValues.@with Pkg.DEFAULT_IO => Pkg.unstableio(devnull) begin + withenv("JULIA_PKG_SERVER" => nothing, "JULIA_PKG_UNPACK_REGISTRY" => nothing) do + tmp = Pkg._run_precompilation_script_setup() + cd(tmp) do + try_prompt_pkg_add(Symbol[:notapackage]) + promptf() + term = FakeTerminal() + repl = REPL.LineEditREPL(term, true) + REPL.run_repl(repl) + repl_init(repl) + end end - end - copy!(DEPOT_PATH, original_depot_path) - copy!(LOAD_PATH, original_load_path) + copy!(DEPOT_PATH, original_depot_path) + copy!(LOAD_PATH, original_load_path) - Base.precompile(Tuple{typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState}) - Base.precompile(Tuple{typeof(REPL.REPLCompletions.completion_text), REPL.REPLCompletions.PackageCompletion}) - Base.precompile(Tuple{typeof(REPLExt.on_done), REPL.LineEdit.MIState, Base.GenericIOBuffer{Memory{UInt8}}, Bool, REPL.LineEditREPL}) - Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:hint,), Tuple{Bool}}, typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState}) + Base.precompile(Tuple{typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState}) + Base.precompile(Tuple{typeof(REPL.REPLCompletions.completion_text), 
REPL.REPLCompletions.PackageCompletion}) + Base.precompile(Tuple{typeof(REPLExt.on_done), REPL.LineEdit.MIState, Base.GenericIOBuffer{Memory{UInt8}}, Bool, REPL.LineEditREPL}) + return Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:hint,), Tuple{Bool}}, typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState}) + end end if Base.generating_output() - pkgreplmode_precompile() + ccall(:jl_tag_newly_inferred_enable, Cvoid, ()) + try + pkgreplmode_precompile() + finally + ccall(:jl_tag_newly_inferred_disable, Cvoid, ()) + end end end # let diff --git a/src/API.jl b/src/API.jl index 5e5723a8b7..8b1f5a1af9 100644 --- a/src/API.jl +++ b/src/API.jl @@ -12,9 +12,9 @@ import FileWatching import Base: StaleCacheKey -import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle +import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle, .._autoprecompilation_enabled_scoped, ..manifest_rel_path import ..Operations, ..GitTools, ..Pkg, ..Registry -import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH +import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH, ..atomic_toml_write, ..safe_realpath using ..Types, ..TOML using ..Types: VersionTypes using Base.BinaryPlatforms @@ -27,17 +27,17 @@ include("generate.jl") Base.@kwdef struct PackageInfo name::String - version::Union{Nothing,VersionNumber} - tree_hash::Union{Nothing,String} + version::Union{Nothing, VersionNumber} + tree_hash::Union{Nothing, String} is_direct_dep::Bool is_pinned::Bool is_tracking_path::Bool is_tracking_repo::Bool is_tracking_registry::Bool - git_revision::Union{Nothing,String} - git_source::Union{Nothing,String} + git_revision::Union{Nothing, String} + git_source::Union{Nothing, String} source::String - dependencies::Dict{String,UUID} + dependencies::Dict{String, UUID} end function Base.:(==)(a::PackageInfo, b::PackageInfo) @@ -53,29 +53,36 @@ end function package_info(env::EnvCache, pkg::PackageSpec)::PackageInfo entry = 
manifest_info(env.manifest, pkg.uuid) if entry === nothing - pkgerror("expected package $(err_rep(pkg)) to exist in the manifest", - " (use `resolve` to populate the manifest)") + pkgerror( + "expected package $(err_rep(pkg)) to exist in the manifest", + " (use `resolve` to populate the manifest)" + ) end - package_info(env, pkg, entry) + return package_info(env, pkg, entry) end function package_info(env::EnvCache, pkg::PackageSpec, entry::PackageEntry)::PackageInfo git_source = pkg.repo.source === nothing ? nothing : isurl(pkg.repo.source::String) ? pkg.repo.source::String : - Operations.project_rel_path(env, pkg.repo.source::String) + safe_realpath(manifest_rel_path(env, pkg.repo.source::String)) + _source_path = Operations.source_path(env.manifest_file, pkg) + if _source_path === nothing + @debug "Manifest file $(env.manifest_file) contents:\n$(read(env.manifest_file, String))" + pkgerror("could not find source path for package $(err_rep(pkg)) based on $(env.manifest_file)") + end info = PackageInfo( - name = pkg.name, - version = pkg.version != VersionSpec() ? pkg.version : nothing, - tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA? - is_direct_dep = pkg.uuid in values(env.project.deps), - is_pinned = pkg.pinned, - is_tracking_path = pkg.path !== nothing, - is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing, + name = pkg.name, + version = pkg.version != VersionSpec() ? pkg.version : nothing, + tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA? 
+ is_direct_dep = pkg.uuid in values(env.project.deps), + is_pinned = pkg.pinned, + is_tracking_path = pkg.path !== nothing, + is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing, is_tracking_registry = Operations.is_tracking_registry(pkg), - git_revision = pkg.repo.rev, - git_source = git_source, - source = Operations.project_rel_path(env, Operations.source_path(env.manifest_file, pkg)), - dependencies = copy(entry.deps), #TODO is copy needed? + git_revision = pkg.repo.rev, + git_source = git_source, + source = _source_path, + dependencies = copy(entry.deps), #TODO is copy needed? ) return info end @@ -90,17 +97,17 @@ function dependencies(fn::Function, uuid::UUID) if dep === nothing pkgerror("dependency with UUID `$uuid` does not exist") end - fn(dep) + return fn(dep) end Base.@kwdef struct ProjectInfo - name::Union{Nothing,String} - uuid::Union{Nothing,UUID} - version::Union{Nothing,VersionNumber} + name::Union{Nothing, String} + uuid::Union{Nothing, UUID} + version::Union{Nothing, VersionNumber} ispackage::Bool - dependencies::Dict{String,UUID} - sources::Dict{String,Dict{String,String}} + dependencies::Dict{String, UUID} + sources::Dict{String, Dict{String, String}} path::String end @@ -108,26 +115,28 @@ project() = project(EnvCache()) function project(env::EnvCache)::ProjectInfo pkg = env.pkg return ProjectInfo( - name = pkg === nothing ? nothing : pkg.name, - uuid = pkg === nothing ? nothing : pkg.uuid, - version = pkg === nothing ? nothing : pkg.version::VersionNumber, - ispackage = pkg !== nothing, + name = pkg === nothing ? nothing : pkg.name, + uuid = pkg === nothing ? nothing : pkg.uuid, + version = pkg === nothing ? 
nothing : pkg.version::VersionNumber, + ispackage = pkg !== nothing, dependencies = env.project.deps, - sources = env.project.sources, - path = env.project_file + sources = env.project.sources, + path = env.project_file ) end -function check_package_name(x::AbstractString, mode::Union{Nothing,String,Symbol}=nothing) +function check_package_name(x::AbstractString, mode::Union{Nothing, String, Symbol} = nothing) if !Base.isidentifier(x) message = sprint() do iostr print(iostr, "`$x` is not a valid package name") if endswith(lowercase(x), ".jl") - print(iostr, ". Perhaps you meant `$(chop(x; tail=3))`") + print(iostr, ". Perhaps you meant `$(chop(x; tail = 3))`") end - if mode !== nothing && any(occursin.(['\\','/'], x)) # maybe a url or a path - print(iostr, "\nThe argument appears to be a URL or path, perhaps you meant ", - "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`.") + if mode !== nothing && any(occursin.(['\\', '/'], x)) # maybe a url or a path + print( + iostr, "\nThe argument appears to be a URL or path, perhaps you meant ", + "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`." + ) end end pkgerror(message) @@ -137,15 +146,19 @@ end check_package_name(::Nothing, ::Any) = nothing function require_not_empty(pkgs, f::Symbol) - isempty(pkgs) && pkgerror("$f requires at least one package") + return isempty(pkgs) && pkgerror("$f requires at least one package") +end + +function check_readonly(ctx::Context) + return ctx.env.project.readonly && pkgerror("Cannot modify a readonly environment. The project at $(ctx.env.project_file) is marked as readonly.") end # Provide some convenience calls for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :precompile) @eval begin $f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...) - $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...) - function $f(pkgs::Vector{PackageSpec}; io::IO=$(f === :status ? 
:stdout_f : :stderr_f)(), kwargs...) + $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...) + function $f(pkgs::Vector{PackageSpec}; io::IO = $(f === :status ? :stdout_f : :stderr_f)(), kwargs...) $(f != :precompile) && Registry.download_default_registries(io) ctx = Context() # Save initial environment for undo/redo functionality @@ -153,7 +166,7 @@ for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, : add_snapshot_to_undo(ctx.env) saved_initial_snapshot[] = true end - kwargs = merge((;kwargs...), (:io => io,)) + kwargs = merge((; kwargs...), (:io => io,)) pkgs = deepcopy(pkgs) # don't mutate input foreach(handle_package_input!, pkgs) ret = $f(ctx, pkgs; kwargs...) @@ -162,55 +175,102 @@ for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, : return ret end $f(ctx::Context; kwargs...) = $f(ctx, PackageSpec[]; kwargs...) - function $f(; name::Union{Nothing,AbstractString}=nothing, uuid::Union{Nothing,String,UUID}=nothing, - version::Union{VersionNumber, String, VersionSpec, Nothing}=nothing, - url=nothing, rev=nothing, path=nothing, mode=PKGMODE_PROJECT, subdir=nothing, kwargs...) + function $f(; + name::Union{Nothing, AbstractString} = nothing, uuid::Union{Nothing, String, UUID} = nothing, + version::Union{VersionNumber, String, VersionSpec, Nothing} = nothing, + url = nothing, rev = nothing, path = nothing, mode = PKGMODE_PROJECT, subdir = nothing, kwargs... + ) pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir) if $f === status || $f === rm || $f === up - kwargs = merge((;kwargs...), (:mode => mode,)) + kwargs = merge((; kwargs...), (:mode => mode,)) end # Handle $f() case - if all(isnothing, [name,uuid,version,url,rev,path,subdir]) + return if all(isnothing, [name, uuid, version, url, rev, path, subdir]) $f(PackageSpec[]; kwargs...) else $f(pkg; kwargs...) end end function $f(pkgs::Vector{<:NamedTuple}; kwargs...) - $f([PackageSpec(;pkg...) 
for pkg in pkgs]; kwargs...) + return $f([PackageSpec(; pkg...) for pkg in pkgs]; kwargs...) end end end -function update_source_if_set(project, pkg) +function update_source_if_set(env, pkg) + project = env.project source = get(project.sources, pkg.name, nothing) - source === nothing && return - # This should probably not modify the dicts directly... - if pkg.repo.source !== nothing - source["url"] = pkg.repo.source - end - if pkg.repo.rev !== nothing - source["rev"] = pkg.repo.rev - end - if pkg.path !== nothing - source["path"] = pkg.path - end - path, repo = get_path_repo(project, pkg.name) - if path !== nothing - pkg.path = path + if source !== nothing + if pkg.repo == GitRepo() + delete!(project.sources, pkg.name) + else + # This should probably not modify the dicts directly... + if pkg.repo.source !== nothing + source["url"] = pkg.repo.source + delete!(source, "path") + end + if pkg.repo.rev !== nothing + source["rev"] = pkg.repo.rev + delete!(source, "path") + end + if pkg.repo.subdir !== nothing + source["subdir"] = pkg.repo.subdir + end + if pkg.path !== nothing + source["path"] = pkg.path + delete!(source, "url") + delete!(source, "rev") + end + end + if pkg.subdir !== nothing + source["subdir"] = pkg.subdir + end + path, repo = get_path_repo(project, env.project_file, env.manifest_file, pkg.name) + if path !== nothing + pkg.path = path + end + if repo.source !== nothing + pkg.repo.source = repo.source + end + if repo.rev !== nothing + pkg.repo.rev = repo.rev + end + if repo.subdir !== nothing + pkg.repo.subdir = repo.subdir + end end - if repo.source !== nothing - pkg.repo.source = repo.source + + # Packages in manifest should have their paths set to the path in the manifest + for (path, wproj) in env.workspace + if wproj.uuid == pkg.uuid + pkg.path = Types.relative_project_path(env.manifest_file, dirname(path)) + break + end end - if repo.rev !== nothing - pkg.repo.rev = repo.rev + return +end + +# Normalize relative paths from user input (pwd-relative) 
to internal representation (manifest-relative) +# This ensures all relative paths in Pkg are consistently relative to the manifest file +function normalize_package_paths!(ctx::Context, pkgs::Vector{PackageSpec}) + for pkg in pkgs + if pkg.repo.source !== nothing && !isurl(pkg.repo.source) && !isabspath(pkg.repo.source) + # User provided a relative path (relative to pwd), convert to manifest-relative + absolute_path = abspath(pkg.repo.source) + pkg.repo.source = Types.relative_project_path(ctx.env.manifest_file, absolute_path) + end end + return end -function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true, - preserve::PreserveLevel=Operations.default_preserve(), platform::AbstractPlatform=HostPlatform(), kwargs...) +function develop( + ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool = true, + preserve::PreserveLevel = Operations.default_preserve(), platform::AbstractPlatform = HostPlatform(), kwargs... + ) require_not_empty(pkgs, :develop) Context!(ctx; kwargs...) 
+ Operations.ensure_manifest_registries!(ctx) + check_readonly(ctx) for pkg in pkgs check_package_name(pkg.name, "develop") @@ -224,8 +284,10 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true, pkgerror("rev argument not supported by `develop`; consider using `add` instead") end if pkg.version != VersionSpec() - pkgerror("version specification invalid when calling `develop`:", - " `$(pkg.version)` specified for package $(err_rep(pkg))") + pkgerror( + "version specification invalid when calling `develop`:", + " `$(pkg.version)` specified for package $(err_rep(pkg))" + ) end # not strictly necessary to check these fields early, but it is more efficient if pkg.name !== nothing && (length(findall(x -> x.name == pkg.name, pkgs)) > 1) @@ -236,8 +298,11 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true, end end + normalize_package_paths!(ctx, pkgs) + new_git = handle_repos_develop!(ctx, pkgs, shared) + Operations.update_registries(ctx; force = false, update_cooldown = Day(1)) for pkg in pkgs if Types.collides_with_project(ctx.env, pkg) @@ -246,17 +311,21 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true, if length(findall(x -> x.uuid == pkg.uuid, pkgs)) > 1 pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))") end - update_source_if_set(ctx.env.project, pkg) + update_source_if_set(ctx.env, pkg) end - Operations.develop(ctx, pkgs, new_git; preserve=preserve, platform=platform) + Operations.develop(ctx, pkgs, new_git; preserve = preserve, platform = platform) return end -function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Operations.default_preserve(), - platform::AbstractPlatform=HostPlatform(), target::Symbol=:deps, allow_autoprecomp::Bool=true, kwargs...) 
+function add( + ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel = Operations.default_preserve(), + platform::AbstractPlatform = HostPlatform(), target::Symbol = :deps, allow_autoprecomp::Bool = true, kwargs... + ) require_not_empty(pkgs, :add) Context!(ctx; kwargs...) + Operations.ensure_manifest_registries!(ctx) + check_readonly(ctx) for pkg in pkgs check_package_name(pkg.name, "add") @@ -268,8 +337,10 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op end if pkg.repo.source !== nothing || pkg.repo.rev !== nothing if pkg.version != VersionSpec() - pkgerror("version specification invalid when tracking a repository:", - " `$(pkg.version)` specified for package $(err_rep(pkg))") + pkgerror( + "version specification invalid when tracking a repository:", + " `$(pkg.version)` specified for package $(err_rep(pkg))" + ) end end # not strictly necessary to check these fields early, but it is more efficient @@ -281,17 +352,19 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op end end + normalize_package_paths!(ctx, pkgs) + repo_pkgs = PackageSpec[pkg for pkg in pkgs if (pkg.repo.source !== nothing || pkg.repo.rev !== nothing)] new_git = handle_repos_add!(ctx, repo_pkgs) # repo + unpinned -> name, uuid, repo.rev, repo.source, tree_hash # repo + pinned -> name, uuid, tree_hash - Operations.update_registries(ctx; force=false, update_cooldown=Day(1)) + Operations.update_registries(ctx; force = false, update_cooldown = Day(1)) project_deps_resolve!(ctx.env, pkgs) registry_resolve!(ctx.registries, pkgs) stdlib_resolve!(pkgs) - ensure_resolved(ctx, ctx.env.manifest, pkgs, registry=true) + ensure_resolved(ctx, ctx.env.manifest, pkgs, registry = true) for pkg in pkgs if Types.collides_with_project(ctx.env, pkg) @@ -300,15 +373,17 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op if length(findall(x -> x.uuid == pkg.uuid, pkgs)) > 1 pkgerror("it is invalid to specify 
multiple packages with the same UUID: $(err_rep(pkg))") end - update_source_if_set(ctx.env.project, pkg) + update_source_if_set(ctx.env, pkg) end Operations.add(ctx, pkgs, new_git; allow_autoprecomp, preserve, platform, target) return end -function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_pkgs::Bool=false, kwargs...) +function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode = PKGMODE_PROJECT, all_pkgs::Bool = false, kwargs...) Context!(ctx; kwargs...) + Operations.ensure_manifest_registries!(ctx) + check_readonly(ctx) if all_pkgs !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages") append_all_pkgs!(pkgs, ctx, mode) @@ -320,9 +395,11 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_p if pkg.name === nothing && pkg.uuid === nothing pkgerror("name or UUID specification required when calling `rm`") end - if !(pkg.version == VersionSpec() && pkg.pinned == false && - pkg.tree_hash === nothing && pkg.repo.source === nothing && - pkg.repo.rev === nothing && pkg.path === nothing) + if !( + pkg.version == VersionSpec() && pkg.pinned == false && + pkg.tree_hash === nothing && pkg.repo.source === nothing && + pkg.repo.rev === nothing && pkg.path === nothing + ) pkgerror("packages may only be specified by name or UUID when calling `rm`") end end @@ -337,40 +414,58 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_p end -function append_all_pkgs!(pkgs, ctx, mode) +function append_all_pkgs!(pkgs, ctx, mode; workspace::Bool = false) if mode == PKGMODE_PROJECT || mode == PKGMODE_COMBINED for (name::String, uuid::UUID) in ctx.env.project.deps - path, repo = get_path_repo(ctx.env.project, name) - push!(pkgs, PackageSpec(name=name, uuid=uuid, path=path, repo=repo)) + path, repo = get_path_repo(ctx.env.project, ctx.env.project_file, ctx.env.manifest_file, name) + push!(pkgs, PackageSpec(name = name, uuid = uuid, path = path, repo = repo)) + end + if workspace + 
for (project_file, project) in ctx.env.workspace + for (name::String, uuid::UUID) in project.deps + path, repo = get_path_repo(project, project_file, ctx.env.manifest_file, name) + existing = findfirst(p -> p.uuid == uuid, pkgs) + if existing !== nothing + Operations.merge_pkg_source!(pkgs[existing], path, repo) + continue + end + push!(pkgs, PackageSpec(name = name, uuid = uuid, path = path, repo = repo)) + end + end end end if mode == PKGMODE_MANIFEST || mode == PKGMODE_COMBINED for (uuid, entry) in ctx.env.manifest - path, repo = get_path_repo(ctx.env.project, entry.name) - push!(pkgs, PackageSpec(name=entry.name, uuid=uuid, path=path, repo=repo)) + path, repo = get_path_repo(ctx.env.project, ctx.env.project_file, ctx.env.manifest_file, entry.name) + push!(pkgs, PackageSpec(name = entry.name, uuid = uuid, path = path, repo = repo)) end end return end -function up(ctx::Context, pkgs::Vector{PackageSpec}; - level::UpgradeLevel=UPLEVEL_MAJOR, mode::PackageMode=PKGMODE_PROJECT, - preserve::Union{Nothing,PreserveLevel}= isempty(pkgs) ? nothing : PRESERVE_ALL, - update_registry::Bool=true, - skip_writing_project::Bool=false, - kwargs...) +function up( + ctx::Context, pkgs::Vector{PackageSpec}; + level::UpgradeLevel = UPLEVEL_MAJOR, mode::PackageMode = PKGMODE_PROJECT, + preserve::Union{Nothing, PreserveLevel} = isempty(pkgs) ? nothing : PRESERVE_ALL, + update_registry::Bool = true, + skip_writing_project::Bool = false, + workspace::Bool = false, + kwargs... + ) Context!(ctx; kwargs...) 
+ Operations.ensure_manifest_registries!(ctx) + check_readonly(ctx) if Operations.is_fully_pinned(ctx) printpkgstyle(ctx.io, :Update, "All dependencies are pinned - nothing to update.", color = Base.info_color()) return end if update_registry Registry.download_default_registries(ctx.io) - Operations.update_registries(ctx; force=true) + Operations.update_registries(ctx; force = true) end Operations.prune_manifest(ctx.env) if isempty(pkgs) - append_all_pkgs!(pkgs, ctx, mode) + append_all_pkgs!(pkgs, ctx, mode; workspace) else mode == PKGMODE_PROJECT && project_deps_resolve!(ctx.env, pkgs) mode == PKGMODE_MANIFEST && manifest_resolve!(ctx.env.manifest, pkgs) @@ -378,22 +473,26 @@ function up(ctx::Context, pkgs::Vector{PackageSpec}; manifest_resolve!(ctx.env.manifest, pkgs) ensure_resolved(ctx, ctx.env.manifest, pkgs) end - + for pkg in pkgs + update_source_if_set(ctx.env, pkg) + end Operations.up(ctx, pkgs, level; skip_writing_project, preserve) return end -resolve(; io::IO=stderr_f(), kwargs...) = resolve(Context(;io); kwargs...) -function resolve(ctx::Context; skip_writing_project::Bool=false, kwargs...) - up(ctx; level=UPLEVEL_FIXED, mode=PKGMODE_MANIFEST, update_registry=false, skip_writing_project, kwargs...) +resolve(; io::IO = stderr_f(), kwargs...) = resolve(Context(; io); kwargs...) +function resolve(ctx::Context; skip_writing_project::Bool = false, kwargs...) + up(ctx; level = UPLEVEL_FIXED, mode = PKGMODE_MANIFEST, update_registry = false, skip_writing_project, kwargs...) return nothing end -function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...) +function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool = false, workspace::Bool = false, kwargs...) Context!(ctx; kwargs...) 
+ Operations.ensure_manifest_registries!(ctx) + check_readonly(ctx) if all_pkgs !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages") - append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST) + append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST; workspace) else require_not_empty(pkgs, :pin) end @@ -403,12 +502,16 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar pkgerror("name or UUID specification required when calling `pin`") end if pkg.repo.source !== nothing - pkgerror("repository specification invalid when calling `pin`:", - " `$(pkg.repo.source)` specified for package $(err_rep(pkg))") + pkgerror( + "repository specification invalid when calling `pin`:", + " `$(pkg.repo.source)` specified for package $(err_rep(pkg))" + ) end if pkg.repo.rev !== nothing - pkgerror("git revision specification invalid when calling `pin`:", - " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))") + pkgerror( + "git revision specification invalid when calling `pin`:", + " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))" + ) end version = pkg.version if version isa VersionSpec @@ -416,6 +519,7 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar pkgerror("pinning a package requires a single version, not a versionrange") end end + update_source_if_set(ctx.env, pkg) end project_deps_resolve!(ctx.env, pkgs) @@ -424,11 +528,13 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar return end -function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...) +function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool = false, workspace::Bool = false, kwargs...) Context!(ctx; kwargs...) 
+ Operations.ensure_manifest_registries!(ctx) + check_readonly(ctx) if all_pkgs !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages") - append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST) + append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST; workspace) else require_not_empty(pkgs, :free) end @@ -437,9 +543,11 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwa if pkg.name === nothing && pkg.uuid === nothing pkgerror("name or UUID specification required when calling `free`") end - if !(pkg.version == VersionSpec() && pkg.pinned == false && - pkg.tree_hash === nothing && pkg.repo.source === nothing && - pkg.repo.rev === nothing && pkg.path === nothing) + if !( + pkg.version == VersionSpec() && pkg.pinned == false && + pkg.tree_hash === nothing && pkg.repo.source === nothing && + pkg.repo.rev === nothing && pkg.path === nothing + ) pkgerror("packages may only be specified by name or UUID when calling `free`") end end @@ -451,17 +559,20 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwa return end -function test(ctx::Context, pkgs::Vector{PackageSpec}; - coverage=false, test_fn=nothing, - julia_args::Union{Cmd, AbstractVector{<:AbstractString}}=``, - test_args::Union{Cmd, AbstractVector{<:AbstractString}}=``, - force_latest_compatible_version::Bool=false, - allow_earlier_backwards_compatible_versions::Bool=true, - allow_reresolve::Bool=true, - kwargs...) +function test( + ctx::Context, pkgs::Vector{PackageSpec}; + coverage = false, test_fn = nothing, + julia_args::Union{Cmd, AbstractVector{<:AbstractString}} = ``, + test_args::Union{Cmd, AbstractVector{<:AbstractString}} = ``, + force_latest_compatible_version::Bool = false, + allow_earlier_backwards_compatible_versions::Bool = true, + allow_reresolve::Bool = true, + kwargs... + ) julia_args = Cmd(julia_args) test_args = Cmd(test_args) Context!(ctx; kwargs...) 
+ Operations.ensure_manifest_registries!(ctx) if isempty(pkgs) ctx.env.pkg === nothing && pkgerror("The Project.toml of the package being tested must have a name and a UUID entry") #TODO Allow this? @@ -496,18 +607,16 @@ function is_manifest_current(path::AbstractString) return Operations.is_manifest_current(env) end -const UsageDict = Dict{String,DateTime} -const UsageByDepotDict = Dict{String,UsageDict} +const UsageDict = Dict{String, DateTime} +const UsageByDepotDict = Dict{String, UsageDict} """ - gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, kwargs...) + gc(ctx::Context=Context(); verbose=false, force=false, kwargs...) Garbage-collect package and artifact installations by sweeping over all known `Manifest.toml` and `Artifacts.toml` files, noting those that have been deleted, and then -finding artifacts and packages that are thereafter not used by any other projects, -marking them as "orphaned". This method will only remove orphaned objects (package -versions, artifacts, and scratch spaces) that have been continually un-used for a period -of `collect_delay`; which defaults to seven days. +finding artifacts and packages that are thereafter not used by any other projects. +Unused packages, artifacts, repos, and scratch spaces are immediately deleted. Garbage collection is only applied to the "user depot", e.g. the first entry in the depot path. If you want to run `gc` on all depots set `force=true` (this might require @@ -515,8 +624,11 @@ admin privileges depending on the setup). Use verbose mode (`verbose=true`) for detailed output. """ -function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, force=false, kwargs...) +function gc(ctx::Context = Context(); collect_delay::Union{Period, Nothing} = nothing, verbose = false, force = false, kwargs...) Context!(ctx; kwargs...) + if collect_delay !== nothing + @warn "The `collect_delay` parameter is no longer used. 
Packages are now deleted immediately when they become unreachable." + end env = ctx.env # Only look at user-depot unless force=true @@ -549,6 +661,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, for (filename, infos) in parse_toml(usage_filepath) f.(Ref(filename), infos) end + return end # Extract usage data from this depot, (taking only the latest state for each @@ -556,7 +669,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # into the overall list across depots to create a single, coherent view across # all depots. usage = UsageDict() - let usage=usage + let usage = usage reduce_usage!(joinpath(logdir(depot), "manifest_usage.toml")) do filename, info # For Manifest usage, store only the last DateTime for each filename found usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime) @@ -565,7 +678,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, manifest_usage_by_depot[depot] = usage usage = UsageDict() - let usage=usage + let usage = usage reduce_usage!(joinpath(logdir(depot), "artifact_usage.toml")) do filename, info # For Artifact usage, store only the last DateTime for each filename found usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime) @@ -576,7 +689,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # track last-used usage = UsageDict() parents = Dict{String, Set{String}}() - let usage=usage + let usage = usage reduce_usage!(joinpath(logdir(depot), "scratch_usage.toml")) do filename, info # For Artifact usage, store only the last DateTime for each filename found usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime) @@ -617,21 +730,20 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # Write out the TOML file for this depot usage_path = joinpath(logdir(depot), fname) if 
!(isempty(usage)::Bool) || isfile(usage_path) - let usage=usage - open(usage_path, "w") do io - TOML.print(io, usage, sorted=true) - end + let usage = usage + atomic_toml_write(usage_path, usage, sorted = true) end end end + return end # Write condensed Manifest usage - let all_manifest_tomls=all_manifest_tomls + let all_manifest_tomls = all_manifest_tomls write_condensed_toml(manifest_usage_by_depot, "manifest_usage.toml") do depot, usage # Keep only manifest usage markers that are still existent - let usage=usage - filter!(((k,v),) -> k in all_manifest_tomls, usage) + let usage = usage + filter!(((k, v),) -> k in all_manifest_tomls, usage) # Expand it back into a dict-of-dicts return Dict(k => [Dict("time" => v)] for (k, v) in usage) @@ -640,23 +752,23 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end # Write condensed Artifact usage - let all_artifact_tomls=all_artifact_tomls + let all_artifact_tomls = all_artifact_tomls write_condensed_toml(artifact_usage_by_depot, "artifact_usage.toml") do depot, usage let usage = usage - filter!(((k,v),) -> k in all_artifact_tomls, usage) + filter!(((k, v),) -> k in all_artifact_tomls, usage) return Dict(k => [Dict("time" => v)] for (k, v) in usage) end end end # Write condensed scratch space usage - let all_scratch_parents=all_scratch_parents, all_scratch_dirs=all_scratch_dirs + let all_scratch_parents = all_scratch_parents, all_scratch_dirs = all_scratch_dirs write_condensed_toml(scratch_usage_by_depot, "scratch_usage.toml") do depot, usage # Keep only scratch directories that still exist - filter!(((k,v),) -> k in all_scratch_dirs, usage) + filter!(((k, v),) -> k in all_scratch_dirs, usage) # Expand it back into a dict-of-dicts - expanded_usage = Dict{String,Vector{Dict}}() + expanded_usage = Dict{String, Vector{Dict}}() for (k, v) in usage # Drop scratch spaces whose parents are all non-existent parents = scratch_parents_by_depot[depot][k] @@ -665,10 +777,12 @@ function 
gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, continue end - expanded_usage[k] = [Dict( - "time" => v, - "parent_projects" => collect(parents), - )] + expanded_usage[k] = [ + Dict( + "time" => v, + "parent_projects" => collect(parents), + ), + ] end return expanded_usage end @@ -699,7 +813,15 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end # Collect the locations of every repo referred to in this manifest - return [Types.add_repo_cache_path(e.repo.source) for (u, e) in manifest if e.repo.source !== nothing] + return [ + Types.add_repo_cache_path( + isurl(e.repo.source) ? e.repo.source : + safe_realpath( + isabspath(e.repo.source) ? e.repo.source : + normpath(joinpath(dirname(path), e.repo.source)) + ) + ) for (u, e) in manifest if e.repo.source !== nothing + ] end function process_artifacts_toml(path, pkgs_to_delete) @@ -756,7 +878,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end # Mark packages/artifacts as active or not by calling the appropriate user function - function mark(process_func::Function, index_files, ctx::Context; do_print=true, verbose=false, file_str=nothing) + function mark(process_func::Function, index_files, ctx::Context; do_print = true, verbose = false, file_str = nothing) marked_paths = String[] active_index_files = Set{String}() for index_file in index_files @@ -783,92 +905,36 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, return Set(marked_paths) end - gc_time = now() - function merge_orphanages!(new_orphanage, paths, deletion_list, old_orphanage = UsageDict()) - for path in paths - free_time = something( - get(old_orphanage, path, nothing), - gc_time, - ) - - # No matter what, store the free time in the new orphanage. This allows - # something terrible to go wrong while trying to delete the artifact/ - # package and it will still try to be deleted next time. 
The only time - # something is removed from an orphanage is when it didn't exist before - # we even started the `gc` run. - new_orphanage[path] = free_time - - # If this path was orphaned long enough ago, add it to the deletion list. - # Otherwise, we continue to propagate its orphaning date but don't delete - # it. It will get cleaned up at some future `gc`, or it will be used - # again during a future `gc` in which case it will not persist within the - # orphanage list. - if gc_time - free_time >= collect_delay - push!(deletion_list, path) - end - end - end - # Scan manifests, parse them, read in all UUIDs listed and mark those as active # printpkgstyle(ctx.io, :Active, "manifests:") - packages_to_keep = mark(process_manifest_pkgs, all_manifest_tomls, ctx, - verbose=verbose, file_str="manifest files") - - # Do an initial scan of our depots to get a preliminary `packages_to_delete`. - packages_to_delete = String[] - for depot in gc_depots - depot_orphaned_packages = String[] - packagedir = abspath(depot, "packages") - if isdir(packagedir) - for name in readdir(packagedir) - !isdir(joinpath(packagedir, name)) && continue - - for slug in readdir(joinpath(packagedir, name)) - pkg_dir = joinpath(packagedir, name, slug) - !isdir(pkg_dir) && continue - - if !(pkg_dir in packages_to_keep) - push!(depot_orphaned_packages, pkg_dir) - end - end - end - end - merge_orphanages!(UsageDict(), depot_orphaned_packages, packages_to_delete) - end + packages_to_keep = mark( + process_manifest_pkgs, all_manifest_tomls, ctx, + verbose = verbose, file_str = "manifest files" + ) - # Next, do the same for artifacts. Note that we MUST do this after calculating - # `packages_to_delete`, as `process_artifacts_toml()` uses it internally to discount - # `Artifacts.toml` files that will be deleted by the future culling operation. + # Next, do the same for artifacts. 
# printpkgstyle(ctx.io, :Active, "artifacts:") - artifacts_to_keep = let packages_to_delete=packages_to_delete - mark(x -> process_artifacts_toml(x, packages_to_delete), - all_artifact_tomls, ctx; verbose=verbose, file_str="artifact files") - end - repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print=false) + artifacts_to_keep = mark( + x -> process_artifacts_toml(x, String[]), + all_artifact_tomls, ctx; verbose = verbose, file_str = "artifact files" + ) + repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print = false) # printpkgstyle(ctx.io, :Active, "scratchspaces:") - spaces_to_keep = let packages_to_delete=packages_to_delete - mark(x -> process_scratchspace(x, packages_to_delete), - all_scratch_dirs, ctx; verbose=verbose, file_str="scratchspaces") - end + spaces_to_keep = mark( + x -> process_scratchspace(x, String[]), + all_scratch_dirs, ctx; verbose = verbose, file_str = "scratchspaces" + ) - # Collect all orphaned paths (packages, artifacts and repos that are not reachable). These - # are implicitly defined in that we walk all packages/artifacts installed, then if - # they were not marked in the above steps, we reap them. + # Collect all unreachable paths (packages, artifacts and repos that are not reachable) + # and immediately delete them. 
packages_to_delete = String[] artifacts_to_delete = String[] repos_to_delete = String[] spaces_to_delete = String[] for depot in gc_depots - # We track orphaned objects on a per-depot basis, writing out our `orphaned.toml` - # tracking file immediately, only pushing onto the overall `*_to_delete` lists if - # the package has been orphaned for at least a period of `collect_delay` - depot_orphaned_packages = String[] - depot_orphaned_artifacts = String[] - depot_orphaned_repos = String[] - depot_orphaned_scratchspaces = String[] packagedir = abspath(depot, "packages") if isdir(packagedir) @@ -880,7 +946,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, !isdir(pkg_dir) && continue if !(pkg_dir in packages_to_keep) - push!(depot_orphaned_packages, pkg_dir) + push!(packages_to_delete, pkg_dir) end end end @@ -892,7 +958,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, repo_dir = joinpath(reposdir, repo) !isdir(repo_dir) && continue if !(repo_dir in repos_to_keep) - push!(depot_orphaned_repos, repo_dir) + push!(repos_to_delete, repo_dir) end end end @@ -904,7 +970,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, !isdir(artifact_path) && continue if !(artifact_path in artifacts_to_keep) - push!(depot_orphaned_artifacts, artifact_path) + push!(artifacts_to_delete, artifact_path) end end end @@ -918,13 +984,13 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, space_dir_or_file = joinpath(uuid_dir, space) if isdir(space_dir_or_file) if !(space_dir_or_file in spaces_to_keep) - push!(depot_orphaned_scratchspaces, space_dir_or_file) + push!(spaces_to_delete, space_dir_or_file) end elseif uuid == Operations.PkgUUID && isfile(space_dir_or_file) # special cleanup for the precompile cache files that Pkg saves - if any(prefix->startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_")) - if mtime(space_dir_or_file) 
< (time() - (24*60*60)) - push!(depot_orphaned_scratchspaces, space_dir_or_file) + if any(prefix -> startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_")) + if mtime(space_dir_or_file) < (time() - (24 * 60 * 60)) + push!(spaces_to_delete, space_dir_or_file) end end end @@ -932,27 +998,6 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end end - # Read in this depot's `orphaned.toml` file: - orphanage_file = joinpath(logdir(depot), "orphaned.toml") - new_orphanage = UsageDict() - old_orphanage = try - TOML.parse(String(read(orphanage_file))) - catch - UsageDict() - end - - # Update the package and artifact lists of things to delete, and - # create the `new_orphanage` list for this depot. - merge_orphanages!(new_orphanage, depot_orphaned_packages, packages_to_delete, old_orphanage) - merge_orphanages!(new_orphanage, depot_orphaned_artifacts, artifacts_to_delete, old_orphanage) - merge_orphanages!(new_orphanage, depot_orphaned_repos, repos_to_delete, old_orphanage) - merge_orphanages!(new_orphanage, depot_orphaned_scratchspaces, spaces_to_delete, old_orphanage) - - # Write out the `new_orphanage` for this depot - mkpath(dirname(orphanage_file)) - open(orphanage_file, "w") do io - TOML.print(io, new_orphanage, sorted=true) - end end function recursive_dir_size(path) @@ -964,12 +1009,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, try size += lstat(path).size catch ex - @error("Failed to calculate size of $path", exception=ex) + @error("Failed to calculate size of $path", exception = ex) end end end catch ex - @error("Failed to calculate size of $path", exception=ex) + @error("Failed to calculate size of $path", exception = ex) end return size end @@ -980,7 +1025,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, try lstat(path).size catch ex - @error("Failed to calculate size of $path", exception=ex) + @error("Failed to calculate 
size of $path", exception = ex) 0 end else @@ -988,14 +1033,16 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end try Base.Filesystem.prepare_for_deletion(path) - Base.rm(path; recursive=true, force=true) + Base.rm(path; recursive = true, force = true) catch e - @warn("Failed to delete $path", exception=e) + @warn("Failed to delete $path", exception = e) return 0 end if verbose - printpkgstyle(ctx.io, :Deleted, pathrepr(path) * " (" * - Base.format_bytes(path_size) * ")") + printpkgstyle( + ctx.io, :Deleted, pathrepr(path) * " (" * + Base.format_bytes(path_size) * ")" + ) end return path_size end @@ -1045,18 +1092,33 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end end - # Delete any files that could not be rm-ed and were specially moved to the delayed delete directory. - # Do this silently because it's out of scope for Pkg.gc() but it's helpful to use this opportunity to do it - if isdefined(Base.Filesystem, :delayed_delete_dir) - if isdir(Base.Filesystem.delayed_delete_dir()) - for p in readdir(Base.Filesystem.delayed_delete_dir(), join=true) + # Delete anything that could not be rm-ed and was specially recorded in the delayed delete reference folder. + # Do this silently because it's out of scope for Pkg.gc() but it's helpful to use this opportunity to do it. 
+ if isdefined(Base.Filesystem, :delayed_delete_ref) + delayed_delete_ref_path = Base.Filesystem.delayed_delete_ref() + if isdir(delayed_delete_ref_path) + delayed_delete_dirs = Set{String}() + for f in readdir(delayed_delete_ref_path; join = true) try + p = readline(f) + push!(delayed_delete_dirs, dirname(p)) Base.Filesystem.prepare_for_deletion(p) - Base.rm(p; recursive=true, force=true, allow_delayed_delete=false) + Base.rm(p; recursive = true, force = true, allow_delayed_delete = false) + Base.rm(f) catch e - @debug "Failed to delete $p" exception=e + @debug "Failed to delete $p" exception = e end end + for dir in delayed_delete_dirs + if basename(dir) == "julia_delayed_deletes" && isempty(readdir(dir)) + Base.Filesystem.prepare_for_deletion(dir) + Base.rm(dir; recursive = true) + end + end + if isempty(readdir(delayed_delete_ref_path)) + Base.Filesystem.prepare_for_deletion(delayed_delete_ref_path) + Base.rm(delayed_delete_ref_path; recursive = true) + end end end @@ -1072,7 +1134,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, s = ndel == 1 ? 
"" : "s" bytes_saved_string = Base.format_bytes(freed) - printpkgstyle(ctx.io, :Deleted, "$(ndel) $(name)$(s) ($bytes_saved_string)") + return printpkgstyle(ctx.io, :Deleted, "$(ndel) $(name)$(s) ($bytes_saved_string)") end print_deleted(ndel_pkg, package_space_freed, "package installation") print_deleted(ndel_repo, repo_space_freed, "repo") @@ -1083,11 +1145,40 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, printpkgstyle(ctx.io, :Deleted, "no artifacts, repos, packages or scratchspaces") end + # Run git gc on registries if git is available + if Sys.which("git") !== nothing + for depot in gc_depots + reg_dir = joinpath(depot, "registries") + isdir(reg_dir) || continue + + for reg_name in readdir(reg_dir) + reg_path = joinpath(reg_dir, reg_name) + isdir(reg_path) || continue + git_dir = joinpath(reg_path, ".git") + isdir(git_dir) || continue + + try + if verbose + printpkgstyle(ctx.io, :GC, "running git gc on registry $(reg_name)") + end + # Run git gc quietly, don't error if it fails + run(`git -C $reg_path gc --quiet`) + catch e + # Silently ignore errors from git gc + if verbose + @warn "git gc failed for registry $(reg_name)" exception = e + end + end + end + end + end + return end -function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose=false, kwargs...) +function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose = false, allow_reresolve::Bool = true, kwargs...) Context!(ctx; kwargs...) + Operations.ensure_manifest_registries!(ctx) if isempty(pkgs) if ctx.env.pkg !== nothing @@ -1101,7 +1192,7 @@ function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose=false, kwargs... 
project_resolve!(ctx.env, pkgs) manifest_resolve!(ctx.env.manifest, pkgs) ensure_resolved(ctx, ctx.env.manifest, pkgs) - Operations.build(ctx, Set{UUID}(pkg.uuid for pkg in pkgs), verbose) + return Operations.build(ctx, Set{UUID}(pkg.uuid for pkg in pkgs), verbose; allow_reresolve) end function get_or_make_pkgspec(pkgspecs::Vector{PackageSpec}, ctx::Context, uuid) @@ -1123,13 +1214,37 @@ function get_or_make_pkgspec(pkgspecs::Vector{PackageSpec}, ctx::Context, uuid) end end -function precompile(ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool=false, - strict::Bool=false, warn_loaded = true, already_instantiated = false, timing::Bool = false, - _from_loading::Bool=false, configs::Union{Base.Precompilation.Config,Vector{Base.Precompilation.Config}}=(``=>Base.CacheFlags()), - workspace::Bool=false, kwargs...) +function precompile( + ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool = false, + strict::Bool = false, warn_loaded = true, already_instantiated = false, timing::Bool = false, + _from_loading::Bool = false, configs::Union{Base.Precompilation.Config, Vector{Base.Precompilation.Config}} = (`` => Base.CacheFlags()), + workspace::Bool = false, monitor::Bool = false, stop::Bool = false, cancel::Bool = false, kwargs... + ) + # Handle background precompilation control options via Base + if monitor + Base.Precompilation.monitor_background_precompile(ctx.io) + return + end + if stop + if Base.Precompilation.stop_background_precompile(graceful = true) + printpkgstyle(ctx.io, :Info, "Stopping background precompilation...") + else + printpkgstyle(ctx.io, :Info, "No background precompilation is running") + end + return + end + if cancel + if Base.Precompilation.stop_background_precompile(graceful = false) + printpkgstyle(ctx.io, :Info, "Canceling background precompilation...") + else + printpkgstyle(ctx.io, :Info, "No background precompilation is running") + end + return + end + Context!(ctx; kwargs...) 
if !already_instantiated - instantiate(ctx; allow_autoprecomp=false, kwargs...) + instantiate(ctx; allow_autoprecomp = false, kwargs...) @debug "precompile: instantiated" end @@ -1139,17 +1254,28 @@ function precompile(ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool return end - io = ctx.io - if io isa IOContext{IO} - # precompile does quite a bit of output and using the IOContext{IO} can cause - # some slowdowns, the important part here is to not specialize the whole - # precompile function on the io - io = io.io + return activate(dirname(ctx.env.project_file)) do + io = if ctx.io isa IOContext{IO} && !isa(ctx.io.io, Base.PipeEndpoint) + # precompile does quite a bit of output and using the IOContext{IO} can cause + # some slowdowns, the important part here is to not specialize the whole + # precompile function on the io. + # But don't unwrap the IOContext if it is a PipeEndpoint, as that would + # cause the output to lose color. + ctx.io.io + else + ctx.io + end + pkgs_name = String[pkg.name for pkg in pkgs] + # Allow user to press 'd' to detach when running interactively + detachable = isinteractive() + return Base.Precompilation.precompilepkgs(pkgs_name; internal_call, strict, warn_loaded, timing, _from_loading, configs, manifest = workspace, io, detachable) end +end - activate(dirname(ctx.env.project_file)) do - pkgs_name = String[pkg.name for pkg in pkgs] - return Base.Precompilation.precompilepkgs(pkgs_name; internal_call, strict, warn_loaded, timing, _from_loading, configs, manifest=workspace, io) +function precompile(f, args...; kwargs...) + return Base.ScopedValues.@with _autoprecompilation_enabled_scoped => false begin + f() + Pkg.precompile(args...; kwargs...) end end @@ -1163,18 +1289,21 @@ function tree_hash(repo::LibGit2.GitRepo, tree_hash::String) end instantiate(; kwargs...) = instantiate(Context(); kwargs...) 
-function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, - update_registry::Bool=true, verbose::Bool=false, - platform::AbstractPlatform=HostPlatform(), allow_build::Bool=true, allow_autoprecomp::Bool=true, - workspace::Bool=false, julia_version_strict::Bool=false, kwargs...) +function instantiate( + ctx::Context; manifest::Union{Bool, Nothing} = nothing, + update_registry::Bool = true, verbose::Bool = false, + platform::AbstractPlatform = HostPlatform(), allow_build::Bool = true, allow_autoprecomp::Bool = true, + workspace::Bool = false, julia_version_strict::Bool = false, kwargs... + ) Context!(ctx; kwargs...) if Registry.download_default_registries(ctx.io) copy!(ctx.registries, Registry.reachable_registries()) end + Operations.ensure_manifest_registries!(ctx) if !isfile(ctx.env.project_file) && isfile(ctx.env.manifest_file) _manifest = Pkg.Types.read_manifest(ctx.env.manifest_file) Types.check_manifest_julia_version_compat(_manifest, ctx.env.manifest_file; julia_version_strict) - deps = Dict{String,String}() + deps = Dict{String, String}() for (uuid, pkg) in _manifest if pkg.name in keys(deps) # TODO, query what package to put in Project when in interactive mode? @@ -1183,7 +1312,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, deps[pkg.name] = string(uuid) end Types.write_project(Dict("deps" => deps), ctx.env.project_file) - return instantiate(Context(); manifest=manifest, update_registry=update_registry, allow_autoprecomp=allow_autoprecomp, verbose=verbose, platform=platform, kwargs...) + return instantiate(Context(); manifest = manifest, update_registry = update_registry, allow_autoprecomp = allow_autoprecomp, verbose = verbose, platform = platform, kwargs...) 
end if (!isfile(ctx.env.manifest_file) && manifest === nothing) || manifest == false # given no manifest exists, only allow invoking a registry update if there are project deps @@ -1198,17 +1327,24 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, Types.check_manifest_julia_version_compat(ctx.env.manifest, ctx.env.manifest_file; julia_version_strict) if Operations.is_manifest_current(ctx.env) === false + resolve_cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()" + update_cmd = Pkg.in_repl_mode() ? "pkg> update" : "Pkg.update()" @warn """The project dependencies or compat requirements have changed since the manifest was last resolved. - It is recommended to `Pkg.resolve()` or consider `Pkg.update()` if necessary.""" + It is recommended to `$resolve_cmd` or consider `$update_cmd` if necessary.""" end Operations.prune_manifest(ctx.env) for (name, uuid) in ctx.env.project.deps get(ctx.env.manifest, uuid, nothing) === nothing || continue - pkgerror("`$name` is a direct dependency, but does not appear in the manifest.", - " If you intend `$name` to be a direct dependency, run `Pkg.resolve()` to populate the manifest.", - " Otherwise, remove `$name` with `Pkg.rm(\"$name\")`.", - " Finally, run `Pkg.instantiate()` again.") + resolve_cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()" + rm_cmd = Pkg.in_repl_mode() ? "pkg> rm $name" : "Pkg.rm(\"$name\")" + instantiate_cmd = Pkg.in_repl_mode() ? "pkg> instantiate" : "Pkg.instantiate()" + pkgerror( + "`$name` is a direct dependency, but does not appear in the manifest.", + " If you intend `$name` to be a direct dependency, run `$resolve_cmd` to populate the manifest.", + " Otherwise, remove `$name` with `$rm_cmd`.", + " Finally, run `$instantiate_cmd` again." 
+ ) end # check if all source code and artifacts are downloaded to exit early if Operations.is_instantiated(ctx.env, workspace; platform) @@ -1228,7 +1364,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, if !(e isa PkgError) || update_registry == false rethrow(e) end - Operations.update_registries(ctx; force=false) + Operations.update_registries(ctx; force = false) Operations.check_registered(ctx.registries, pkgs) end new_git = UUID[] @@ -1241,18 +1377,18 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, ## Download repo at tree hash # determine canonical form of repo source if !isurl(repo_source) - repo_source = normpath(joinpath(dirname(ctx.env.project_file), repo_source)) + repo_source = manifest_rel_path(ctx.env, repo_source) end if !isurl(repo_source) && !isdir(repo_source) pkgerror("Did not find path `$(repo_source)` for $(err_rep(pkg))") end repo_path = Types.add_repo_cache_path(repo_source) - let repo_source=repo_source - LibGit2.with(GitTools.ensure_clone(ctx.io, repo_path, repo_source; isbare=true)) do repo + let repo_source = repo_source + LibGit2.with(GitTools.ensure_clone(ctx.io, repo_path, repo_source; isbare = true, depth = 1)) do repo # We only update the clone if the tree hash can't be found tree_hash_object = tree_hash(repo, string(pkg.tree_hash)) if tree_hash_object === nothing - GitTools.fetch(ctx.io, repo, repo_source; refspecs=Types.refspecs) + GitTools.fetch(ctx.io, repo, repo_source; refspecs = Types.refspecs, depth = LibGit2.Consts.FETCH_DEPTH_UNSHALLOW) tree_hash_object = tree_hash(repo, string(pkg.tree_hash)) end if tree_hash_object === nothing @@ -1266,39 +1402,40 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, end # Install all packages - new_apply = Operations.download_source(ctx) + new_apply = Operations.download_source(ctx, pkgs) # Install all artifacts - Operations.download_artifacts(ctx; platform, verbose) + Operations.download_artifacts(ctx, 
pkgs; platform, verbose) # Run build scripts - allow_build && Operations.build_versions(ctx, union(new_apply, new_git); verbose=verbose) + allow_build && Operations.build_versions(ctx, union(new_apply, new_git); verbose = verbose) - allow_autoprecomp && Pkg._auto_precompile(ctx, already_instantiated = true) + return allow_autoprecomp && Pkg._auto_precompile(ctx, already_instantiated = true) end -@deprecate status(mode::PackageMode) status(mode=mode) +@deprecate status(mode::PackageMode) status(mode = mode) -function status(ctx::Context, pkgs::Vector{PackageSpec}; diff::Bool=false, mode=PKGMODE_PROJECT, workspace::Bool=false, outdated::Bool=false, compat::Bool=false, extensions::Bool=false, io::IO=stdout_f()) +function status(ctx::Context, pkgs::Vector{PackageSpec}; diff::Bool = false, mode = PKGMODE_PROJECT, workspace::Bool = false, outdated::Bool = false, deprecated::Bool = false, compat::Bool = false, extensions::Bool = false, io::IO = stdout_f()) if compat diff && pkgerror("Compat status has no `diff` mode") outdated && pkgerror("Compat status has no `outdated` mode") + deprecated && pkgerror("Compat status has no `deprecated` mode") extensions && pkgerror("Compat status has no `extensions` mode") Operations.print_compat(ctx, pkgs; io) else - Operations.status(ctx.env, ctx.registries, pkgs; mode, git_diff=diff, io, outdated, extensions, workspace) + Operations.status(ctx.env, ctx.registries, pkgs; mode, git_diff = diff, io, outdated, deprecated, extensions, workspace) end return nothing end -function activate(;temp=false, shared=false, prev=false, io::IO=stderr_f()) +function activate(; temp = false, shared = false, prev = false, io::IO = stderr_f()) shared && pkgerror("Must give a name for a shared environment") - temp && return activate(mktempdir(); io=io) + temp && return activate(mktempdir(); io = io) if prev if isempty(PREV_ENV_PATH[]) pkgerror("No previously active environment found") else - return activate(PREV_ENV_PATH[]; io=io) + return 
activate(PREV_ENV_PATH[]; io = io) end end if !isnothing(Base.active_project()) @@ -1320,14 +1457,14 @@ function _activate_dep(dep_name::AbstractString) return end uuid = get(ctx.env.project.deps, dep_name, nothing) - if uuid !== nothing + return if uuid !== nothing entry = manifest_info(ctx.env.manifest, uuid) if entry.path !== nothing return joinpath(dirname(ctx.env.manifest_file), entry.path::String) end end end -function activate(path::AbstractString; shared::Bool=false, temp::Bool=false, io::IO=stderr_f()) +function activate(path::AbstractString; shared::Bool = false, temp::Bool = false, io::IO = stderr_f()) temp && pkgerror("Can not give `path` argument when creating a temporary environment") if !shared # `pkg> activate path`/`Pkg.activate(path)` does the following @@ -1374,23 +1511,39 @@ end function activate(f::Function, new_project::AbstractString) old = Base.ACTIVE_PROJECT[] Base.ACTIVE_PROJECT[] = new_project - try + return try f() finally Base.ACTIVE_PROJECT[] = old end end -function compat(ctx::Context, pkg::String, compat_str::Union{Nothing,String}; io = nothing, kwargs...) +function _compat(ctx::Context, pkg::String, compat_str::Union{Nothing, String}; current::Bool = false, io = nothing, kwargs...) + if current + if compat_str !== nothing + pkgerror("`current` is true, but `compat_str` is not nothing. This is not allowed.") + end + return set_current_compat(ctx, pkg; io = io) + end io = something(io, ctx.io) pkg = pkg == "Julia" ? "julia" : pkg isnothing(compat_str) || (compat_str = string(strip(compat_str, '"'))) + existing_compat = Operations.get_compat_str(ctx.env.project, pkg) + # Double check before deleting a compat entry issue/3567 + if isinteractive() && (isnothing(compat_str) || isempty(compat_str)) + if !isnothing(existing_compat) + ans = Base.prompt(stdin, ctx.io, "No compat string was given. Delete existing compat entry `$pkg = $(repr(existing_compat))`? 
[y]/n", default = "y") + if lowercase(ans) !== "y" + return + end + end + end if haskey(ctx.env.project.deps, pkg) || pkg == "julia" success = Operations.set_compat(ctx.env.project, pkg, isnothing(compat_str) ? nothing : isempty(compat_str) ? nothing : compat_str) success === false && pkgerror("invalid compat version specifier \"$(compat_str)\"") write_env(ctx.env) if isnothing(compat_str) || isempty(compat_str) - printpkgstyle(io, :Compat, "entry removed for $(pkg)") + printpkgstyle(io, :Compat, "entry removed:\n $pkg = $(repr(existing_compat))") else printpkgstyle(io, :Compat, "entry set:\n $(pkg) = $(repr(compat_str))") end @@ -1410,15 +1563,98 @@ function compat(ctx::Context, pkg::String, compat_str::Union{Nothing,String}; io pkgerror("No package named $pkg in current Project") end end -compat(pkg::String; kwargs...) = compat(pkg, nothing; kwargs...) -compat(pkg::String, compat_str::Union{Nothing,String}; kwargs...) = compat(Context(), pkg, compat_str; kwargs...) -compat(;kwargs...) = compat(Context(); kwargs...) +function compat(ctx::Context = Context(); current::Bool = false, kwargs...) + if current + return set_current_compat(ctx; kwargs...) + end + return _compat(ctx; kwargs...) +end +compat(pkg::String, compat_str::Union{Nothing, String} = nothing; kwargs...) = _compat(Context(), pkg, compat_str; kwargs...) 
+ + +function set_current_compat(ctx::Context, target_pkg::Union{Nothing, String} = nothing; io = nothing) + io = something(io, ctx.io) + updated_deps = String[] + + deps_to_process = if target_pkg !== nothing + # Process only the specified package + if haskey(ctx.env.project.deps, target_pkg) + [(target_pkg, ctx.env.project.deps[target_pkg])] + else + pkgerror("Package $(target_pkg) not found in project dependencies") + end + else + # Process all packages (existing behavior) + collect(ctx.env.project.deps) + end + + # Process regular package dependencies + for (dep, uuid) in deps_to_process + compat_str = Operations.get_compat_str(ctx.env.project, dep) + if target_pkg !== nothing || isnothing(compat_str) + entry = get(ctx.env.manifest, uuid, nothing) + entry === nothing && continue + v = entry.version + v === nothing && continue + pkgversion = string(Base.thispatch(v)) + Operations.set_compat(ctx.env.project, dep, pkgversion) || + pkgerror("invalid compat version specifier \"$(pkgversion)\"") + push!(updated_deps, dep) + end + end + + # Also handle Julia compat entry when processing all packages (not when targeting a specific package) + if target_pkg === nothing + julia_compat_str = Operations.get_compat_str(ctx.env.project, "julia") + if isnothing(julia_compat_str) + # Set julia compat to current running version + julia_version = string(Base.thispatch(VERSION)) + Operations.set_compat(ctx.env.project, "julia", julia_version) || + pkgerror("invalid compat version specifier \"$(julia_version)\"") + push!(updated_deps, "julia") + end + end + + # Update messaging + if isempty(updated_deps) + if target_pkg !== nothing + printpkgstyle(io, :Info, "$(target_pkg) already has a compat entry or is not in manifest. No changes made.", color = Base.info_color()) + else + printpkgstyle(io, :Info, "no missing compat entries found. 
No changes made.", color = Base.info_color()) + elseif length(updated_deps) == 1 + printpkgstyle(io, :Info, "new entry set for $(only(updated_deps)) based on its current version", color = Base.info_color()) + else + printpkgstyle(io, :Info, "new entries set for $(join(updated_deps, ", ", " and ")) based on their current versions", color = Base.info_color()) + end + + write_env(ctx.env) + return Operations.print_compat(ctx; io) +end +set_current_compat(; kwargs...) = set_current_compat(Context(); kwargs...) ####### # why # ####### -function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=false, kwargs...) +function why_find_paths!(final_paths, incoming, project_deps, current, path) + push!(path, current) + current in project_deps && push!(final_paths, path) # record once we've traversed to a project dep + haskey(incoming, current) || return # but only return if we've reached a leaf that nothing depends on + for p in incoming[current] + if p in path + # detected dependency cycle and none of the dependencies in the cycle + # are in the project; this could happen when manually modifying + # the project and running this function before a + # resolve + continue + end + why_find_paths!(final_paths, incoming, project_deps, p, copy(path)) + end + return +end + +function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool = false, kwargs...)
require_not_empty(pkgs, :why) manifest_resolve!(ctx.env.manifest, pkgs) @@ -1442,36 +1678,21 @@ function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=fa end end - function find_paths!(final_paths, current, path = UUID[]) - push!(path, current) - current in project_deps && push!(final_paths, path) # record once we've traversed to a project dep - haskey(incoming, current) || return # but only return if we've reached a leaf that nothing depends on - for p in incoming[current] - if p in path - # detected dependency cycle and none of the dependencies in the cycle - # are in the project could happen when manually modifying - # the project and running this function function before a - # resolve - continue - end - find_paths!(final_paths, p, copy(path)) - end - end - first = true for pkg in pkgs !first && println(io) first = false final_paths = Set{Vector{UUID}}() - find_paths!(final_paths, pkg.uuid) + why_find_paths!(final_paths, incoming, project_deps, pkg.uuid, UUID[]) foreach(reverse!, final_paths) final_paths_names = map(x -> [ctx.env.manifest[uuid].name for uuid in x], collect(final_paths)) sort!(final_paths_names, by = x -> (x, length(x))) - delimiter = sprint((io, args) -> printstyled(io, args...; color=:light_green), "→", context=io) + delimiter = sprint((io, args) -> printstyled(io, args...; color = :light_green), "→", context = io) for path in final_paths_names println(io, " ", join(path, " $delimiter ")) end end + return end @@ -1493,7 +1714,7 @@ const undo_entries = Dict{String, UndoState}() const max_undo_limit = 50 const saved_initial_snapshot = Ref(false) -function add_snapshot_to_undo(env=nothing) +function add_snapshot_to_undo(env = nothing) # only attempt to take a snapshot if there is # an active project to be found if env === nothing @@ -1511,14 +1732,14 @@ function add_snapshot_to_undo(env=nothing) return end snapshot = UndoSnapshot(now(), env.project, env.manifest) - deleteat!(state.entries, 1:(state.idx-1)) + 
deleteat!(state.entries, 1:(state.idx - 1)) pushfirst!(state.entries, snapshot) state.idx = 1 - resize!(state.entries, min(length(state.entries), max_undo_limit)) + return resize!(state.entries, min(length(state.entries), max_undo_limit)) end -undo(ctx = Context()) = redo_undo(ctx, :undo, 1) +undo(ctx = Context()) = redo_undo(ctx, :undo, 1) redo(ctx = Context()) = redo_undo(ctx, :redo, -1) function redo_undo(ctx, mode::Symbol, direction::Int) @assert direction == 1 || direction == -1 @@ -1529,16 +1750,16 @@ function redo_undo(ctx, mode::Symbol, direction::Int) state.idx += direction snapshot = state.entries[state.idx] ctx.env.manifest, ctx.env.project = snapshot.manifest, snapshot.project - write_env(ctx.env; update_undo=false) - Operations.show_update(ctx.env, ctx.registries; io=ctx.io) + write_env(ctx.env; update_undo = false) + return Operations.show_update(ctx.env, ctx.registries; io = ctx.io) end function setprotocol!(; - domain::AbstractString="github.com", - protocol::Union{Nothing, AbstractString}=nothing -) - GitTools.setprotocol!(domain=domain, protocol=protocol) + domain::AbstractString = "github.com", + protocol::Union{Nothing, AbstractString} = nothing + ) + GitTools.setprotocol!(domain = domain, protocol = protocol) return nothing end @@ -1546,10 +1767,15 @@ end function handle_package_input!(pkg::PackageSpec) if pkg.path !== nothing && pkg.url !== nothing - pkgerror("`path` and `url` are conflicting specifications") + pkgerror("Conflicting `path` and `url` in PackageSpec") + end + if pkg.repo.source !== nothing || pkg.repo.rev !== nothing || pkg.repo.subdir !== nothing + pkgerror("`repo` is a private field of PackageSpec and should not be set directly") end - pkg.repo = Types.GitRepo(rev = pkg.rev, source = pkg.url !== nothing ? pkg.url : pkg.path, - subdir = pkg.subdir) + pkg.repo = Types.GitRepo( + rev = pkg.rev, source = pkg.url !== nothing ? 
pkg.url : pkg.path, + subdir = pkg.subdir + ) pkg.path = nothing pkg.tree_hash = nothing if pkg.version === nothing @@ -1558,28 +1784,7 @@ function handle_package_input!(pkg::PackageSpec) if !(pkg.version isa VersionNumber) pkg.version = VersionSpec(pkg.version) end - pkg.uuid = pkg.uuid isa String ? UUID(pkg.uuid) : pkg.uuid -end - -function upgrade_manifest(man_path::String) - dir = mktempdir() - cp(man_path, joinpath(dir, "Manifest.toml")) - Pkg.activate(dir) do - Pkg.upgrade_manifest() - end - mv(joinpath(dir, "Manifest.toml"), man_path, force = true) -end -function upgrade_manifest(ctx::Context = Context()) - before_format = ctx.env.manifest.manifest_format - if before_format == v"2.0" - pkgerror("Format of manifest file at `$(ctx.env.manifest_file)` already up to date: manifest_format == $(before_format)") - elseif before_format != v"1.0" - pkgerror("Format of manifest file at `$(ctx.env.manifest_file)` version is unrecognized: manifest_format == $(before_format)") - end - ctx.env.manifest.manifest_format = v"2.0" - Types.write_manifest(ctx.env) - printpkgstyle(ctx.io, :Updated, "Format of manifest file at `$(ctx.env.manifest_file)` updated from v$(before_format.major).$(before_format.minor) to v2.0") - return nothing + return pkg.uuid = pkg.uuid isa String ? UUID(pkg.uuid) : pkg.uuid end """ @@ -1595,4 +1800,30 @@ function auto_gc(on::Bool) return pstate end +""" + readonly() + +Return whether the current environment is readonly. +""" +function readonly(ctx::Context = Context()) + return ctx.env.project.readonly +end + +""" + readonly(on::Bool) + +Enable or disable readonly mode for the current environment. +Return the previous state. +""" +function readonly(on::Bool, ctx::Context = Context()) + previous_state = ctx.env.project.readonly + ctx.env.project.readonly = on + Types.write_env(ctx.env; skip_readonly_check = true) + + mode_str = on ? 
"enabled" : "disabled" + printpkgstyle(ctx.io, :Updated, "Readonly mode $mode_str for project at $(ctx.env.project_file)") + + return previous_state +end + end # module diff --git a/src/Apps/Apps.jl b/src/Apps/Apps.jl new file mode 100644 index 0000000000..8521f68958 --- /dev/null +++ b/src/Apps/Apps.jl @@ -0,0 +1,635 @@ +module Apps + +using Pkg +using Pkg: atomic_toml_write +using Pkg.Versions +using Pkg.Types: AppInfo, PackageSpec, Context, EnvCache, PackageEntry, Manifest, handle_repo_add!, handle_repo_develop!, write_manifest, write_project, + pkgerror, projectfile_path, manifestfile_path +using Pkg.Operations: print_single, source_path, update_package_add +using Pkg.API: handle_package_input! +using TOML, UUIDs +using Dates +import Pkg.Registry + +public add, rm, status, update, develop + +app_env_folder() = joinpath(first(DEPOT_PATH), "environments", "apps") +app_manifest_file() = joinpath(app_env_folder(), "AppManifest.toml") +julia_bin_path() = joinpath(first(DEPOT_PATH), "bin") + +app_context() = Context(env = EnvCache(joinpath(app_env_folder(), "Project.toml"))) + +function validate_app_name(name::AbstractString) + if isempty(name) + error("App name cannot be empty") + end + if !occursin(r"^[a-zA-Z][a-zA-Z0-9_-]*$", name) + error("App name must start with a letter and contain only letters, numbers, underscores, and hyphens") + end + return if occursin(r"\.\.", name) || occursin(r"[/\\]", name) + error("App name cannot contain path traversal sequences or path separators") + end +end + +function validate_package_name(name::AbstractString) + if isempty(name) + error("Package name cannot be empty") + end + return if !occursin(r"^[a-zA-Z][a-zA-Z0-9_]*$", name) + error("Package name must start with a letter and contain only letters, numbers, and underscores") + end +end + +function validate_submodule_name(name::Union{AbstractString, Nothing}) + return if name !== nothing + if isempty(name) + error("Submodule name cannot be empty") + end + if 
!occursin(r"^[a-zA-Z][a-zA-Z0-9_]*$", name) + error("Submodule name must start with a letter and contain only letters, numbers, and underscores") + end + end +end + + +function rm_shim(name; kwargs...) + validate_app_name(name) + return Base.rm(joinpath(julia_bin_path(), name * (Sys.iswindows() ? ".bat" : "")); kwargs...) +end + +function get_project(sourcepath) + project_file = projectfile_path(sourcepath) + + isfile(project_file) || error("Project file not found: $project_file") + + project = Pkg.Types.read_project(project_file) + isempty(project.apps) && error("No apps found in Project.toml for package $(project.name) at version $(project.version)") + return project +end + + +function overwrite_file_if_different(file, content) + # Windows batch files require CRLF line endings for reliable label parsing + if endswith(file, ".bat") + content = replace(content, "\r\n" => "\n") # normalize to LF first + content = replace(content, "\n" => "\r\n") # then convert to CRLF + end + return if !isfile(file) || read(file, String) != content + mkpath(dirname(file)) + write(file, content) + end +end + +function check_apps_in_path(apps) + for app_name in keys(apps) + which_name = app_name * (Sys.iswindows() ? ".bat" : "") + which_result = Sys.which(which_name) + if which_result === nothing + @warn """ + App '$app_name' was installed but is not available in PATH. + Consider adding '$(julia_bin_path())' to your PATH environment variable. + """ maxlog = 1 + break # Only show warning once per installation + else + # Check for collisions + expected_path = joinpath(julia_bin_path(), app_name * (Sys.iswindows() ? ".bat" : "")) + if which_result != expected_path + @warn """ + App '$app_name' collision detected: + Expected: $expected_path + Found: $which_result + Another application with the same name exists in PATH. 
+ """ + end + end + end + return +end + +function get_max_version_register(pkg::PackageSpec, regs) + max_v = nothing + tree_hash = nothing + for reg in regs + if get(reg, pkg.uuid, nothing) !== nothing + reg_pkg = get(reg, pkg.uuid, nothing) + reg_pkg === nothing && continue + pkg_info = Registry.registry_info(reg, reg_pkg) + for (version, info) in pkg_info.version_info + info.yanked && continue + if pkg.version isa VersionNumber + pkg.version == version || continue + else + version in pkg.version || continue + end + if max_v === nothing || version > max_v + max_v = version + tree_hash = info.git_tree_sha1 + end + end + end + end + if max_v === nothing + error("Suitable package version for $(pkg.name) not found in any registries.") + end + return (max_v, tree_hash) +end + + +################## +# Main Functions # +################## + +function _resolve(manifest::Manifest, pkgname = nothing) + for (uuid, pkg) in manifest.deps + if pkgname !== nothing && pkg.name !== pkgname + continue + end + + # TODO: Add support for existing manifest + + projectfile = joinpath(app_env_folder(), pkg.name, "Project.toml") + + sourcepath = source_path(app_manifest_file(), pkg) + original_project_file = projectfile_path(sourcepath) + + mkpath(dirname(projectfile)) + + if isfile(original_project_file) + cp(original_project_file, projectfile; force = true) + chmod(projectfile, 0o644) # Make the copied project file writable + + # Add entryfile stanza pointing to the package entry file + # TODO: What if project file has its own entryfile? 
+ project_data = TOML.parsefile(projectfile) + project_data["entryfile"] = joinpath(sourcepath, "src", "$(pkg.name).jl") + atomic_toml_write(projectfile, project_data) + else + error("could not find project file for package $pkg") + end + + # Create a manifest with the manifest entry + Pkg.activate(joinpath(app_env_folder(), pkg.name)) do + ctx = Context() + ctx.env.manifest.deps[uuid] = pkg + Pkg.resolve(ctx) + end + + # TODO: Julia path + generate_shims_for_apps(pkg.name, pkg.apps, dirname(projectfile), joinpath(Sys.BINDIR, "julia")) + end + return write_manifest(manifest, app_manifest_file()) +end + + +function add(pkg::Vector{PackageSpec}) + for p in pkg + add(p) + end + return +end + + +function add(pkg::PackageSpec) + handle_package_input!(pkg) + + ctx = app_context() + + Pkg.Operations.update_registries(ctx; force = false, update_cooldown = Day(1)) + + manifest = ctx.env.manifest + new = false + + # Download package + if pkg.repo.source !== nothing || pkg.repo.rev !== nothing + entry = Pkg.API.manifest_info(ctx.env.manifest, pkg.uuid) + pkg = update_package_add(ctx, pkg, entry, false) + new = handle_repo_add!(ctx, pkg) + else + pkgs = [pkg] + Pkg.Operations.registry_resolve!(ctx.registries, pkgs) + Pkg.Operations.ensure_resolved(ctx, manifest, pkgs, registry = true) + + pkg.version, pkg.tree_hash = get_max_version_register(pkg, ctx.registries) + + new = Pkg.Operations.download_source(ctx, pkgs) + end + + # Run Pkg.build()? + + Base.rm(joinpath(app_env_folder(), pkg.name); force = true, recursive = true) + sourcepath = source_path(ctx.env.manifest_file, pkg) + project = get_project(sourcepath) + # TODO: Wrong if package itself has a sourcepath? + # PackageEntry requires version::Union{VersionNumber, Nothing}, but project.version can be VersionSpec + entry = PackageEntry(; apps = project.apps, name = pkg.name, version = project.version isa VersionNumber ? 
project.version : nothing, tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid = pkg.uuid) + manifest.deps[pkg.uuid] = entry + + _resolve(manifest, pkg.name) + if new === true || (new isa Set{UUID} && pkg.uuid in new) + Pkg.Operations.build_versions(ctx, Set([pkg.uuid]); verbose = true) + end + precompile(pkg.name) + + @info "For package: $(pkg.name) installed apps $(join(keys(project.apps), ","))" + return check_apps_in_path(project.apps) +end + +function develop(pkg::Vector{PackageSpec}) + for p in pkg + develop(p) + end + return +end + +function develop(pkg::PackageSpec) + if pkg.path !== nothing + pkg.path = abspath(pkg.path) + end + handle_package_input!(pkg) + ctx = app_context() + handle_repo_develop!(ctx, pkg, #=shared =# true) + Base.rm(joinpath(app_env_folder(), pkg.name); force = true, recursive = true) + sourcepath = abspath(source_path(ctx.env.manifest_file, pkg)) + project = get_project(sourcepath) + + # Seems like the `.repo.source` field is not cleared. + # At least repo-url is still in the manifest after doing a dev with a path + # Figure out why for normal dev this is not needed. + # XXX: Why needed? + if pkg.path !== nothing + pkg.repo.source = nothing + end + + # PackageEntry requires version::Union{VersionNumber, Nothing}, but project.version can be VersionSpec + entry = PackageEntry(; apps = project.apps, name = pkg.name, version = project.version isa VersionNumber ? 
project.version : nothing, tree_hash = pkg.tree_hash, path = sourcepath, repo = pkg.repo, uuid = pkg.uuid) + manifest = ctx.env.manifest + manifest.deps[pkg.uuid] = entry + + # For dev, we don't create an app environment - just point shims directly to the dev'd project + write_manifest(manifest, app_manifest_file()) + generate_shims_for_apps(pkg.name, project.apps, sourcepath, joinpath(Sys.BINDIR, "julia")) + + @info "For package: $(pkg.name) installed apps: $(join(keys(project.apps), ","))" + return check_apps_in_path(project.apps) +end + + +update(pkgs_or_apps::String) = update([pkgs_or_apps]) +function update(pkgs_or_apps::Vector) + for pkg_or_app in pkgs_or_apps + if pkg_or_app isa String + pkg_or_app = PackageSpec(pkg_or_app) + end + update(pkg_or_app) + end + return +end + +# XXX: Is updating an app ever different from rm-ing and adding it from scratch? +function update(pkg::Union{PackageSpec, Nothing} = nothing) + ctx = app_context() + manifest = ctx.env.manifest + deps = Pkg.Operations.load_manifest_deps(manifest) + for dep in deps + info = manifest.deps[dep.uuid] + if pkg === nothing || info.name !== pkg.name + continue + end + Pkg.activate(joinpath(app_env_folder(), info.name)) do + # precompile only after updating all apps? 
+ Pkg.update() + end + sourcepath = abspath(source_path(ctx.env.manifest_file, info)) + project = get_project(sourcepath) + # Get the tree hash from the project file + manifest_file = manifestfile_path(joinpath(app_env_folder(), info.name)) + manifest_app = Pkg.Types.read_manifest(manifest_file) + manifest_entry = manifest_app.deps[info.uuid] + + entry = PackageEntry(; + apps = project.apps, name = manifest_entry.name, version = manifest_entry.version, tree_hash = manifest_entry.tree_hash, + path = manifest_entry.path, repo = manifest_entry.repo, uuid = manifest_entry.uuid + ) + + manifest.deps[dep.uuid] = entry + Pkg.Types.write_manifest(manifest, app_manifest_file()) + end + return +end + +function status(pkgs_or_apps::Vector) + return if isempty(pkgs_or_apps) + status() + else + for pkg_or_app in pkgs_or_apps + if pkg_or_app isa String + pkg_or_app = PackageSpec(pkg_or_app) + end + status(pkg_or_app) + end + end +end + +function status(pkg_or_app::Union{PackageSpec, Nothing} = nothing) + # TODO: Sort. + pkg_or_app = pkg_or_app === nothing ? 
nothing : pkg_or_app.name + manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml")) + deps = Pkg.Operations.load_manifest_deps(manifest) + + is_pkg = pkg_or_app !== nothing && any(dep -> dep.name == pkg_or_app, values(manifest.deps)) + + for dep in deps + info = manifest.deps[dep.uuid] + if is_pkg && dep.name !== pkg_or_app + continue + end + if !is_pkg && pkg_or_app !== nothing + if !(pkg_or_app in keys(info.apps)) + continue + end + end + + printstyled("[", string(dep.uuid)[1:8], "] "; color = :light_black) + print_single(stdout, dep) + println() + for (appname, appinfo) in info.apps + if !is_pkg && pkg_or_app !== nothing && appname !== pkg_or_app + continue + end + julia_cmd = contractuser(appinfo.julia_command) + printstyled(" $(appname)", color = :green) + printstyled(" $(julia_cmd) \n", color = :gray) + end + end + return +end + +function precompile(pkg::Union{Nothing, String} = nothing) + manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml")) + deps = Pkg.Operations.load_manifest_deps(manifest) + for dep in deps + # TODO: Parallel app compilation..? + info = manifest.deps[dep.uuid] + if pkg !== nothing && info.name !== pkg + continue + end + Pkg.activate(joinpath(app_env_folder(), info.name)) do + Pkg.instantiate() + Pkg.precompile() + end + end + return +end + + +function require_not_empty(pkgs, f::Symbol) + return if pkgs === nothing || isempty(pkgs) + pkgerror("app $f requires at least one package") + end +end + +rm(pkgs_or_apps::String) = rm([pkgs_or_apps]) +function rm(pkgs_or_apps::Vector) + for pkg_or_app in pkgs_or_apps + if pkg_or_app isa String + pkg_or_app = PackageSpec(pkg_or_app) + end + rm(pkg_or_app) + end + return +end + +function rm(pkg_or_app::Union{PackageSpec, Nothing} = nothing) + pkg_or_app = pkg_or_app === nothing ? 
nothing : pkg_or_app.name + + require_not_empty(pkg_or_app, :rm) + + manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml")) + dep_idx = findfirst(dep -> dep.name == pkg_or_app, manifest.deps) + if dep_idx !== nothing + dep = manifest.deps[dep_idx] + @info "Deleting all apps for package $(dep.name)" + delete!(manifest.deps, dep.uuid) + for (appname, appinfo) in dep.apps + @info "Deleted $(appname)" + rm_shim(appname; force = true) + end + if dep.path === nothing + Base.rm(joinpath(app_env_folder(), dep.name); recursive = true) + end + else + for (uuid, pkg) in manifest.deps + app_idx = findfirst(app -> app.name == pkg_or_app, pkg.apps) + if app_idx !== nothing + app = pkg.apps[app_idx] + @info "Deleted app $(app.name)" + delete!(pkg.apps, app.name) + rm_shim(app.name; force = true) + end + if isempty(pkg.apps) + delete!(manifest.deps, uuid) + Base.rm(joinpath(app_env_folder(), pkg.name); recursive = true) + end + end + end + # XXX: What happens if something fails above and we do not write out the updated manifest? + Pkg.Types.write_manifest(manifest, app_manifest_file()) + return +end + +for f in (:develop, :add) + @eval begin + $f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...) + $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...) + function $f(; + name::Union{Nothing, AbstractString} = nothing, uuid::Union{Nothing, String, UUID} = nothing, + version::Union{VersionNumber, String, VersionSpec, Nothing} = nothing, + url = nothing, rev = nothing, path = nothing, subdir = nothing, kwargs... + ) + pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir) + return if all(isnothing, [name, uuid, version, url, rev, path, subdir]) + $f(PackageSpec[]; kwargs...) + else + $f(pkg; kwargs...) + end + end + function $f(pkgs::Vector{<:NamedTuple}; kwargs...) + return $f([PackageSpec(; pkg...) for pkg in pkgs]; kwargs...) 
+ end + end +end + + +######### +# Shims # +######### + +const SHIM_COMMENT = Sys.iswindows() ? "REM " : "#" +const SHIM_VERSION = 1.1 +const SHIM_HEADER = """$SHIM_COMMENT This file is generated by the Julia package manager. +$SHIM_COMMENT Shim version: $SHIM_VERSION""" + +function generate_shims_for_apps(pkgname, apps, env, julia) + for (_, app) in apps + generate_shim(pkgname, app, env, julia) + end + return +end + +function generate_shim(pkgname, app::AppInfo, env, julia) + validate_package_name(pkgname) + validate_app_name(app.name) + validate_submodule_name(app.submodule) + + module_spec = app.submodule === nothing ? pkgname : "$(pkgname).$(app.submodule)" + + filename = app.name * (Sys.iswindows() ? ".bat" : "") + julia_bin_filename = joinpath(julia_bin_path(), filename) + mkpath(dirname(julia_bin_filename)) + content = if Sys.iswindows() + julia_escaped = "\"$(Base.shell_escape_wincmd(julia))\"" + module_spec_escaped = "\"$(Base.shell_escape_wincmd(module_spec))\"" + windows_shim(julia_escaped, module_spec_escaped, env, app.julia_flags) + else + julia_escaped = Base.shell_escape(julia) + module_spec_escaped = Base.shell_escape(module_spec) + shell_shim(julia_escaped, module_spec_escaped, env, app.julia_flags) + end + overwrite_file_if_different(julia_bin_filename, content) + return if Sys.isunix() + chmod(julia_bin_filename, 0o755) + end +end + + +function shell_shim(julia_escaped::String, module_spec_escaped::String, env, julia_flags::Vector{String}) + julia_flags_escaped = join(Base.shell_escape.(julia_flags), " ") + julia_flags_part = isempty(julia_flags) ? 
"" : " $julia_flags_escaped" + + load_path_escaped = Base.shell_escape(env) + depot_path_escaped = Base.shell_escape(join(DEPOT_PATH, ':')) + + return """ + #!/bin/sh + set -eu + + $SHIM_HEADER + + # Pin Julia paths for the child process + export JULIA_LOAD_PATH=$load_path_escaped + export JULIA_DEPOT_PATH=$depot_path_escaped + + # Allow overriding Julia executable via environment variable + if [ -n "\${JULIA_APPS_JULIA_CMD:-}" ]; then + julia_cmd="\$JULIA_APPS_JULIA_CMD" + else + julia_cmd=$julia_escaped + fi + + # If a `--` appears, args before it go to Julia, after it to the app. + # If no `--` appears, all original args go to the app (no Julia args). + found_separator=false + for a in "\$@"; do + [ "\$a" = "--" ] && { found_separator=true; break; } + done + + if [ "\$found_separator" = "true" ]; then + # Build julia_args until `--`, then leave the rest in "\$@" + julia_args="" + while [ "\$#" -gt 0 ]; do + case "\$1" in + --) shift; break ;; + *) julia_args="\$julia_args\${julia_args:+ }\$1"; shift ;; + esac + done + # Here: "\$@" are the app args after the separator + exec "\$julia_cmd" --startup-file=no$julia_flags_part \$julia_args -m $module_spec_escaped "\$@" + else + # No separator: all original args go straight to the app + exec "\$julia_cmd" --startup-file=no$julia_flags_part -m $module_spec_escaped "\$@" + fi + """ +end + +function windows_shim( + julia_escaped::String, + module_spec_escaped::String, + env, + julia_flags::Vector{String}, + ) + flags_escaped = join(Base.shell_escape_wincmd.(julia_flags), " ") + flags_part = isempty(julia_flags) ? "" : " $flags_escaped" + + depot_path = join(DEPOT_PATH, ';') + + return """ + @echo off + setlocal EnableExtensions DisableDelayedExpansion + + $SHIM_HEADER + + rem --- Environment (no delayed expansion here to keep '!' 
literal) --- + set "JULIA_LOAD_PATH=$env" + set "JULIA_DEPOT_PATH=$depot_path" + + rem --- Allow overriding Julia executable via environment variable --- + if defined JULIA_APPS_JULIA_CMD ( + set "julia_cmd=%JULIA_APPS_JULIA_CMD%" + ) else ( + set "julia_cmd=$julia_escaped" + ) + + rem --- Now enable delayed expansion for string building below --- + setlocal EnableDelayedExpansion + + rem Parse arguments, splitting on first -- into julia_args / app_args + set "found_sep=" + set "julia_args=" + set "app_args=" + + :__next + if "%~1"=="" goto __done + + if not defined found_sep if "%~1"=="--" ( + set "found_sep=1" + shift + goto __next + ) + + if not defined found_sep ( + if defined julia_args ( + set "julia_args=!julia_args! %1" + ) else ( + set "julia_args=%1" + ) + shift + goto __next + ) + + if defined found_sep ( + if defined app_args ( + set "app_args=!app_args! %1" + ) else ( + set "app_args=%1" + ) + shift + goto __next + ) + + :__done + rem If no --, pass all original args to the app; otherwise use split vars + if defined found_sep ( + "%julia_cmd%" ^ + --startup-file=no$flags_part !julia_args! ^ + -m $module_spec_escaped ^ + !app_args! 
+ ) else ( + "%julia_cmd%" ^ + --startup-file=no$flags_part ^ + -m $module_spec_escaped ^ + %* + ) + """ +end + +end diff --git a/src/Artifacts.jl b/src/Artifacts.jl index 957d14aab9..11ac99c129 100644 --- a/src/Artifacts.jl +++ b/src/Artifacts.jl @@ -1,23 +1,24 @@ -module Artifacts +module PkgArtifacts using Artifacts, Base.BinaryPlatforms, SHA using ..MiniProgressBars, ..PlatformEngines using Tar: can_symlink +using FileWatching: FileWatching import ..set_readonly, ..GitTools, ..TOML, ..pkg_server, ..can_fancyprint, - ..stderr_f, ..printpkgstyle + ..stderr_f, ..printpkgstyle, ..mv_temp_dir_retries, ..atomic_toml_write, ..create_cachedir_tag import Base: get, SHA1 import Artifacts: artifact_names, ARTIFACTS_DIR_OVERRIDE, ARTIFACT_OVERRIDES, artifact_paths, - artifacts_dirs, pack_platform!, unpack_platform, load_artifacts_toml, - query_override, with_artifacts_directory, load_overrides + artifacts_dirs, pack_platform!, unpack_platform, load_artifacts_toml, + query_override, with_artifacts_directory, load_overrides import ..Types: write_env_usage, parse_toml - -export create_artifact, artifact_exists, artifact_path, remove_artifact, verify_artifact, - artifact_meta, artifact_hash, bind_artifact!, unbind_artifact!, download_artifact, - find_artifacts_toml, ensure_artifact_installed, @artifact_str, archive_artifact, - select_downloadable_artifacts +const Artifacts = PkgArtifacts # This is to preserve compatability for folks who depend on the internals of this module +export Artifacts, create_artifact, artifact_exists, artifact_path, remove_artifact, verify_artifact, + artifact_meta, artifact_hash, bind_artifact!, unbind_artifact!, download_artifact, + find_artifacts_toml, ensure_artifact_installed, @artifact_str, archive_artifact, + select_downloadable_artifacts, ArtifactDownloadInfo """ create_artifact(f::Function) @@ -30,6 +31,7 @@ function create_artifact(f::Function) # Ensure the `artifacts` directory exists in our default depot artifacts_dir = 
first(artifacts_dirs()) mkpath(artifacts_dir) + create_cachedir_tag(artifacts_dir) # Temporary directory where we'll do our creation business temp_dir = mktempdir(artifacts_dir) @@ -48,56 +50,14 @@ function create_artifact(f::Function) # system directory by accidentally creating something with the same content-hash # as something that was foolishly overridden. This should be virtually impossible # unless the user has been very unwise, but let's be cautious. - new_path = artifact_path(artifact_hash; honor_overrides=false) - _mv_temp_artifact_dir(temp_dir, new_path) + new_path = artifact_path(artifact_hash; honor_overrides = false) + mv_temp_dir_retries(temp_dir, new_path) # Give the people what they want return artifact_hash finally # Always attempt to cleanup - rm(temp_dir; recursive=true, force=true) - end -end - -""" - _mv_temp_artifact_dir(temp_dir::String, new_path::String)::Nothing -Either rename the directory at `temp_dir` to `new_path` and set it to read-only -or if `new_path` artifact already exists try to do nothing. -""" -function _mv_temp_artifact_dir(temp_dir::String, new_path::String)::Nothing - # Sometimes a rename can fail because the temp_dir is locked by - # anti-virus software scanning the new files. - # In this case we want to sleep and try again. - # I am using the list of error codes to retry from: - # https://github.com/isaacs/node-graceful-fs/blob/234379906b7d2f4c9cfeb412d2516f42b0fb4953/polyfills.js#L87 - # Retry for up to about 60 seconds by retrying 20 times with exponential backoff. - retry = 0 - max_num_retries = 20 # maybe this should be configurable? - sleep_amount = 0.01 # seconds - max_sleep_amount = 5.0 # seconds - while true - isdir(new_path) && return - # This next step is like - # `mv(temp_dir, new_path)`. - # However, `mv` defaults to `cp` if `rename` returns an error. - # `cp` is not atomic, so avoid the potential of calling it. 
- err = ccall(:jl_fs_rename, Int32, (Cstring, Cstring), temp_dir, new_path) - if err ≥ 0 - # rename worked - chmod(new_path, filemode(dirname(new_path))) - set_readonly(new_path) - return - else - # Ignore rename error if `new_path` exists. - isdir(new_path) && return - if retry < max_num_retries && err ∈ (Base.UV_EACCES, Base.UV_EPERM, Base.UV_EBUSY) - sleep(sleep_amount) - sleep_amount = min(sleep_amount*2.0, max_sleep_amount) - retry += 1 - else - Base.uv_error("rename of $(repr(temp_dir)) to $(repr(new_path))", err) - end - end + rm(temp_dir; recursive = true, force = true) end end @@ -123,9 +83,10 @@ function remove_artifact(hash::SHA1) possible_paths = artifacts_dirs(bytes2hex(hash.bytes)) for path in possible_paths if isdir(path) - rm(path; recursive=true, force=true) + rm(path; recursive = true, force = true) end end + return end """ @@ -135,7 +96,7 @@ Verifies that the given artifact (identified by its SHA1 git tree hash) is insta disk, and retains its integrity. If the given artifact is overridden, skips the verification unless `honor_overrides` is set to `true`. """ -function verify_artifact(hash::SHA1; honor_overrides::Bool=false) +function verify_artifact(hash::SHA1; honor_overrides::Bool = false) # Silently skip overridden artifacts unless we really ask for it if !honor_overrides if query_override(hash) !== nothing @@ -159,7 +120,7 @@ Archive an artifact into a tarball stored at `tarball_path`, returns the SHA256 resultant tarball as a hexadecimal string. Throws an error if the artifact does not exist. If the artifact is overridden, throws an error unless `honor_overrides` is set. 
""" -function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool=false) +function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool = false) if !honor_overrides if query_override(hash) !== nothing error("Will not archive an overridden artifact unless `honor_overrides` is set!") @@ -179,12 +140,62 @@ function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Boo end end +""" + ArtifactDownloadInfo + +Auxilliary information about an artifact to be used with `bind_artifact!()` to give +a download location for that artifact, as well as the hash and size of that artifact. +""" +struct ArtifactDownloadInfo + # URL the artifact is available at as a gzip-compressed tarball + url::String + + # SHA256 hash of the tarball + hash::Vector{UInt8} + + # Size in bytes of the tarball. `size <= 0` means unknown. + size::Int64 + + function ArtifactDownloadInfo(url, hash::AbstractVector, size = 0) + valid_hash_len = SHA.digestlen(SHA256_CTX) + hash_len = length(hash) + if hash_len != valid_hash_len + throw(ArgumentError("Invalid hash length '$(hash_len)', must be $(valid_hash_len)")) + end + return new( + String(url), + Vector{UInt8}(hash), + Int64(size), + ) + end +end + +# Convenience constructor for string hashes +ArtifactDownloadInfo(url, hash::AbstractString, args...) = ArtifactDownloadInfo(url, hex2bytes(hash), args...) + +# Convenience constructor for legacy Tuple representation +ArtifactDownloadInfo(args::Tuple) = ArtifactDownloadInfo(args...) 
+ +ArtifactDownloadInfo(adi::ArtifactDownloadInfo) = adi + +# Make the dict that will be embedded in the TOML +function make_dict(adi::ArtifactDownloadInfo) + ret = Dict{String, Any}( + "url" => adi.url, + "sha256" => bytes2hex(adi.hash), + ) + if adi.size > 0 + ret["size"] = adi.size + end + return ret +end + """ bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; - platform::Union{AbstractPlatform,Nothing} = nothing, - download_info::Union{Vector{Tuple},Nothing} = nothing, - lazy::Bool = false, - force::Bool = false) + platform::Union{AbstractPlatform,Nothing} = nothing, + download_info::Union{Vector{Tuple},Nothing} = nothing, + lazy::Bool = false, + force::Bool = false) Writes a mapping of `name` -> `hash` within the given `(Julia)Artifacts.toml` file. If `platform` is not `nothing`, this artifact is marked as platform-specific, and will be @@ -198,11 +209,13 @@ is set to `true`, even if download information is available, this artifact will downloaded until it is accessed via the `artifact"name"` syntax, or `ensure_artifact_installed()` is called upon it. 
""" -function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; - platform::Union{AbstractPlatform,Nothing} = nothing, - download_info::Union{Vector{<:Tuple},Nothing} = nothing, - lazy::Bool = false, - force::Bool = false) +function bind_artifact!( + artifacts_toml::String, name::String, hash::SHA1; + platform::Union{AbstractPlatform, Nothing} = nothing, + download_info::Union{Vector{<:Tuple}, Vector{<:ArtifactDownloadInfo}, Nothing} = nothing, + lazy::Bool = false, + force::Bool = false + ) # First, check to see if this artifact is already bound: if isfile(artifacts_toml) artifact_dict = parse_toml(artifacts_toml) @@ -211,7 +224,7 @@ function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; meta = artifact_dict[name] if !isa(meta, Vector) error("Mapping for '$name' within $(artifacts_toml) already exists!") - elseif any(isequal(platform), unpack_platform(x, name, artifacts_toml) for x in meta) + elseif any(p -> platforms_match(platform, p), unpack_platform(x, name, artifacts_toml) for x in meta) error("Mapping for '$name'/$(triplet(platform)) within $(artifacts_toml) already exists!") end end @@ -220,7 +233,7 @@ function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; end # Otherwise, the new piece of data we're going to write out is this dict: - meta = Dict{String,Any}( + meta = Dict{String, Any}( "git-tree-sha1" => bytes2hex(hash.bytes), ) @@ -229,15 +242,11 @@ function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; meta["lazy"] = true end - # Integrate download info, if it is given. We represent the download info as a - # vector of dicts, each with its own `url` and `sha256`, since different tarballs can - # expand to the same tree hash. + # Integrate download info, if it is given. Note that there can be multiple + # download locations, each with its own tarball with its own hash, but which + # expands to the same content/treehash. 
if download_info !== nothing - meta["download"] = [ - Dict("url" => dl[1], - "sha256" => dl[2], - ) for dl in download_info - ] + meta["download"] = make_dict.(ArtifactDownloadInfo.(download_info)) end if platform === nothing @@ -262,11 +271,7 @@ function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; # Spit it out onto disk let artifact_dict = artifact_dict parent_dir = dirname(artifacts_toml) - temp_artifacts_toml = isempty(parent_dir) ? tempname(pwd()) : tempname(parent_dir) - open(temp_artifacts_toml, "w") do io - TOML.print(io, artifact_dict, sorted=true) - end - mv(temp_artifacts_toml, artifacts_toml; force=true) + atomic_toml_write(artifacts_toml, artifact_dict, sorted = true) end # Mark that we have used this Artifact.toml @@ -281,8 +286,10 @@ end Unbind the given `name` from an `(Julia)Artifacts.toml` file. Silently fails if no such binding exists within the file. """ -function unbind_artifact!(artifacts_toml::String, name::String; - platform::Union{AbstractPlatform,Nothing} = nothing) +function unbind_artifact!( + artifacts_toml::String, name::String; + platform::Union{AbstractPlatform, Nothing} = nothing + ) artifact_dict = parse_toml(artifacts_toml) if !haskey(artifact_dict, name) return @@ -297,15 +304,13 @@ function unbind_artifact!(artifacts_toml::String, name::String; ) end - open(artifacts_toml, "w") do io - TOML.print(io, artifact_dict, sorted=true) - end + atomic_toml_write(artifacts_toml, artifact_dict, sorted = true) return end """ download_artifact(tree_hash::SHA1, tarball_url::String, tarball_hash::String; - verbose::Bool = false, io::IO=stderr) + verbose::Bool = false, io::IO=stderr) Download/install an artifact into the artifact store. Returns `true` on success, returns an error object on failure. @@ -315,138 +320,163 @@ returns an error object on failure. 
failure occurs """ function download_artifact( - tree_hash::SHA1, - tarball_url::String, - tarball_hash::Union{String, Nothing} = nothing; - verbose::Bool = false, - quiet_download::Bool = false, - io::IO=stderr_f(), - progress::Union{Function, Nothing} = nothing, -) - if artifact_exists(tree_hash) - return true + tree_hash::SHA1, + tarball_url::String, + tarball_hash::Union{String, Nothing} = nothing; + verbose::Bool = false, + quiet_download::Bool = false, + io::IO = stderr_f(), + progress::Union{Function, Nothing} = nothing, + ) + _artifact_paths = artifact_paths(tree_hash) + pidfile = _artifact_paths[1] * ".pid" + mkpath(dirname(pidfile)) + t_wait_msg = Timer(2) do t + if progress === nothing + @info "downloading $tarball_url ($hex) in another process" + else + progress(0, 0; status = "downloading in another process") + end end + ret = FileWatching.mkpidlock(pidfile, stale_age = 20) do + close(t_wait_msg) + if artifact_exists(tree_hash) + return true + end - # Ensure the `artifacts` directory exists in our default depot - artifacts_dir = first(artifacts_dirs()) - mkpath(artifacts_dir) - # expected artifact path - dst = joinpath(artifacts_dir, bytes2hex(tree_hash.bytes)) + # Ensure the `artifacts` directory exists in our default depot + artifacts_dir = first(artifacts_dirs()) + mkpath(artifacts_dir) + create_cachedir_tag(artifacts_dir) + # expected artifact path + dst = joinpath(artifacts_dir, bytes2hex(tree_hash.bytes)) - # We download by using a temporary directory. We do this because the download may - # be corrupted or even malicious; we don't want to clobber someone else's artifact - # by trusting the tree hash that has been given to us; we will instead download it - # to a temporary directory, calculate the true tree hash, then move it to the proper - # location only after knowing what it is, and if something goes wrong in the process, - # everything should be cleaned up. + # We download by using a temporary directory. 
We do this because the download may + # be corrupted or even malicious; we don't want to clobber someone else's artifact + # by trusting the tree hash that has been given to us; we will instead download it + # to a temporary directory, calculate the true tree hash, then move it to the proper + # location only after knowing what it is, and if something goes wrong in the process, + # everything should be cleaned up. - # Temporary directory where we'll do our creation business - temp_dir = mktempdir(artifacts_dir) + # Temporary directory where we'll do our creation business + temp_dir = mktempdir(artifacts_dir) - try - download_verify_unpack(tarball_url, tarball_hash, temp_dir; - ignore_existence=true, verbose, quiet_download, io, progress) - isnothing(progress) || progress(10000, 10000; status="verifying") - calc_hash = SHA1(GitTools.tree_hash(temp_dir)) - - # Did we get what we expected? If not, freak out. - if calc_hash.bytes != tree_hash.bytes - msg = """ - Tree Hash Mismatch! - Expected git-tree-sha1: $(bytes2hex(tree_hash.bytes)) - Calculated git-tree-sha1: $(bytes2hex(calc_hash.bytes)) - """ - # Since tree hash calculation is rather fragile and file system dependent, - # we allow setting JULIA_PKG_IGNORE_HASHES=1 to ignore the error and move - # the artifact to the expected location and return true - ignore_hash_env_set = get(ENV, "JULIA_PKG_IGNORE_HASHES", "") != "" - if ignore_hash_env_set - ignore_hash = Base.get_bool_env("JULIA_PKG_IGNORE_HASHES", false) - ignore_hash === nothing && @error( - "Invalid ENV[\"JULIA_PKG_IGNORE_HASHES\"] value", - ENV["JULIA_PKG_IGNORE_HASHES"], - ) - ignore_hash = something(ignore_hash, false) - else - # default: false except Windows users who can't symlink - ignore_hash = Sys.iswindows() && - !mktempdir(can_symlink, artifacts_dir) - end - if ignore_hash - desc = ignore_hash_env_set ? 
- "Environment variable \$JULIA_PKG_IGNORE_HASHES is true" : - "System is Windows and user cannot create symlinks" - msg *= "\n$desc: \ + try + download_verify_unpack( + tarball_url, tarball_hash, temp_dir; + ignore_existence = true, verbose, quiet_download, io, progress + ) + isnothing(progress) || progress(10000, 10000; status = "verifying") + calc_hash = SHA1(GitTools.tree_hash(temp_dir)) + + # Did we get what we expected? If not, freak out. + if calc_hash.bytes != tree_hash.bytes + msg = """ + Tree Hash Mismatch! + Expected git-tree-sha1: $(bytes2hex(tree_hash.bytes)) + Calculated git-tree-sha1: $(bytes2hex(calc_hash.bytes)) + """ + # Since tree hash calculation is rather fragile and file system dependent, + # we allow setting JULIA_PKG_IGNORE_HASHES=1 to ignore the error and move + # the artifact to the expected location and return true + ignore_hash_env_set = get(ENV, "JULIA_PKG_IGNORE_HASHES", "") != "" + if ignore_hash_env_set + ignore_hash = Base.get_bool_env("JULIA_PKG_IGNORE_HASHES", false) + ignore_hash === nothing && @error( + "Invalid ENV[\"JULIA_PKG_IGNORE_HASHES\"] value", + ENV["JULIA_PKG_IGNORE_HASHES"], + ) + ignore_hash = something(ignore_hash, false) + else + # default: false except Windows users who can't symlink + ignore_hash = Sys.iswindows() && + !mktempdir(can_symlink, artifacts_dir) + end + if ignore_hash + desc = ignore_hash_env_set ? 
+ "Environment variable \$JULIA_PKG_IGNORE_HASHES is true" : + "System is Windows and user cannot create symlinks" + msg *= "\n$desc: \ ignoring hash mismatch and moving \ artifact to the expected location" - @error(msg) - else - error(msg) + @error(msg) + else + error(msg) + end + end + # Move it to the location we expected + isnothing(progress) || progress(10000, 10000; status = "moving to artifact store") + mv_temp_dir_retries(temp_dir, dst) + catch err + @debug "download_artifact error" tree_hash tarball_url tarball_hash err + if isa(err, InterruptException) + rethrow(err) + end + # If something went wrong during download, return the error + return err + finally + # Always attempt to cleanup + try + rm(temp_dir; recursive = true, force = true) + catch e + e isa InterruptException && rethrow() + @warn("Failed to clean up temporary directory $(repr(temp_dir))", exception = e) end end - # Move it to the location we expected - isnothing(progress) || progress(10000, 10000; status="moving to artifact store") - _mv_temp_artifact_dir(temp_dir, dst) - catch err - @debug "download_artifact error" tree_hash tarball_url tarball_hash err - if isa(err, InterruptException) - rethrow(err) - end - # If something went wrong during download, return the error - return err - finally - # Always attempt to cleanup - try - rm(temp_dir; recursive=true, force=true) - catch e - e isa InterruptException && rethrow() - @warn("Failed to clean up temporary directory $(repr(temp_dir))", exception=e) - end + return true end - return true + + return ret end """ ensure_artifact_installed(name::String, artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - pkg_uuid::Union{Base.UUID,Nothing}=nothing, - verbose::Bool = false, - quiet_download::Bool = false, - io::IO=stderr) + platform::AbstractPlatform = HostPlatform(), + pkg_uuid::Union{Base.UUID,Nothing}=nothing, + verbose::Bool = false, + quiet_download::Bool = false, + io::IO=stderr) Ensures an artifact is installed, 
downloading it via the download information stored in `artifacts_toml` if necessary. Throws an error if unable to install. """ -function ensure_artifact_installed(name::String, artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - pkg_uuid::Union{Base.UUID,Nothing}=nothing, - verbose::Bool = false, - quiet_download::Bool = false, - progress::Union{Function,Nothing} = nothing, - io::IO=stderr_f()) - meta = artifact_meta(name, artifacts_toml; pkg_uuid=pkg_uuid, platform=platform) +function ensure_artifact_installed( + name::String, artifacts_toml::String; + platform::AbstractPlatform = HostPlatform(), + pkg_uuid::Union{Base.UUID, Nothing} = nothing, + pkg_server_eligible::Bool = true, + verbose::Bool = false, + quiet_download::Bool = false, + progress::Union{Function, Nothing} = nothing, + io::IO = stderr_f() + ) + meta = artifact_meta(name, artifacts_toml; pkg_uuid = pkg_uuid, platform = platform) if meta === nothing error("Cannot locate artifact '$(name)' in '$(artifacts_toml)'") end - return ensure_artifact_installed(name, meta, artifacts_toml; - platform, verbose, quiet_download, progress, io) + return ensure_artifact_installed( + name, meta, artifacts_toml; + pkg_server_eligible, platform, verbose, quiet_download, progress, io + ) end -function ensure_artifact_installed(name::String, meta::Dict, artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - verbose::Bool = false, - quiet_download::Bool = false, - progress::Union{Function,Nothing} = nothing, - io::IO=stderr_f()) - +function ensure_artifact_installed( + name::String, meta::Dict, artifacts_toml::String; + pkg_server_eligible::Bool = true, + platform::AbstractPlatform = HostPlatform(), + verbose::Bool = false, + quiet_download::Bool = false, + progress::Union{Function, Nothing} = nothing, + io::IO = stderr_f() + ) hash = SHA1(meta["git-tree-sha1"]) if !artifact_exists(hash) if isnothing(progress) || verbose == true - return try_artifact_download_sources(name, hash, meta, 
artifacts_toml; platform, verbose, quiet_download, io) + return try_artifact_download_sources(name, hash, meta, artifacts_toml; pkg_server_eligible, platform, verbose, quiet_download, io) else # if a custom progress handler is given it is taken to mean the caller wants to handle the download scheduling - return () -> try_artifact_download_sources(name, hash, meta, artifacts_toml; platform, quiet_download=true, io, progress) + return () -> try_artifact_download_sources(name, hash, meta, artifacts_toml; pkg_server_eligible, platform, quiet_download = true, io, progress) end else return artifact_path(hash) @@ -454,17 +484,18 @@ function ensure_artifact_installed(name::String, meta::Dict, artifacts_toml::Str end function try_artifact_download_sources( - name::String, hash::SHA1, meta::Dict, artifacts_toml::String; - platform::AbstractPlatform=HostPlatform(), - verbose::Bool=false, - quiet_download::Bool=false, - io::IO=stderr_f(), - progress::Union{Function,Nothing}=nothing) + name::String, hash::SHA1, meta::Dict, artifacts_toml::String; + pkg_server_eligible::Bool = true, + platform::AbstractPlatform = HostPlatform(), + verbose::Bool = false, + quiet_download::Bool = false, + io::IO = stderr_f(), + progress::Union{Function, Nothing} = nothing + ) errors = Any[] - # first try downloading from Pkg server - # TODO: only do this if Pkg server knows about this package - if (server = pkg_server()) !== nothing + # first try downloading from Pkg server if the Pkg server knows about this package + if pkg_server_eligible && (server = pkg_server()) !== nothing url = "$server/artifact/$hash" download_success = let url = url @debug "Downloading artifact from Pkg server" name artifacts_toml platform url @@ -543,12 +574,12 @@ end """ ensure_all_artifacts_installed(artifacts_toml::String; - platform = HostPlatform(), - pkg_uuid = nothing, - include_lazy = false, - verbose = false, - quiet_download = false, - io::IO=stderr) + platform = HostPlatform(), + pkg_uuid = nothing, + 
include_lazy = false, + verbose = false, + quiet_download = false, + io::IO=stderr) Installs all non-lazy artifacts from a given `(Julia)Artifacts.toml` file. `package_uuid` must be provided to properly support overrides from `Overrides.toml` entries in depots. @@ -566,49 +597,56 @@ This function is deprecated and should be replaced with the following snippet: This function is deprecated in Julia 1.6 and will be removed in a future version. Use `select_downloadable_artifacts()` and `ensure_artifact_installed()` instead. """ -function ensure_all_artifacts_installed(artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - pkg_uuid::Union{Nothing,Base.UUID} = nothing, - include_lazy::Bool = false, - verbose::Bool = false, - quiet_download::Bool = false, - io::IO=stderr_f()) +function ensure_all_artifacts_installed( + artifacts_toml::String; + platform::AbstractPlatform = HostPlatform(), + pkg_uuid::Union{Nothing, Base.UUID} = nothing, + include_lazy::Bool = false, + verbose::Bool = false, + quiet_download::Bool = false, + io::IO = stderr_f() + ) # This function should not be called anymore; use `select_downloadable_artifacts()` directly. Base.depwarn("`ensure_all_artifacts_installed()` is deprecated; iterate over `select_downloadable_artifacts()` output with `ensure_artifact_installed()`.", :ensure_all_artifacts_installed) # Collect all artifacts we're supposed to install artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy, pkg_uuid) for name in keys(artifacts) # Otherwise, let's try and install it! 
- ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=platform, - verbose=verbose, quiet_download=quiet_download, io=io) + ensure_artifact_installed( + name, artifacts[name], artifacts_toml; platform = platform, + verbose = verbose, quiet_download = quiet_download, io = io + ) end + return end """ extract_all_hashes(artifacts_toml::String; - platform = HostPlatform(), - pkg_uuid = nothing, - include_lazy = false) + platform = HostPlatform(), + pkg_uuid = nothing, + include_lazy = false) Extract all hashes from a given `(Julia)Artifacts.toml` file. `package_uuid` must be provided to properly support overrides from `Overrides.toml` entries in depots. If `include_lazy` is set to `true`, then lazy packages will be installed as well. """ -function extract_all_hashes(artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - pkg_uuid::Union{Nothing,Base.UUID} = nothing, - include_lazy::Bool = false) +function extract_all_hashes( + artifacts_toml::String; + platform::AbstractPlatform = HostPlatform(), + pkg_uuid::Union{Nothing, Base.UUID} = nothing, + include_lazy::Bool = false + ) hashes = Base.SHA1[] if !isfile(artifacts_toml) return hashes end - artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid=pkg_uuid) + artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid = pkg_uuid) for name in keys(artifact_dict) # Get the metadata about this name for the requested platform - meta = artifact_meta(name, artifact_dict, artifacts_toml; platform=platform) + meta = artifact_meta(name, artifact_dict, artifacts_toml; platform = platform) # If there are no instances of this name for the desired platform, skip it meta === nothing && continue @@ -644,4 +682,4 @@ ensure_all_artifacts_installed(artifacts_toml::AbstractString; kwargs...) = extract_all_hashes(artifacts_toml::AbstractString; kwargs...) = extract_all_hashes(string(artifacts_toml)::String; kwargs...) 
-end # module Artifacts +end # module PkgArtifacts diff --git a/src/BinaryPlatformsCompat.jl b/src/BinaryPlatformsCompat.jl new file mode 100644 index 0000000000..93403e05bd --- /dev/null +++ b/src/BinaryPlatformsCompat.jl @@ -0,0 +1,155 @@ +module BinaryPlatformsCompat + + export platform_key_abi, platform_dlext, valid_dl_path, arch, libc, + libgfortran_version, libstdcxx_version, cxxstring_abi, parse_dl_name_version, + detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi, + call_abi, wordsize, triplet, select_platform, platforms_match, + CompilerABI, Platform, UnknownPlatform, Linux, MacOS, Windows, FreeBSD + + using Base.BinaryPlatforms: parse_dl_name_version, + detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi, + os, call_abi, select_platform, platforms_match, + AbstractPlatform, Platform, HostPlatform + + import Base.BinaryPlatforms: libgfortran_version, libstdcxx_version, platform_name, + wordsize, platform_dlext, tags, arch, libc, call_abi, + cxxstring_abi + + struct UnknownPlatform <: AbstractPlatform + UnknownPlatform(args...; kwargs...) 
= new() + end + tags(::UnknownPlatform) = Dict{String, String}("os" => "unknown") + + + struct CompilerABI + libgfortran_version::Union{Nothing, VersionNumber} + libstdcxx_version::Union{Nothing, VersionNumber} + cxxstring_abi::Union{Nothing, Symbol} + + function CompilerABI(; + libgfortran_version::Union{Nothing, VersionNumber} = nothing, + libstdcxx_version::Union{Nothing, VersionNumber} = nothing, + cxxstring_abi::Union{Nothing, Symbol} = nothing + ) + return new(libgfortran_version, libstdcxx_version, cxxstring_abi) + end + end + + # Easy replacement constructor + function CompilerABI( + cabi::CompilerABI; libgfortran_version = nothing, + libstdcxx_version = nothing, + cxxstring_abi = nothing + ) + return CompilerABI(; + libgfortran_version = something(libgfortran_version, Some(cabi.libgfortran_version)), + libstdcxx_version = something(libstdcxx_version, Some(cabi.libstdcxx_version)), + cxxstring_abi = something(cxxstring_abi, Some(cabi.cxxstring_abi)), + ) + end + + libgfortran_version(cabi::CompilerABI) = cabi.libgfortran_version + libstdcxx_version(cabi::CompilerABI) = cabi.libstdcxx_version + cxxstring_abi(cabi::CompilerABI) = cabi.cxxstring_abi + + for T in (:Linux, :Windows, :MacOS, :FreeBSD) + @eval begin + struct $(T) <: AbstractPlatform + p::Platform + function $(T)(arch::Symbol; compiler_abi = nothing, kwargs...) 
+ if compiler_abi !== nothing + kwargs = (; + kwargs..., + :libgfortran_version => libgfortran_version(compiler_abi), + :libstdcxx_version => libstdcxx_version(compiler_abi), + :cxxstring_abi => cxxstring_abi(compiler_abi), + ) + end + return new(Platform(string(arch), $(string(T)); kwargs..., validate_strict = true)) + end + end + end + end + + const PlatformUnion = Union{Linux, MacOS, Windows, FreeBSD} + + # First, methods we need to coerce to Symbol for backwards-compatibility + for f in (:arch, :libc, :call_abi, :cxxstring_abi) + @eval begin + function $(f)(p::PlatformUnion) + str = $(f)(p.p) + if str === nothing + return nothing + end + return Symbol(str) + end + end + end + + # Next, things we don't need to coerce + for f in (:libgfortran_version, :libstdcxx_version, :platform_name, :wordsize, :platform_dlext, :tags, :triplet) + @eval begin + $(f)(p::PlatformUnion) = $(f)(p.p) + end + end + + # Finally, add equality testing between these wrapper types and other AbstractPlatforms + @eval begin + Base.:(==)(a::PlatformUnion, b::AbstractPlatform) = b == a.p + end + + # Add one-off functions + MacOS(; kwargs...) = MacOS(:x86_64; kwargs...) + FreeBSD(; kwargs...) = FreeBSD(:x86_64; kwargs...) + + function triplet(p::AbstractPlatform) + # We are going to sub off to `Base.BinaryPlatforms.triplet()` here, + # with the important exception that we override `os_version` to better + # mimic the old behavior of `triplet()` + if Sys.isfreebsd(p) + p = deepcopy(p) + p["os_version"] = "11.1.0" + elseif Sys.isapple(p) + p = deepcopy(p) + p["os_version"] = "14.0.0" + end + return Base.BinaryPlatforms.triplet(p) + end + + """ + platform_key_abi(machine::AbstractString) + + Returns the platform key for the current platform, or any other though the + the use of the `machine` parameter. 
+ + This method is deprecated, import `Base.BinaryPlatforms` and use either `HostPlatform()` + to get the current host platform, or `parse(Base.BinaryPlatforms.Platform, triplet)` + to parse the triplet for some other platform instead. + """ + platform_key_abi() = HostPlatform() + platform_key_abi(triplet::AbstractString) = parse(Platform, triplet) + + """ + valid_dl_path(path::AbstractString, platform::Platform) + + Return `true` if the given `path` ends in a valid dynamic library filename. + E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns + `false` for a path like `"libbar.so.f.a"`. + + This method is deprecated and will be removed in Julia 2.0. + """ + function valid_dl_path(path::AbstractString, platform::AbstractPlatform) + try + parse_dl_name_version(path, string(os(platform))::String) + return true + catch e + if isa(e, ArgumentError) + return false + end + rethrow(e) + end + end + +end # module BinaryPlatformsCompat + +const BinaryPlatforms = BinaryPlatformsCompat diff --git a/src/BinaryPlatforms_compat.jl b/src/BinaryPlatforms_compat.jl deleted file mode 100644 index 879dcc0c83..0000000000 --- a/src/BinaryPlatforms_compat.jl +++ /dev/null @@ -1,148 +0,0 @@ -module BinaryPlatforms - -export platform_key_abi, platform_dlext, valid_dl_path, arch, libc, - libgfortran_version, libstdcxx_version, cxxstring_abi, parse_dl_name_version, - detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi, - call_abi, wordsize, triplet, select_platform, platforms_match, - CompilerABI, Platform, UnknownPlatform, Linux, MacOS, Windows, FreeBSD - -using Base.BinaryPlatforms: parse_dl_name_version, - detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi, - os, call_abi, select_platform, platforms_match, - AbstractPlatform, Platform, HostPlatform - -import Base.BinaryPlatforms: libgfortran_version, libstdcxx_version, platform_name, - wordsize, platform_dlext, tags, arch, libc, call_abi, - cxxstring_abi - -struct 
UnknownPlatform <: AbstractPlatform - UnknownPlatform(args...; kwargs...) = new() -end -tags(::UnknownPlatform) = Dict{String,String}("os"=>"unknown") - - -struct CompilerABI - libgfortran_version::Union{Nothing,VersionNumber} - libstdcxx_version::Union{Nothing,VersionNumber} - cxxstring_abi::Union{Nothing,Symbol} - - function CompilerABI(;libgfortran_version::Union{Nothing, VersionNumber} = nothing, - libstdcxx_version::Union{Nothing, VersionNumber} = nothing, - cxxstring_abi::Union{Nothing, Symbol} = nothing) - return new(libgfortran_version, libstdcxx_version, cxxstring_abi) - end -end - -# Easy replacement constructor -function CompilerABI(cabi::CompilerABI; libgfortran_version=nothing, - libstdcxx_version=nothing, - cxxstring_abi=nothing) - return CompilerABI(; - libgfortran_version=something(libgfortran_version, Some(cabi.libgfortran_version)), - libstdcxx_version=something(libstdcxx_version, Some(cabi.libstdcxx_version)), - cxxstring_abi=something(cxxstring_abi, Some(cabi.cxxstring_abi)), - ) -end - -libgfortran_version(cabi::CompilerABI) = cabi.libgfortran_version -libstdcxx_version(cabi::CompilerABI) = cabi.libstdcxx_version -cxxstring_abi(cabi::CompilerABI) = cabi.cxxstring_abi - -for T in (:Linux, :Windows, :MacOS, :FreeBSD) - @eval begin - struct $(T) <: AbstractPlatform - p::Platform - function $(T)(arch::Symbol; compiler_abi=nothing, kwargs...) 
- if compiler_abi !== nothing - kwargs = (; kwargs..., - :libgfortran_version => libgfortran_version(compiler_abi), - :libstdcxx_version => libstdcxx_version(compiler_abi), - :cxxstring_abi => cxxstring_abi(compiler_abi) - ) - end - return new(Platform(string(arch), $(string(T)); kwargs..., validate_strict=true)) - end - end - end -end - -const PlatformUnion = Union{Linux,MacOS,Windows,FreeBSD} - -# First, methods we need to coerce to Symbol for backwards-compatibility -for f in (:arch, :libc, :call_abi, :cxxstring_abi) - @eval begin - function $(f)(p::PlatformUnion) - str = $(f)(p.p) - if str === nothing - return nothing - end - return Symbol(str) - end - end -end - -# Next, things we don't need to coerce -for f in (:libgfortran_version, :libstdcxx_version, :platform_name, :wordsize, :platform_dlext, :tags, :triplet) - @eval begin - $(f)(p::PlatformUnion) = $(f)(p.p) - end -end - -# Finally, add equality testing between these wrapper types and other AbstractPlatforms -@eval begin - Base.:(==)(a::PlatformUnion, b::AbstractPlatform) = b == a.p -end - -# Add one-off functions -MacOS(; kwargs...) = MacOS(:x86_64; kwargs...) -FreeBSD(; kwargs...) = FreeBSD(:x86_64; kwargs...) - -function triplet(p::AbstractPlatform) - # We are going to sub off to `Base.BinaryPlatforms.triplet()` here, - # with the important exception that we override `os_version` to better - # mimic the old behavior of `triplet()` - if Sys.isfreebsd(p) - p = deepcopy(p) - p["os_version"] = "11.1.0" - elseif Sys.isapple(p) - p = deepcopy(p) - p["os_version"] = "14.0.0" - end - return Base.BinaryPlatforms.triplet(p) -end - -""" - platform_key_abi(machine::AbstractString) - -Returns the platform key for the current platform, or any other though the -the use of the `machine` parameter. 
- -This method is deprecated, import `Base.BinaryPlatforms` and use either `HostPlatform()` -to get the current host platform, or `parse(Base.BinaryPlatforms.Platform, triplet)` -to parse the triplet for some other platform instead. -""" -platform_key_abi() = HostPlatform() -platform_key_abi(triplet::AbstractString) = parse(Platform, triplet) - -""" - valid_dl_path(path::AbstractString, platform::Platform) - -Return `true` if the given `path` ends in a valid dynamic library filename. -E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns -`false` for a path like `"libbar.so.f.a"`. - -This method is deprecated and will be removed in Julia 2.0. -""" -function valid_dl_path(path::AbstractString, platform::AbstractPlatform) - try - parse_dl_name_version(path, string(os(platform))::String) - return true - catch e - if isa(e, ArgumentError) - return false - end - rethrow(e) - end -end - -end # module BinaryPlatforms diff --git a/src/GitTools.jl b/src/GitTools.jl index 02fae614ea..70e129350e 100644 --- a/src/GitTools.jl +++ b/src/GitTools.jl @@ -13,6 +13,45 @@ using Printf use_cli_git() = Base.get_bool_env("JULIA_PKG_USE_CLI_GIT", false) const RESOLVING_DELTAS_HEADER = "Resolving Deltas:" +# Check if LibGit2 supports shallow clones (requires LibGit2 >= 1.7.0) +# We check both the LibGit2 version and the existence of `isshallow` to ensure +# the shallow clone functionality is available +function supports_shallow_clone() + # This seems buggy on Windows? Get some weird CI errors with it. 
+ if Sys.iswindows() + return false + end + has_version = @static if isdefined(LibGit2, :VERSION) + LibGit2.VERSION >= v"1.7.0" + else + false + end + has_isshallow = isdefined(LibGit2, :isshallow) + return has_version && has_isshallow +end + +# Check if a URL is a local path or file:// URL +# Shallow clones are only supported for network protocols (HTTP, HTTPS, Git, SSH) +function is_local_repo(url::AbstractString) + # Check if it's a local filesystem path + ispath(url) && return true + # Check if it uses file:// protocol + startswith(url, "file://") && return true + return false +end + +# Check if a repository is a shallow clone +function isshallow(repo::LibGit2.GitRepo) + if supports_shallow_clone() && isdefined(LibGit2, :isshallow) + return LibGit2.isshallow(repo) + else + # Fallback: check for .git/shallow file + repo_path = LibGit2.path(repo) + shallow_file = joinpath(repo_path, "shallow") + return isfile(shallow_file) + end +end + function transfer_progress(progress::Ptr{LibGit2.TransferProgress}, p::Any) progress = unsafe_load(progress) @assert haskey(p, :transfer_progress) @@ -41,13 +80,13 @@ const GIT_USERS = Dict{String, Union{Nothing, String}}() @deprecate setprotocol!(proto::Union{Nothing, AbstractString}) setprotocol!(protocol = proto) false function setprotocol!(; - domain::AbstractString="github.com", - protocol::Union{Nothing, AbstractString}=nothing, - user::Union{Nothing, AbstractString}=(protocol == "ssh" ? "git" : nothing) -) + domain::AbstractString = "github.com", + protocol::Union{Nothing, AbstractString} = nothing, + user::Union{Nothing, AbstractString} = (protocol == "ssh" ? 
"git" : nothing) + ) domain = lowercase(domain) GIT_PROTOCOLS[domain] = protocol - GIT_USERS[domain] = user + return GIT_USERS[domain] = user end function normalize_url(url::AbstractString) @@ -61,7 +100,7 @@ function normalize_url(url::AbstractString) proto = get(GIT_PROTOCOLS, lowercase(host), nothing) - if proto === nothing + return if proto === nothing url else user = get(GIT_USERS, lowercase(host), nothing) @@ -80,60 +119,75 @@ function ensure_clone(io::IO, target_path, url; kwargs...) end function checkout_tree_to_path(repo::LibGit2.GitRepo, tree::LibGit2.GitObject, path::String) - GC.@preserve path begin + return GC.@preserve path begin opts = LibGit2.CheckoutOptions( checkout_strategy = LibGit2.Consts.CHECKOUT_FORCE, target_directory = Base.unsafe_convert(Cstring, path) ) - LibGit2.checkout_tree(repo, tree, options=opts) + LibGit2.checkout_tree(repo, tree, options = opts) end end -function clone(io::IO, url, source_path; header=nothing, credentials=nothing, kwargs...) +function clone(io::IO, url, source_path; header = nothing, credentials = nothing, isbare = false, depth::Integer = 0, kwargs...) url = String(url)::String source_path = String(source_path)::String @assert !isdir(source_path) || isempty(readdir(source_path)) url = normalize_url(url) + + # Disable shallow clones for local repos (not supported) or if LibGit2 doesn't support it + if depth > 0 && (is_local_repo(url) || !supports_shallow_clone()) + depth = 0 + end + printpkgstyle(io, :Cloning, header === nothing ? 
"git-repo `$url`" : header) - bar = MiniProgressBar(header = "Fetching:", color = Base.info_color()) + bar = MiniProgressBar(header = "Cloning:", color = Base.info_color()) fancyprint = can_fancyprint(io) - callbacks = if fancyprint - LibGit2.Callbacks( - :transfer_progress => ( - @cfunction(transfer_progress, Cint, (Ptr{LibGit2.TransferProgress}, Any)), - bar, - ) - ) - else - LibGit2.Callbacks() - end fancyprint && start_progress(io, bar) if credentials === nothing credentials = LibGit2.CachedCredentials() end - try + return try if use_cli_git() - cmd = `git clone --quiet $url $source_path` + args = ["--quiet"] + depth > 0 && push!(args, "--depth=$depth") + isbare && push!(args, "--bare") + push!(args, url, source_path) + cmd = `git clone $args` try - run(pipeline(cmd; stdout=devnull)) + run(pipeline(cmd; stdout = devnull)) catch err Pkg.Types.pkgerror("The command $(cmd) failed, error: $err") end return LibGit2.GitRepo(source_path) else + callbacks = if fancyprint + LibGit2.Callbacks( + :transfer_progress => ( + @cfunction(transfer_progress, Cint, (Ptr{LibGit2.TransferProgress}, Any)), + bar, + ) + ) + else + LibGit2.Callbacks() + end mkpath(source_path) - return LibGit2.clone(url, source_path; callbacks=callbacks, credentials=credentials, kwargs...) + # Only pass depth if shallow clones are supported and depth > 0 + if depth > 0 + return LibGit2.clone(url, source_path; callbacks, credentials, isbare, depth, kwargs...) + else + return LibGit2.clone(url, source_path; callbacks, credentials, isbare, kwargs...) 
+ end end catch err - rm(source_path; force=true, recursive=true) + rm(source_path; force = true, recursive = true) err isa LibGit2.GitError || err isa InterruptException || rethrow() if err isa InterruptException Pkg.Types.pkgerror("git clone of `$url` interrupted") elseif (err.class == LibGit2.Error.Net && err.code == LibGit2.Error.EINVALIDSPEC) || - (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ENOTFOUND) - Pkg.Types.pkgerror("git repository not found at `$(url)`") + (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ENOTFOUND) + Pkg.Types.pkgerror("git repository not found at `$(url)`: ($(err.msg))") else - Pkg.Types.pkgerror("failed to clone from $(url), error: $err") + Pkg.Types.pkgerror("failed to clone from $(url): ($(err.msg))") end finally Base.shred!(credentials) @@ -141,17 +195,26 @@ function clone(io::IO, url, source_path; header=nothing, credentials=nothing, kw end end -function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl=nothing; header=nothing, credentials=nothing, refspecs=[""], kwargs...) +function geturl(repo) + return LibGit2.with(LibGit2.get(LibGit2.GitRemote, repo, "origin")) do remote + LibGit2.url(remote) + end +end + +function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl = nothing; header = nothing, credentials = nothing, refspecs::Vector{String} = [""], depth::Integer = 0, kwargs...) if remoteurl === nothing - remoteurl = LibGit2.with(LibGit2.get(LibGit2.GitRemote, repo, "origin")) do remote - LibGit2.url(remote) - end + remoteurl = geturl(repo) + end + + # Disable shallow fetches for local repos (not supported) or if LibGit2 doesn't support it + if depth > 0 && (is_local_repo(remoteurl) || !supports_shallow_clone()) + depth = 0 end + fancyprint = can_fancyprint(io) remoteurl = normalize_url(remoteurl) printpkgstyle(io, :Updating, header === nothing ? 
"git-repo `$remoteurl`" : header) bar = MiniProgressBar(header = "Fetching:", color = Base.info_color()) - fancyprint = can_fancyprint(io) callbacks = if fancyprint LibGit2.Callbacks( :transfer_progress => ( @@ -166,27 +229,33 @@ function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl=nothing; header=nothing, if credentials === nothing credentials = LibGit2.CachedCredentials() end - try + return try if use_cli_git() - let remoteurl=remoteurl - cd(LibGit2.path(repo)) do - cmd = `git fetch -q $remoteurl $(only(refspecs))` - try - run(pipeline(cmd; stdout=devnull)) - catch err - Pkg.Types.pkgerror("The command $(cmd) failed, error: $err") - end + let remoteurl = remoteurl + args = ["-C", LibGit2.path(repo), "fetch", "-q"] + depth > 0 && push!(args, "--depth=$depth") + push!(args, remoteurl, only(refspecs)) + cmd = `git $args` + try + run(pipeline(cmd; stdout = devnull)) + catch err + Pkg.Types.pkgerror("The command $(cmd) failed, error: $err") end end else - return LibGit2.fetch(repo; remoteurl=remoteurl, callbacks=callbacks, refspecs=refspecs, kwargs...) + # Only pass depth if shallow clones are supported and depth > 0 + if depth > 0 + return LibGit2.fetch(repo; remoteurl, callbacks, credentials, refspecs, depth, kwargs...) + else + return LibGit2.fetch(repo; remoteurl, callbacks, credentials, refspecs, kwargs...) 
+ end end catch err err isa LibGit2.GitError || rethrow() if (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ERROR) - Pkg.Types.pkgerror("Git repository not found at '$(remoteurl)'") + Pkg.Types.pkgerror("Git repository not found at '$(remoteurl)': ($(err.msg))") else - Pkg.Types.pkgerror("failed to fetch from $(remoteurl), error: $err") + Pkg.Types.pkgerror("failed to fetch from $(remoteurl): ($(err.msg))") end finally Base.shred!(credentials) @@ -196,8 +265,8 @@ end # This code gratefully adapted from https://github.com/simonbyrne/GitX.jl -@enum GitMode mode_dir=0o040000 mode_normal=0o100644 mode_executable=0o100755 mode_symlink=0o120000 mode_submodule=0o160000 -Base.string(mode::GitMode) = string(UInt32(mode); base=8) +@enum GitMode mode_dir = 0o040000 mode_normal = 0o100644 mode_executable = 0o100755 mode_symlink = 0o120000 mode_submodule = 0o160000 +Base.string(mode::GitMode) = string(UInt32(mode); base = 8) Base.print(io::IO, mode::GitMode) = print(io, string(mode)) function gitmode(path::AbstractString) @@ -227,7 +296,7 @@ end Calculate the git blob hash of a given path. """ -function blob_hash(::Type{HashType}, path::AbstractString) where HashType +function blob_hash(::Type{HashType}, path::AbstractString) where {HashType} ctx = HashType() if islink(path) datalen = length(readlink(path)) @@ -239,7 +308,7 @@ function blob_hash(::Type{HashType}, path::AbstractString) where HashType SHA.update!(ctx, Vector{UInt8}("blob $(datalen)\0")) # Next, read data in in chunks of 4KB - buff = Vector{UInt8}(undef, 4*1024) + buff = Vector{UInt8}(undef, 4 * 1024) try if islink(path) @@ -287,9 +356,9 @@ end Calculate the git tree hash of a given path. 
""" -function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,Nothing} = nothing, indent::Int=0) where HashType +function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO, Nothing} = nothing, indent::Int = 0) where {HashType} entries = Tuple{String, Vector{UInt8}, GitMode}[] - for f in sort(readdir(root; join=true); by = f -> gitmode(f) == mode_dir ? f*"/" : f) + for f in sort(readdir(root; join = true); by = f -> gitmode(f) == mode_dir ? f * "/" : f) # Skip `.git` directories if basename(f) == ".git" continue @@ -306,11 +375,11 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N if debug_out !== nothing child_stream = IOBuffer() end - hash = tree_hash(HashType, filepath; debug_out=child_stream, indent=indent+1) + hash = tree_hash(HashType, filepath; debug_out = child_stream, indent = indent + 1) if debug_out !== nothing indent_str = "| "^indent println(debug_out, "$(indent_str)+ [D] $(basename(filepath)) - $(bytes2hex(hash))") - print(debug_out, String(take!(child_stream))) + print(debug_out, String(take!(child_stream::IOBuffer))) println(debug_out, indent_str) end else @@ -326,7 +395,7 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N content_size = 0 for (n, h, m) in entries - content_size += ndigits(UInt32(m); base=8) + 1 + sizeof(n) + 1 + sizeof(h) + content_size += ndigits(UInt32(m); base = 8) + 1 + sizeof(n) + 1 + sizeof(h) end # Return the hash of these entries @@ -338,17 +407,24 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N end return SHA.digest!(ctx) end -tree_hash(root::AbstractString; debug_out::Union{IO,Nothing} = nothing) = tree_hash(SHA.SHA1_CTX, root; debug_out) +tree_hash(root::AbstractString; debug_out::Union{IO, Nothing} = nothing) = tree_hash(SHA.SHA1_CTX, root; debug_out) function check_valid_HEAD(repo) - try LibGit2.head(repo) + return try + LibGit2.head(repo) catch err - Pkg.Types.pkgerror("invalid 
git HEAD ($(err.msg))") + url = try + geturl(repo) + catch + "(unknown url)" + end + Pkg.Types.pkgerror("invalid git HEAD in $url ($(err.msg))") end end -function git_file_stream(repo::LibGit2.GitRepo, spec::String; fakeit::Bool=false)::IO - blob = try LibGit2.GitBlob(repo, spec) +function git_file_stream(repo::LibGit2.GitRepo, spec::String; fakeit::Bool = false)::IO + blob = try + LibGit2.GitBlob(repo, spec) catch err err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow() fakeit && return devnull diff --git a/src/HistoricalStdlibs.jl b/src/HistoricalStdlibs.jl index d5b4ad5049..6867d1e832 100644 --- a/src/HistoricalStdlibs.jl +++ b/src/HistoricalStdlibs.jl @@ -5,13 +5,13 @@ struct StdlibInfo uuid::UUID # This can be `nothing` if it's an unregistered stdlib - version::Union{Nothing,VersionNumber} + version::Union{Nothing, VersionNumber} deps::Vector{UUID} weakdeps::Vector{UUID} end -const DictStdLibs = Dict{UUID,StdlibInfo} +const DictStdLibs = Dict{UUID, StdlibInfo} # Julia standard libraries with duplicate entries removed so as to store only the # first release in a set of releases that all contain the same set of stdlibs. diff --git a/src/MiniProgressBars.jl b/src/MiniProgressBars.jl index c0a487d6b6..26c11da564 100644 --- a/src/MiniProgressBars.jl +++ b/src/MiniProgressBars.jl @@ -5,12 +5,12 @@ export MiniProgressBar, start_progress, end_progress, show_progress, print_progr using Printf # Until Base.format_bytes supports sigdigits -function pkg_format_bytes(bytes; binary=true, sigdigits::Integer=3) +function pkg_format_bytes(bytes; binary = true, sigdigits::Integer = 3) units = binary ? Base._mem_units : Base._cnt_units factor = binary ? 1024 : 1000 bytes, mb = Base.prettyprint_getunits(bytes, length(units), Int64(factor)) if mb == 1 - return string(Int(bytes), " ", Base._mem_units[mb], bytes==1 ? "" : "s") + return string(Int(bytes), " ", Base._mem_units[mb], bytes == 1 ? 
"" : "s") else return string(Base.Ryu.writefixed(Float64(bytes), sigdigits), binary ? " $(units[mb])" : "$(units[mb])B") end @@ -37,10 +37,10 @@ const PROGRESS_BAR_PERCENTAGE_GRANULARITY = Ref(0.1) function start_progress(io::IO, _::MiniProgressBar) ansi_disablecursor = "\e[?25l" - print(io, ansi_disablecursor) + return print(io, ansi_disablecursor) end -function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagereturn=true) +function show_progress(io::IO, p::MiniProgressBar; termwidth = nothing, carriagereturn = true) if p.max == 0 perc = 0.0 prev_perc = 0.0 @@ -62,24 +62,24 @@ function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagere p.has_shown = true progress_text = if p.mode == :percentage - @sprintf "%2.1f %%" perc + @sprintf "%5.1f %%" perc elseif p.mode == :int - string(p.current, "/", p.max) + string(p.current, "/", p.max) elseif p.mode == :data - lpad(string(pkg_format_bytes(p.current; sigdigits=1), "/", pkg_format_bytes(p.max; sigdigits=1)), 20) + lpad(string(pkg_format_bytes(p.current; sigdigits = 1), "/", pkg_format_bytes(p.max; sigdigits = 1)), 20) else error("Unknown mode $(p.mode)") end termwidth = @something termwidth displaysize(io)[2] - max_progress_width = max(0, min(termwidth - textwidth(p.header) - textwidth(progress_text) - 10 , p.width)) + max_progress_width = max(0, min(termwidth - textwidth(p.header) - textwidth(progress_text) - 10, p.width)) n_filled = floor(Int, max_progress_width * perc / 100) partial_filled = (max_progress_width * perc / 100) - n_filled n_left = max_progress_width - n_filled headers = split(p.header) - to_print = sprint(; context=io) do io + to_print = sprint(; context = io) do io print(io, " "^p.indent) if p.main - printstyled(io, headers[1], " "; color=:green, bold=true) + printstyled(io, headers[1], " "; color = :green, bold = true) length(headers) > 1 && printstyled(io, join(headers[2:end], ' '), " ") else print(io, p.header, " ") @@ -88,49 +88,38 @@ function 
show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagere print(io, p.status) else hascolor = get(io, :color, false)::Bool - printstyled(io, "━"^n_filled; color=p.color) + printstyled(io, "━"^n_filled; color = p.color) if n_left > 0 if hascolor if partial_filled > 0.5 - printstyled(io, "╸"; color=p.color) # More filled, use ╸ + printstyled(io, "╸"; color = p.color) # More filled, use ╸ else - printstyled(io, "╺"; color=:light_black) # Less filled, use ╺ + printstyled(io, "╺"; color = :light_black) # Less filled, use ╺ end end c = hascolor ? "━" : " " - printstyled(io, c^(n_left-1+!hascolor); color=:light_black) + printstyled(io, c^(n_left - 1 + !hascolor); color = :light_black) end - printstyled(io, " "; color=:light_black) + printstyled(io, " "; color = :light_black) print(io, progress_text) end carriagereturn && print(io, "\r") end # Print everything in one call - print(io, to_print) + return print(io, to_print) end function end_progress(io, p::MiniProgressBar) ansi_enablecursor = "\e[?25h" ansi_clearline = "\e[2K" - print(io, ansi_enablecursor * ansi_clearline) + return print(io, ansi_enablecursor * ansi_clearline) end -# Useful when writing a progress bar in the bottom -# makes the bottom progress bar not flicker -# prog = MiniProgressBar(...) 
-# prog.end = n -# for progress in 1:n -# print_progress_bottom(io) -# println("stuff") -# prog.current = progress -# showprogress(io, prog) -# end -# function print_progress_bottom(io::IO) ansi_clearline = "\e[2K" ansi_movecol1 = "\e[1G" ansi_moveup(n::Int) = string("\e[", n, "A") - print(io, "\e[S" * ansi_moveup(1) * ansi_clearline * ansi_movecol1) + return print(io, "\e[S" * ansi_moveup(1) * ansi_clearline * ansi_movecol1) end end diff --git a/src/Operations.jl b/src/Operations.jl index 6d6bc94558..10254a95ff 100644 --- a/src/Operations.jl +++ b/src/Operations.jl @@ -2,6 +2,8 @@ module Operations +using Base: CacheFlags +using FileWatching: FileWatching using UUIDs using Random: randstring import LibGit2, Dates, TOML @@ -9,19 +11,61 @@ import LibGit2, Dates, TOML using ..Types, ..Resolve, ..PlatformEngines, ..GitTools, ..MiniProgressBars import ..depots, ..depots1, ..devdir, ..set_readonly, ..Types.PackageEntry import ..Artifacts: ensure_artifact_installed, artifact_names, extract_all_hashes, - artifact_exists, select_downloadable_artifacts + artifact_exists, select_downloadable_artifacts, mv_temp_dir_retries using Base.BinaryPlatforms import ...Pkg import ...Pkg: pkg_server, Registry, pathrepr, can_fancyprint, printpkgstyle, stderr_f, OFFLINE_MODE import ...Pkg: UPDATED_REGISTRY_THIS_SESSION, RESPECT_SYSIMAGE_VERSIONS, should_autoprecompile -import ...Pkg: usable_io +import ...Pkg: usable_io, discover_repo, create_cachedir_tag, manifest_rel_path ######### # Utils # ######### +# Helper functions for yanked package checking +function is_pkgversion_yanked(uuid::UUID, version::VersionNumber, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()) + for reg in registries + reg_pkg = get(reg, uuid, nothing) + if reg_pkg !== nothing + info = Registry.registry_info(reg, reg_pkg) + if haskey(info.version_info, version) && Registry.isyanked(info, version) + return true + end + end + end + return false +end + +function 
is_pkgversion_yanked(pkg::PackageSpec, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()) + if pkg.uuid === nothing || pkg.version === nothing || !(pkg.version isa VersionNumber) + return false + end + return is_pkgversion_yanked(pkg.uuid, pkg.version, registries) +end + +function is_pkgversion_yanked(entry::PackageEntry, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()) + if entry.version === nothing || !(entry.version isa VersionNumber) + return false + end + return is_pkgversion_yanked(entry.uuid, entry.version, registries) +end + +function get_pkg_deprecation_info(pkg::Union{PackageSpec, PackageEntry}, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()) + pkg.uuid === nothing && return nothing + for reg in registries + reg_pkg = get(reg, pkg.uuid, nothing) + if reg_pkg !== nothing + info = Registry.registry_info(reg, reg_pkg) + if Registry.isdeprecated(info) + return info.deprecated + end + end + end + return nothing +end + function default_preserve() - if Base.get_bool_env("JULIA_PKG_PRESERVE_TIERED_INSTALLED", false) + return if Base.get_bool_env("JULIA_PKG_PRESERVE_TIERED_INSTALLED", false) PRESERVE_TIERED_INSTALLED else PRESERVE_TIERED @@ -42,14 +86,53 @@ end # more accurate name is `should_be_tracking_registered_version` # the only way to know for sure is to key into the registries -tracking_registered_version(pkg::Union{PackageSpec, PackageEntry}, julia_version=VERSION) = +tracking_registered_version(pkg::Union{PackageSpec, PackageEntry}, julia_version = VERSION) = !is_stdlib(pkg.uuid, julia_version) && pkg.path === nothing && pkg.repo.source === nothing + +# Try to download all registries referenced in `ctx.env.manifest.registries`. +# Warn if some fail, but don't error (packages may still work with the registries we have). 
+function ensure_manifest_registries!(ctx::Context) + manifest_regs = ctx.env.manifest.registries + isempty(manifest_regs) && return + + regs_by_uuid = Dict(reg.uuid => reg for reg in ctx.registries) + missing = ManifestRegistryEntry[] + for entry in values(manifest_regs) + reg = get(regs_by_uuid, entry.uuid, nothing) + if reg === nothing + push!(missing, entry) + end + end + + isempty(missing) && return + + # Try to install missing registries that have URLs + specs = Registry.RegistrySpec[] + for entry in missing + if entry.url !== nothing + push!(specs, Registry.RegistrySpec(uuid = entry.uuid, url = entry.url)) + end + end + + if !isempty(specs) + try + Registry.add(specs; io = ctx.io) + copy!(ctx.registries, Registry.reachable_registries()) + catch e + # Warn but don't error - packages may still work with available registries + @warn "Failed to install some registries from manifest" exception = (e, catch_backtrace()) + end + end + + return +end + function source_path(manifest_file::String, pkg::Union{PackageSpec, PackageEntry}, julia_version = VERSION) - pkg.tree_hash !== nothing ? find_installed(pkg.name, pkg.uuid, pkg.tree_hash) : - pkg.path !== nothing ? joinpath(dirname(manifest_file), pkg.path) : - is_or_was_stdlib(pkg.uuid, julia_version) ? Types.stdlib_path(pkg.name) : - nothing + return pkg.tree_hash !== nothing ? find_installed(pkg.name, pkg.uuid, pkg.tree_hash) : + pkg.path !== nothing ? normpath(joinpath(dirname(manifest_file), pkg.path)) : + is_or_was_stdlib(pkg.uuid, julia_version) ? 
Types.stdlib_path(pkg.name) : + nothing end #TODO rename @@ -67,8 +150,24 @@ function load_version(version, fixed, preserve::PreserveLevel) end end -function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_DIRECT) +function merge_pkg_source!(pkg::PackageSpec, path::Union{Nothing, String}, repo::GitRepo) + if pkg.path === nothing && path !== nothing + pkg.path = path + elseif pkg.repo.source === nothing && repo.source !== nothing + pkg.repo.source = repo.source + end + if pkg.repo.rev === nothing && repo.rev !== nothing + pkg.repo.rev = repo.rev + end + return +end +merge_pkg_source!(target::PackageSpec, source::PackageSpec) = + merge_pkg_source!(target, source.path, source.repo) + +function load_direct_deps( + env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_DIRECT + ) pkgs_direct = load_project_deps(env.project, env.project_file, env.manifest, env.manifest_file, pkgs; preserve) for (path, project) in env.workspace @@ -82,18 +181,7 @@ function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[] pkg = pkgs_direct[idxs[1]] idx_to_drop = Int[] for i in Iterators.drop(idxs, 1) - # Merge in sources from other projects - # Manifest info like pinned, tree_hash and version should be the same - # since that is all loaded from the same manifest - if pkg.path === nothing && pkgs_direct[i].path !== nothing - pkg.path = pkgs_direct[i].path - end - if pkg.repo.source === nothing && pkgs_direct[i].repo.source !== nothing - pkg.repo.source = pkgs_direct[i].repo.source - end - if pkg.repo.rev === nothing && pkgs_direct[i].repo.rev !== nothing - pkg.repo.rev = pkgs_direct[i].repo.rev - end + merge_pkg_source!(pkg, pkgs_direct[i]) push!(idx_to_drop, i) end sort!(unique!(idx_to_drop)) @@ -103,70 +191,85 @@ function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[] return vcat(pkgs, pkgs_direct) end -function 
load_project_deps(project::Project, project_file::String, manifest::Manifest, manifest_file::String, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_DIRECT) +function load_project_deps( + project::Project, project_file::String, manifest::Manifest, manifest_file::String, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_DIRECT + ) pkgs_direct = PackageSpec[] if project.name !== nothing && project.uuid !== nothing && findfirst(pkg -> pkg.uuid == project.uuid, pkgs) === nothing path = Types.relative_project_path(manifest_file, dirname(project_file)) - pkg = PackageSpec(;name=project.name, uuid=project.uuid, version=project.version, path) + pkg = PackageSpec(; name = project.name, uuid = project.uuid, version = project.version, path) push!(pkgs_direct, pkg) end for (name::String, uuid::UUID) in project.deps findfirst(pkg -> pkg.uuid == uuid, pkgs) === nothing || continue # do not duplicate packages - path, repo = get_path_repo(project, name) + path, repo = get_path_repo(project, project_file, manifest_file, name) entry = manifest_info(manifest, uuid) - push!(pkgs_direct, entry === nothing ? - PackageSpec(;uuid, name, path, repo) : - PackageSpec(; - uuid = uuid, - name = name, - path = path === nothing ? entry.path : path, - repo = repo == GitRepo() ? entry.repo : repo, - pinned = entry.pinned, - tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? - version = load_version(entry.version, isfixed(entry), preserve), - )) + push!( + pkgs_direct, entry === nothing ? + PackageSpec(; uuid, name, path, repo) : + PackageSpec(; + uuid = uuid, + name = name, + path = path === nothing ? entry.path : path, + repo = repo == GitRepo() ? entry.repo : repo, + pinned = entry.pinned, + tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? 
+ version = load_version(entry.version, isfixed(entry), preserve), + ) + ) end return pkgs_direct end -function load_manifest_deps(manifest::Manifest, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_ALL) +function load_manifest_deps( + manifest::Manifest, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_ALL + ) pkgs = copy(pkgs) for (uuid, entry) in manifest findfirst(pkg -> pkg.uuid == uuid, pkgs) === nothing || continue # do not duplicate packages - push!(pkgs, PackageSpec( - uuid = uuid, - name = entry.name, - path = entry.path, - pinned = entry.pinned, - repo = entry.repo, - tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? - version = load_version(entry.version, isfixed(entry), preserve), - )) + push!( + pkgs, PackageSpec( + uuid = uuid, + name = entry.name, + path = entry.path, + pinned = entry.pinned, + repo = entry.repo, + tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? + version = load_version(entry.version, isfixed(entry), preserve), + ) + ) end return pkgs end -function load_all_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_ALL) - pkgs = load_manifest_deps(env.manifest, pkgs; preserve=preserve) +function load_all_deps( + env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_ALL + ) + pkgs = load_manifest_deps(env.manifest, pkgs; preserve = preserve) # Sources takes presedence over the manifest... 
for pkg in pkgs - path, repo = get_path_repo(env.project, pkg.name) + path, repo = get_path_repo(env.project, env.project_file, env.manifest_file, pkg.name) if path !== nothing + # Path from [sources] takes precedence - clear tree_hash and repo from manifest + pkg.tree_hash = nothing + pkg.repo = GitRepo() # Clear any repo info pkg.path = path end if repo.source !== nothing + # Repo from [sources] takes precedence - clear path from manifest + pkg.path = nothing pkg.repo.source = repo.source end if repo.rev !== nothing pkg.repo.rev = repo.rev end end - return load_direct_deps(env, pkgs; preserve=preserve) + return load_direct_deps(env, pkgs; preserve = preserve) end function load_all_deps_loadable(env::EnvCache) @@ -178,7 +281,7 @@ function load_all_deps_loadable(env::EnvCache) end -function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPlatform())::Bool +function is_instantiated(env::EnvCache, workspace::Bool = false; platform = HostPlatform())::Bool # Load everything if workspace pkgs = Operations.load_all_deps(env) @@ -191,7 +294,7 @@ function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPl # so only add it if it isn't there idx = findfirst(x -> x.uuid == env.pkg.uuid, pkgs) if idx === nothing - push!(pkgs, Types.PackageSpec(name=env.pkg.name, uuid=env.pkg.uuid, version=env.pkg.version, path=dirname(env.project_file))) + push!(pkgs, Types.PackageSpec(name = env.pkg.name, uuid = env.pkg.uuid, version = env.pkg.version, path = dirname(env.project_file))) end else # Make sure artifacts for project exist even if it is not a package @@ -201,52 +304,176 @@ function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPl return all(pkg -> is_package_downloaded(env.manifest_file, pkg; platform), pkgs) end -function update_manifest!(env::EnvCache, pkgs::Vector{PackageSpec}, deps_map, julia_version) +function update_manifest!(env::EnvCache, pkgs::Vector{PackageSpec}, deps_map, julia_version, 
registries::Vector{Registry.RegistryInstance}) manifest = env.manifest empty!(manifest) + # Determine which registries are used by tracking packages + used_registry_uuids = Set{UUID}() + pkg_to_registries = Dict{UUID, Vector{UUID}}() + + for pkg in pkgs + if tracking_registered_version(pkg, julia_version) + # Find all registries that have this package version + pkg_reg_uuids = UUID[] + for reg in registries + reg_pkg = get(reg, pkg.uuid, nothing) + reg_pkg === nothing && continue + pkg_info = Registry.registry_info(reg, reg_pkg) + version_info = get(pkg_info.version_info, pkg.version, nothing) + version_info === nothing && continue + push!(pkg_reg_uuids, reg.uuid) + push!(used_registry_uuids, reg.uuid) + end + if !isempty(pkg_reg_uuids) + pkg_to_registries[pkg.uuid] = pkg_reg_uuids + end + end + end + + # Build registry entries and name map for used registries only + uuid_to_name = Dict{UUID, String}() + registry_entries = Dict{String, ManifestRegistryEntry}() + for reg in registries + reg.uuid in used_registry_uuids || continue + reg_name = getfield(reg, :name) + uuid_to_name[reg.uuid] = reg_name + registry_entries[reg_name] = ManifestRegistryEntry( + id = reg_name, + uuid = reg.uuid, + url = getfield(reg, :repo), + ) + end + + # Build package entries for pkg in pkgs - entry = PackageEntry(;name = pkg.name, version = pkg.version, pinned = pkg.pinned, - tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid=pkg.uuid) + entry = PackageEntry(; + name = pkg.name, + # PackageEntry requires version::Union{VersionNumber, Nothing} + # pkg.version may be a VersionSpec in some cases (e.g., when freeing a package) + # so we convert non-VersionNumber values to nothing + version = pkg.version isa VersionNumber ? 
pkg.version : nothing, + pinned = pkg.pinned, + tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid = pkg.uuid + ) if is_stdlib(pkg.uuid, julia_version) # Only set stdlib versions for versioned (external) stdlibs entry.version = stdlib_version(pkg.uuid, julia_version) end entry.deps = deps_map[pkg.uuid] + + # Convert registry UUIDs to names + if haskey(pkg_to_registries, pkg.uuid) + reg_names = String[] + for reg_uuid in pkg_to_registries[pkg.uuid] + if haskey(uuid_to_name, reg_uuid) + push!(reg_names, uuid_to_name[reg_uuid]) + end + end + entry.registries = reg_names + end + env.manifest[pkg.uuid] = entry end prune_manifest(env) - record_project_hash(env) + + env.manifest.registries = registry_entries + env.manifest.manifest_format = v"2.1.0" + return record_project_hash(env) +end + +""" + get_project_syntax_version(p::Project) -> VersionNumber + +Extract the syntax version from a Project. + +This function determines which version of Julia syntax a package uses, following +this precedence order: + +1. If `syntax.julia_version` is present in the Project.toml, use that value +2. If `compat.julia` is specified, use the minimum version from the compat range +3. Otherwise, default to the current Julia VERSION + +This information is used to populate the `syntax.julia_version` field in the +Manifest.toml, allowing Base's loading system to parse each package with the +correct syntax version. 
+""" +function get_project_syntax_version(p::Project)::VersionNumber + # First check syntax.julia_version entry in Project.other + if p.julia_syntax_version !== nothing + return p.julia_syntax_version + end + + # If not found, default to minimum(compat["julia"]) + if haskey(p.compat, "julia") + julia_compat = p.compat["julia"] + # Get the minimum version from the first range + if !isempty(julia_compat.val.ranges) + first_range = first(julia_compat.val.ranges) + lower_bound = first_range.lower + return VersionNumber(lower_bound.t[1], lower_bound.t[2], lower_bound.t[3]) + end + end + + # Finally, if neither of those are set, default to the current Julia version + return dropbuild(VERSION) end # This has to be done after the packages have been downloaded # since we need access to the Project file to read the information # about extensions -function fixups_from_projectfile!(env::EnvCache) +function fixups_from_projectfile!(ctx::Context) + env = ctx.env for pkg in values(env.manifest) - # isfile_casesenstive within locate_project_file used to error on Windows if given a - # relative path so abspath it to be extra safe https://github.com/JuliaLang/julia/pull/55220 - project_file = Base.locate_project_file(abspath(source_path(env.manifest_file, pkg))) - if project_file isa String && isfile(project_file) - p = Types.read_project(project_file) - pkg.weakdeps = p.weakdeps - pkg.exts = p.exts - pkg.entryfile = p.entryfile - for (name, _) in p.weakdeps - if !haskey(p.deps, name) + if ctx.julia_version !== VERSION && is_stdlib(pkg.uuid, ctx.julia_version) + # Special handling for non-current julia_version resolving given the source for historical stdlibs + # isn't available at this stage as Pkg thinks it should not be needed, so rely on STDLIBS_BY_VERSION + stdlibs = Types.get_last_stdlibs(ctx.julia_version) + p = stdlibs[pkg.uuid] + pkg.weakdeps = Dict{String, Base.UUID}(stdlibs[uuid].name => uuid for uuid in p.weakdeps) + # pkg.exts = p.exts # TODO: STDLIBS_BY_VERSION doesn't 
record this + # pkg.entryfile = p.entryfile # TODO: STDLIBS_BY_VERSION doesn't record this + for (name, uuid) in pkg.weakdeps + if !(uuid in p.deps) delete!(pkg.deps, name) end end + else + # normal mode based on project files. + # isfile_casesenstive within locate_project_file used to error on Windows if given a + # relative path so abspath it to be extra safe https://github.com/JuliaLang/julia/pull/55220 + sourcepath = source_path(env.manifest_file, pkg) + if sourcepath === nothing + pkgerror("could not find source path for package $(pkg.name) based on manifest $(env.manifest_file)") + end + project_file = Base.locate_project_file(abspath(sourcepath)) + if project_file isa String && isfile(project_file) + p = Types.read_project(project_file) + pkg.weakdeps = p.weakdeps + pkg.exts = p.exts + pkg.entryfile = p.entryfile + pkg.julia_syntax_version = get_project_syntax_version(p) + + for (name, _) in p.weakdeps + if !haskey(p.deps, name) + delete!(pkg.deps, name) + end + end + end end end - prune_manifest(env) + return prune_manifest(env) end #################### # Registry Loading # #################### -function load_tree_hash!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, julia_version) +function load_tree_hash!( + registries::Vector{Registry.RegistryInstance}, + pkg::PackageSpec, + julia_version, + ) if is_stdlib(pkg.uuid, julia_version) && pkg.tree_hash !== nothing # manifests from newer julia versions might have stdlibs that are upgradable (FORMER_STDLIBS) # that have tree_hash recorded, which we need to clear for this version where they are not upgradable @@ -259,7 +486,7 @@ function load_tree_hash!(registries::Vector{Registry.RegistryInstance}, pkg::Pac for reg in registries reg_pkg = get(reg, pkg.uuid, nothing) reg_pkg === nothing && continue - pkg_info = Registry.registry_info(reg_pkg) + pkg_info = Registry.registry_info(reg, reg_pkg) version_info = get(pkg_info.version_info, pkg.version, nothing) version_info === nothing && continue 
hash′ = version_info.git_tree_sha1 @@ -277,6 +504,32 @@ end ####################################### get_compat(proj::Project, name::String) = haskey(proj.compat, name) ? proj.compat[name].val : Types.VersionSpec() get_compat_str(proj::Project, name::String) = haskey(proj.compat, name) ? proj.compat[name].str : nothing + +# Helper to check if compat is compatible with a non-upgradable stdlib, warn if not, and return appropriate VersionSpec +function check_stdlib_compat(name::String, uuid::UUID, compat::VersionSpec, project::Project, project_file::String, julia_version) + is_stdlib(uuid) && !(uuid in Types.UPGRADABLE_STDLIBS_UUIDS) || return compat + + stdlib_ver = stdlib_version(uuid, julia_version) + stdlib_ver === nothing && return compat + isempty(compat) && return compat + stdlib_ver in compat && return compat + + compat_str = get_compat_str(project, name) + if compat_str !== nothing + suggested_compat = string(compat_str, ", ", stdlib_ver.major == 0 ? string(stdlib_ver.major, ".", stdlib_ver.minor) : string(stdlib_ver.major)) + @warn """Ignoring incompatible compat entry `$name = $(repr(compat_str))` in $(repr(project_file)). + $name is a non-upgradable standard library with version $stdlib_ver in the current Julia version. 
+ Fix by setting compat to $(repr(suggested_compat)) to mark support of the current version $stdlib_ver.""" maxlog = 1 + end + return VersionSpec("*") +end + +# Get compat for a dependency, checking if it's a non-upgradable stdlib and warning if incompatible +function get_compat_with_stdlib_check(project::Project, project_file::String, name::String, uuid::UUID, julia_version) + compat = get_compat(project, name) + return check_stdlib_compat(name, uuid, compat, project, project_file, julia_version) +end + function set_compat(proj::Project, name::String, compat::String) semverspec = Types.semver_spec(compat, throw = false) isnothing(semverspec) && return false @@ -298,22 +551,22 @@ function reset_all_compat!(proj::Project) return nothing end -function collect_project(pkg::Union{PackageSpec, Nothing}, path::String) +function collect_project(pkg::Union{PackageSpec, Nothing}, path::String, manifest_file::String, julia_version) deps = PackageSpec[] weakdeps = Set{UUID}() - project_file = projectfile_path(path; strict=true) - project = project_file === nothing ? Project() : read_project(project_file) + project_file = projectfile_path(path; strict = true) + project = project_file === nothing ? 
Project() : read_project(project_file) julia_compat = get_compat(project, "julia") - if !isnothing(julia_compat) && !(VERSION in julia_compat) - pkgerror("julia version requirement from Project.toml's compat section not satisfied for package at `$path`") + if !isnothing(julia_compat) && !isnothing(julia_version) && !(julia_version in julia_compat) + pkgerror("julia version requirement for package at `$path` not satisfied: compat entry \"julia = $(get_compat_str(project, "julia"))\" does not include Julia version $julia_version") end for (name, uuid) in project.deps - path, repo = get_path_repo(project, name) - vspec = get_compat(project, name) - push!(deps, PackageSpec(name=name, uuid=uuid, version=vspec, path=path, repo=repo)) + dep_path, repo = get_path_repo(project, project_file, manifest_file, name) + vspec = get_compat_with_stdlib_check(project, something(project_file, path), name, uuid, julia_version) + push!(deps, PackageSpec(name = name, uuid = uuid, version = vspec, path = dep_path, repo = repo)) end for (name, uuid) in project.weakdeps - vspec = get_compat(project, name) + vspec = get_compat_with_stdlib_check(project, something(project_file, path), name, uuid, julia_version) push!(deps, PackageSpec(name, uuid, vspec)) push!(weakdeps, uuid) end @@ -329,27 +582,33 @@ function collect_project(pkg::Union{PackageSpec, Nothing}, path::String) end is_tracking_path(pkg) = pkg.path !== nothing -is_tracking_repo(pkg) = pkg.repo.source !== nothing +is_tracking_repo(pkg) = (pkg.repo.source !== nothing || pkg.repo.rev !== nothing) is_tracking_registry(pkg) = !is_tracking_path(pkg) && !is_tracking_repo(pkg) isfixed(pkg) = !is_tracking_registry(pkg) || pkg.pinned function collect_developed!(env::EnvCache, pkg::PackageSpec, developed::Vector{PackageSpec}) - source = project_rel_path(env, source_path(env.manifest_file, pkg)) + source = source_path(env.manifest_file, pkg) source_env = EnvCache(projectfile_path(source)) pkgs = load_project_deps(source_env.project, 
source_env.project_file, source_env.manifest, source_env.manifest_file) - for pkg in filter(is_tracking_path, pkgs) + for pkg in pkgs if any(x -> x.uuid == pkg.uuid, developed) continue end - # normalize path - # TODO: If path is collected from project, it is relative to the project file - # otherwise relative to manifest file.... - pkg.path = Types.relative_project_path(env.manifest_file, - project_rel_path(source_env, - source_path(source_env.manifest_file, pkg))) - push!(developed, pkg) - collect_developed!(env, pkg, developed) + if is_tracking_path(pkg) + # normalize path + # TODO: If path is collected from project, it is relative to the project file + # otherwise relative to manifest file.... + pkg.path = Types.relative_project_path( + env.manifest_file, + source_path(source_env.manifest_file, pkg) + ) + push!(developed, pkg) + collect_developed!(env, pkg, developed) + elseif is_tracking_repo(pkg) + push!(developed, pkg) + end end + return end function collect_developed(env::EnvCache, pkgs::Vector{PackageSpec}) @@ -360,62 +619,112 @@ function collect_developed(env::EnvCache, pkgs::Vector{PackageSpec}) return developed end -function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UUID, String}) - deps_map = Dict{UUID,Vector{PackageSpec}}() - weak_map = Dict{UUID,Set{UUID}}() +function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UUID, String}, julia_version) + deps_map = Dict{UUID, Vector{PackageSpec}}() + weak_map = Dict{UUID, Set{UUID}}() uuid = Types.project_uuid(env) - deps, weakdeps = collect_project(env.pkg, dirname(env.project_file)) + deps, weakdeps = collect_project(env.pkg, dirname(env.project_file), env.manifest_file, julia_version) deps_map[uuid] = deps weak_map[uuid] = weakdeps names[uuid] = env.pkg === nothing ? "project" : env.pkg.name for (path, project) in env.workspace uuid = Types.project_uuid(project, path) - pkg = project.name === nothing ? 
nothing : PackageSpec(name=project.name, uuid=uuid) - deps, weakdeps = collect_project(pkg, path) + pkg = project.name === nothing ? nothing : PackageSpec(name = project.name, uuid = uuid) + deps, weakdeps = collect_project(pkg, path, env.manifest_file, julia_version) deps_map[Types.project_uuid(env)] = deps weak_map[Types.project_uuid(env)] = weakdeps names[uuid] = project.name === nothing ? "project" : project.name end + pkg_queue = collect(pkgs) + pkg_by_uuid = Dict{UUID, PackageSpec}() for pkg in pkgs + pkg.uuid === nothing && continue + pkg_by_uuid[pkg.uuid] = pkg + end + new_fixed_pkgs = PackageSpec[] + seen = Set(keys(pkg_by_uuid)) + while !isempty(pkg_queue) + pkg = popfirst!(pkg_queue) + pkg.uuid === nothing && continue # add repo package if necessary source = source_path(env.manifest_file, pkg) - path = source === nothing ? nothing : project_rel_path(env, source) + path = source if (path === nothing || !isdir(path)) && (pkg.repo.rev !== nothing || pkg.repo.source !== nothing) # ensure revved package is installed # pkg.tree_hash is set in here - Types.handle_repo_add!(Types.Context(env=env), pkg) + Types.handle_repo_add!(Types.Context(env = env), pkg) # Recompute path - path = project_rel_path(env, source_path(env.manifest_file, pkg)) + path = source_path(env.manifest_file, pkg) end if !isdir(path) - pkgerror("expected package $(err_rep(pkg)) to exist at path `$path`") + # Find which packages depend on this missing package for better error reporting + dependents = String[] + for (dep_uuid, dep_entry) in env.manifest.deps + if pkg.uuid in values(dep_entry.deps) || pkg.uuid in values(dep_entry.weakdeps) + push!(dependents, dep_entry.name === nothing ? 
"unknown package [$dep_uuid]" : dep_entry.name) + end + end + + error_msg = "expected package $(err_rep(pkg)) to exist at path `$path`" + error_msg *= "\n\nThis package is referenced in the manifest file: $(env.manifest_file)" + + if !isempty(dependents) + if length(dependents) == 1 + error_msg *= "\nIt is required by: $(dependents[1])" + else + error_msg *= "\nIt is required by:\n$(join([" - $dep" for dep in dependents], "\n"))" + end + end + pkgerror(error_msg) end - deps, weakdeps = collect_project(pkg, path) + deps, weakdeps = collect_project(pkg, path, env.manifest_file, julia_version) deps_map[pkg.uuid] = deps weak_map[pkg.uuid] = weakdeps + for dep in deps + names[dep.uuid] = dep.name + dep_uuid = dep.uuid + if !is_tracking_registry(dep) && dep_uuid !== nothing && !(dep_uuid in seen) + # Only recursively collect path sources if the path actually exists + # Repo sources (with URL/rev) are always collected + if is_tracking_path(dep) + dep_source = source_path(env.manifest_file, dep) + if dep_source !== nothing && isdir(dep_source) + push!(pkg_queue, dep) + push!(new_fixed_pkgs, dep) + pkg_by_uuid[dep_uuid] = dep + push!(seen, dep_uuid) + end + else + # Repo source - always add to queue + push!(pkg_queue, dep) + push!(new_fixed_pkgs, dep) + pkg_by_uuid[dep_uuid] = dep + push!(seen, dep_uuid) + end + elseif dep_uuid !== nothing && !haskey(pkg_by_uuid, dep_uuid) + pkg_by_uuid[dep_uuid] = dep + end + end end - fixed = Dict{UUID,Resolve.Fixed}() + fixed = Dict{UUID, Resolve.Fixed}() # Collect the dependencies for the fixed packages for (uuid, deps) in deps_map q = Dict{UUID, VersionSpec}() for dep in deps names[dep.uuid] = dep.name - q[dep.uuid] = dep.version - end - if Types.is_project_uuid(env, uuid) - fix_pkg = env.pkg - else - idx = findfirst(pkg -> pkg.uuid == uuid, pkgs) - fix_pkg = pkgs[idx] + dep_version = dep.version + dep_version === nothing && continue + q[dep.uuid] = dep_version isa VersionSpec ? 
dep_version : VersionSpec(dep_version) end + fix_pkg = Types.is_project_uuid(env, uuid) ? env.pkg : get(pkg_by_uuid, uuid, nothing) fixpkgversion = fix_pkg === nothing ? v"0.0.0" : fix_pkg.version - fixed[uuid] = Resolve.Fixed(fixpkgversion, q, weak_map[uuid]) + fixed[uuid] = Resolve.Fixed(fixpkgversion, q, get(weak_map, uuid, Set{UUID}())) end - return fixed + return fixed, new_fixed_pkgs end # drops build detail in version but keeps the main prerelease context @@ -430,6 +739,12 @@ function get_compat_workspace(env, name) for (_, project) in env.workspace compat = intersect(compat, get_compat(project, name)) end + + uuid = get(env.project.deps, name, nothing) + if uuid !== nothing + compat = check_stdlib_compat(name, uuid, compat, env.project, env.project_file, VERSION) + end + return compat end @@ -438,22 +753,25 @@ end # sets version to a VersionNumber # adds any other packages which may be in the dependency graph # all versioned packages should have a `tree_hash` -function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version, - installed_only::Bool) +function resolve_versions!( + env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version, + installed_only::Bool + ) installed_only = installed_only || OFFLINE_MODE[] + # compatibility if julia_version !== nothing # only set the manifest julia_version if ctx.julia_version is not nothing - env.manifest.julia_version = dropbuild(VERSION) + env.manifest.julia_version = dropbuild(julia_version) v = intersect(julia_version, get_compat_workspace(env, "julia")) if isempty(v) - @warn "julia version requirement for project not satisfied" _module=nothing _file=nothing + @warn "julia version requirement for project not satisfied" _module = nothing _file = nothing end end jll_fix = Dict{UUID, VersionNumber}() for pkg in pkgs - if !is_stdlib(pkg.uuid) && endswith(pkg.name, "_jll") && pkg.version isa VersionNumber + if 
!is_stdlib(pkg.uuid, julia_version) && endswith(pkg.name, "_jll") && pkg.version isa VersionNumber jll_fix[pkg.uuid] = pkg.version end end @@ -468,7 +786,12 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn end end # this also sets pkg.version for fixed packages - fixed = collect_fixed!(env, filter(!is_tracking_registry, pkgs), names) + pkgs_fixed = filter(!is_tracking_registry, pkgs) + fixed, new_fixed_pkgs = collect_fixed!(env, pkgs_fixed, names, julia_version) + for new_pkg in new_fixed_pkgs + any(x -> x.uuid == new_pkg.uuid, pkgs) && continue + push!(pkgs, new_pkg) + end # non fixed packages are `add`ed by version: their version is either restricted or free # fixed packages are `dev`ed or `add`ed by repo # at this point, fixed packages have a version and `deps` @@ -480,8 +803,11 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn compat = get_compat_workspace(env, pkg.name) v = intersect(pkg.version, compat) if isempty(v) - throw(Resolve.ResolverError( - "empty intersection between $(pkg.name)@$(pkg.version) and project compatibility $(compat)")) + throw( + Resolve.ResolverError( + "empty intersection between $(pkg.name)@$(pkg.version) and project compatibility $(compat)" + ) + ) end # Work around not clobbering 0.x.y+ for checked out old type of packages if !(pkg.version isa VersionNumber) @@ -496,8 +822,9 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn # Unless using the unbounded or historical resolver, always allow stdlibs to update. Helps if the previous resolve # happened on a different julia version / commit and the stdlib version in the manifest is not the current stdlib version unbind_stdlibs = julia_version === VERSION - reqs = Resolve.Requires(pkg.uuid => is_stdlib(pkg.uuid) && unbind_stdlibs ? 
VersionSpec("*") : VersionSpec(pkg.version) for pkg in pkgs) - graph, compat_map = deps_graph(env, registries, names, reqs, fixed, julia_version, installed_only) + reqs = Resolve.Requires(pkg.uuid => is_stdlib(pkg.uuid, julia_version) && unbind_stdlibs ? VersionSpec("*") : VersionSpec(pkg.version) for pkg in pkgs) + deps_map_compressed, compat_map_compressed, weak_deps_map_compressed, weak_compat_map_compressed, pkg_versions_map, pkg_versions_per_registry, uuid_to_name, reqs, fixed = deps_graph(env, registries, names, reqs, fixed, julia_version, installed_only) + graph = Resolve.Graph(deps_map_compressed, compat_map_compressed, weak_deps_map_compressed, weak_compat_map_compressed, pkg_versions_map, pkg_versions_per_registry, uuid_to_name, reqs, fixed, false, julia_version) Resolve.simplify_graph!(graph) vers = Resolve.resolve(graph) @@ -508,6 +835,13 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn # We only fixup a JLL if the old major/minor/patch matches the new major/minor/patch if old_v !== nothing && Base.thispatch(old_v) == Base.thispatch(vers_fix[uuid]) vers_fix[uuid] = old_v + # Add old_v to pkg_versions_map so it's considered available + # even if it was yanked (needed for sysimage compatibility) + versions_for_pkg = get!(pkg_versions_map, uuid, VersionNumber[]) + if !(old_v in versions_for_pkg) + push!(versions_for_pkg, old_v) + sort!(versions_for_pkg) + end end end vers = vers_fix @@ -521,9 +855,13 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn pkg.version = vers[pkg.uuid] else name = is_stdlib(uuid) ? 
stdlib_infos()[uuid].name : registered_name(registries, uuid) - push!(pkgs, PackageSpec(;name=name, uuid=uuid, version=ver)) + push!(pkgs, PackageSpec(; name = name, uuid = uuid, version = ver)) end end + + # Collect all UUIDs that will be in the manifest + pkgs_uuids = Set{UUID}(pkg.uuid for pkg in pkgs) + final_deps_map = Dict{UUID, Dict{String, UUID}}() for pkg in pkgs load_tree_hash!(registries, pkg, julia_version) @@ -531,13 +869,25 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn if pkg.uuid in keys(fixed) deps_fixed = Dict{String, UUID}() for dep in keys(fixed[pkg.uuid].requires) + # Only include deps that are actually in the manifest + dep in pkgs_uuids || continue deps_fixed[names[dep]] = dep end deps_fixed else d = Dict{String, UUID}() - for (uuid, _) in compat_map[pkg.uuid][pkg.version] - d[names[uuid]] = uuid + available_versions = get(Vector{VersionNumber}, pkg_versions_map, pkg.uuid) + if !(pkg.version in available_versions) + pkgerror("version $(pkg.version) of package $(pkg.name) is not available. 
Available versions: $(join(available_versions, ", "))") + end + deps_for_version = Registry.query_deps_for_version( + deps_map_compressed, weak_deps_map_compressed, + pkg.uuid, pkg.version + ) + for uuid in deps_for_version + # Only include deps that are actually in the manifest + uuid in pkgs_uuids || continue + d[names[uuid]] = uuid end d end @@ -549,29 +899,54 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn return final_deps_map end -get_or_make!(d::Dict{K,V}, k::K) where {K,V} = get!(d, k) do; V() end +get_or_make!(d::Dict{K, V}, k::K) where {K, V} = get!(d, k) do; + V() +end const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e") const PKGORIGIN_HAVE_VERSION = :version in fieldnames(Base.PkgOrigin) -function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}, uuid_to_name::Dict{UUID,String}, - reqs::Resolve.Requires, fixed::Dict{UUID,Resolve.Fixed}, julia_version, - installed_only::Bool) +function deps_graph( + env::EnvCache, registries::Vector{Registry.RegistryInstance}, uuid_to_name::Dict{UUID, String}, + reqs::Resolve.Requires, fixed::Dict{UUID, Resolve.Fixed}, julia_version, + installed_only::Bool + ) uuids = Set{UUID}() union!(uuids, keys(reqs)) union!(uuids, keys(fixed)) - for fixed_uuids in map(fx->keys(fx.requires), values(fixed)) + for fixed_uuids in map(fx -> keys(fx.requires), values(fixed)) union!(uuids, fixed_uuids) end + # Collect all weak dependency UUIDs from fixed packages + all_weak_uuids = Set{UUID}() + for fx in values(fixed) + union!(all_weak_uuids, fx.weak) + end + stdlibs_for_julia_version = Types.get_last_stdlibs(julia_version) seen = Set{UUID}() - # pkg -> version -> (dependency => compat): - all_compat = Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}}() - weak_compat = Dict{UUID,Dict{VersionNumber,Set{UUID}}}() + # pkg -> vector of (registry data) for handling multiple registries correctly + # Each element in the vector represents data from one registry + 
all_deps_compressed = Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}}() + all_compat_compressed = Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}}() + weak_deps_compressed = Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}}() + weak_compat_compressed = Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}}() + + # pkg -> list of valid versions: + pkg_versions = Dict{UUID, Vector{VersionNumber}}() + + # pkg -> vector of (versions from each registry) - parallel to the compressed data vectors + # This tracks which versions came from which registry to avoid cross-registry compat pollution + pkg_versions_per_registry = Dict{UUID, Vector{Set{VersionNumber}}}() for (fp, fx) in fixed - all_compat[fp] = Dict(fx.version => Dict{UUID,VersionSpec}()) + all_deps_compressed[fp] = [Dict{VersionRange, Set{UUID}}()] + all_compat_compressed[fp] = [Dict{VersionRange, Dict{UUID, VersionSpec}}()] + weak_deps_compressed[fp] = [Dict{VersionRange, Set{UUID}}()] + weak_compat_compressed[fp] = [Dict{VersionRange, Dict{UUID, VersionSpec}}()] + pkg_versions[fp] = [fx.version] + pkg_versions_per_registry[fp] = [Set([fx.version])] end while true @@ -580,85 +955,138 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance} for uuid in unseen push!(seen, uuid) uuid in keys(fixed) && continue - all_compat_u = get_or_make!(all_compat, uuid) - weak_compat_u = get_or_make!(weak_compat, uuid) uuid_is_stdlib = haskey(stdlibs_for_julia_version, uuid) # If we're requesting resolution of a package that is an # unregistered stdlib we must special-case it here. This is further # complicated by the fact that we can ask this question relative to # a Julia version. 
+ # CRITICAL: Never resolve stdlibs from registry for target julia_version if (julia_version != VERSION && is_unregistered_stdlib(uuid)) || uuid_is_stdlib # We use our historical stdlib versioning data to unpack the version, deps and weakdeps of this uuid stdlib_info = stdlibs_for_julia_version[uuid] v = something(stdlib_info.version, VERSION) - all_compat_u_vr = get_or_make!(all_compat_u, v) + # For stdlibs, create a single registry entry + stdlib_deps = Dict{VersionRange, Set{UUID}}() + stdlib_compat = Dict{VersionRange, Dict{UUID, VersionSpec}}() + stdlib_weak_deps = Dict{VersionRange, Set{UUID}}() + stdlib_weak_compat = Dict{VersionRange, Dict{UUID, VersionSpec}}() + + vrange = VersionRange(v, v) + deps_set = Set{UUID}() for other_uuid in stdlib_info.deps push!(uuids, other_uuid) - all_compat_u_vr[other_uuid] = VersionSpec() + push!(deps_set, other_uuid) end + stdlib_deps[vrange] = deps_set + stdlib_compat[vrange] = Dict{UUID, VersionSpec}() if !isempty(stdlib_info.weakdeps) - weak_all_compat_u_vr = get_or_make!(weak_compat_u, v) + weak_deps_set = Set{UUID}() for other_uuid in stdlib_info.weakdeps push!(uuids, other_uuid) - all_compat_u_vr[other_uuid] = VersionSpec() - push!(weak_all_compat_u_vr, other_uuid) + push!(weak_deps_set, other_uuid) end + stdlib_weak_deps[vrange] = weak_deps_set + stdlib_weak_compat[vrange] = Dict{UUID, VersionSpec}() end + + all_deps_compressed[uuid] = [stdlib_deps] + all_compat_compressed[uuid] = [stdlib_compat] + weak_deps_compressed[uuid] = [stdlib_weak_deps] + weak_compat_compressed[uuid] = [stdlib_weak_compat] + pkg_versions[uuid] = [v] + pkg_versions_per_registry[uuid] = [Set([v])] else + # Accumulate valid versions from all registries + valid_versions = VersionNumber[] + # Store per-registry data separately - don't merge! 
+ pkg_deps_list = Vector{Dict{VersionRange, Set{UUID}}}() + pkg_compat_list = Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}() + pkg_weak_deps_list = Vector{Dict{VersionRange, Set{UUID}}}() + pkg_weak_compat_list = Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}() + pkg_versions_per_reg = Vector{Set{VersionNumber}}() + for reg in registries pkg = get(reg, uuid, nothing) pkg === nothing && continue - info = Registry.registry_info(pkg) - - function add_compat!(d, cinfo) - for (v, compat_info) in cinfo - # Filter yanked and if we are in offline mode also downloaded packages - # TODO, pull this into a function - Registry.isyanked(info, v) && continue - if installed_only - pkg_spec = PackageSpec(name=pkg.name, uuid=pkg.uuid, version=v, tree_hash=Registry.treehash(info, v)) - is_package_downloaded(env.manifest_file, pkg_spec) || continue - end + info = Registry.registry_info(reg, pkg) + + # Build filtered version list for this registry + reg_valid_versions = Set{VersionNumber}() + for v in keys(info.version_info) + # Filter yanked and if we are in offline mode also downloaded packages + Registry.isyanked(info, v) && continue + if installed_only + pkg_spec = PackageSpec(name = pkg.name, uuid = pkg.uuid, version = v, tree_hash = Registry.treehash(info, v)) + is_package_downloaded(env.manifest_file, pkg_spec) || continue + end - # Skip package version that are not the same as external packages in sysimage - if PKGORIGIN_HAVE_VERSION && RESPECT_SYSIMAGE_VERSIONS[] && julia_version == VERSION - pkgid = Base.PkgId(uuid, pkg.name) - if Base.in_sysimage(pkgid) - pkgorigin = get(Base.pkgorigins, pkgid, nothing) - if pkgorigin !== nothing && pkgorigin.version !== nothing - if v != pkgorigin.version - continue - end + # Skip package version that are not the same as external packages in sysimage + if PKGORIGIN_HAVE_VERSION && RESPECT_SYSIMAGE_VERSIONS[] && julia_version == VERSION + pkgid = Base.PkgId(uuid, pkg.name) + if Base.in_sysimage(pkgid) + pkgorigin = 
get(Base.pkgorigins, pkgid, nothing) + if pkgorigin !== nothing && pkgorigin.version !== nothing + if v != pkgorigin.version + continue end end end - dv = get_or_make!(d, v) - merge!(dv, compat_info) - union!(uuids, keys(compat_info)) end + + push!(reg_valid_versions, v) + push!(valid_versions, v) + end + + # Only add this registry's data if it has valid versions + if !isempty(reg_valid_versions) + # Store the full compressed data along with which versions are valid + # The query function will check version membership to avoid cross-registry pollution + push!(pkg_deps_list, info.deps) + push!(pkg_compat_list, info.compat) + push!(pkg_weak_deps_list, info.weak_deps) + push!(pkg_weak_compat_list, info.weak_compat) + push!(pkg_versions_per_reg, reg_valid_versions) end - add_compat!(all_compat_u, Registry.compat_info(info)) - weak_compat_info = Registry.weak_compat_info(info) - if weak_compat_info !== nothing - add_compat!(all_compat_u, weak_compat_info) - # Version to Set - for (v, compat_info) in weak_compat_info - weak_compat_u[v] = keys(compat_info) + + # Collect all dependency UUIDs for discovery + for deps_dict in (info.deps, info.weak_deps) + for (vrange, deps_set) in deps_dict + union!(uuids, deps_set) end end end + + # After processing all registries, sort and store the accumulated versions + pkg_versions[uuid] = sort!(unique!(valid_versions)) + + # Store the per-registry data + all_deps_compressed[uuid] = pkg_deps_list + all_compat_compressed[uuid] = pkg_compat_list + weak_deps_compressed[uuid] = pkg_weak_deps_list + weak_compat_compressed[uuid] = pkg_weak_compat_list + pkg_versions_per_registry[uuid] = pkg_versions_per_reg end end end + # Track weak dependencies that are not available in any registry + unavailable_weak_uuids = Set{UUID}() + for uuid in uuids uuid == JULIA_UUID && continue if !haskey(uuid_to_name, uuid) name = registered_name(registries, uuid) - name === nothing && pkgerror("cannot find name corresponding to UUID $(uuid) in a registry") + if 
name === nothing + # Allow weak dependencies to be missing from registries + if uuid in all_weak_uuids + push!(unavailable_weak_uuids, uuid) + continue + end + pkgerror("cannot find name corresponding to UUID $(uuid) in a registry") + end uuid_to_name[uuid] = name entry = manifest_info(env.manifest, uuid) entry ≡ nothing && continue @@ -666,8 +1094,24 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance} end end - return Resolve.Graph(all_compat, weak_compat, uuid_to_name, reqs, fixed, false, julia_version), - all_compat + # Filter out unavailable weak dependencies from fixed packages + if !isempty(unavailable_weak_uuids) + fixed_filtered = Dict{UUID, Resolve.Fixed}() + for (uuid, fx) in fixed + filtered_requires = Requires() + for (req_uuid, req_spec) in fx.requires + if !(req_uuid in unavailable_weak_uuids) + filtered_requires[req_uuid] = req_spec + end + end + # Also filter the weak set + filtered_weak = setdiff(fx.weak, unavailable_weak_uuids) + fixed_filtered[uuid] = Resolve.Fixed(fx.version, filtered_requires, filtered_weak) + end + fixed = fixed_filtered + end + + return all_deps_compressed, all_compat_compressed, weak_deps_compressed, weak_compat_compressed, pkg_versions, pkg_versions_per_registry, uuid_to_name, reqs, fixed end ######################## @@ -683,11 +1127,18 @@ end # Returns if archive successfully installed function install_archive( - urls::Vector{Pair{String,Bool}}, - hash::SHA1, - version_path::String; - io::IO=stderr_f() -)::Bool + urls::Vector{Pair{String, Bool}}, + hash::SHA1, + version_path::String; + name::Union{String, Nothing} = nothing, + io::IO = stderr_f() + )::Bool + # Because we use `mv_temp_dir_retries` which uses `rename` not `mv` it can fail if the temp + # files are on a different fs. So use a temp dir in the same depot dir as some systems might + # be serving different parts of the depot on different filesystems via links i.e. pkgeval does this. 
+ depot_temp = mkpath(joinpath(dirname(dirname(version_path)), "temp")) # .julia/packages/temp + create_cachedir_tag(dirname(dirname(version_path))) + tmp_objects = String[] url_success = false for (url, top) in urls @@ -695,19 +1146,21 @@ function install_archive( push!(tmp_objects, path) # for cleanup url_success = true try - PlatformEngines.download(url, path; verbose=false, io=io) + PlatformEngines.download(url, path; verbose = false, io = io) catch e e isa InterruptException && rethrow() url_success = false end url_success || continue - dir = joinpath(tempdir(), randstring(12)) + # the temp dir should be in the same depot because the `rename` operation in `mv_temp_dir_retries` + # is possible only if the source and destination are on the same filesystem + dir = tempname(depot_temp) * randstring(6) push!(tmp_objects, dir) # for cleanup # Might fail to extract an archive (https://github.com/JuliaPackaging/PkgServer.jl/issues/126) try - unpack(path, dir; verbose=false) + unpack(path, dir; verbose = false) catch e - e isa InterruptException && rethrow() + e isa ProcessFailedException || rethrow() @warn "failed to extract archive downloaded from $(url)" url_success = false end @@ -722,52 +1175,67 @@ function install_archive( unpacked = joinpath(dir, dirs[1]) end # Assert that the tarball unpacked to the tree sha we wanted - # TODO: Enable on Windows when tree_hash handles - # executable bits correctly, see JuliaLang/julia #33212. 
- if !Sys.iswindows() - if SHA1(GitTools.tree_hash(unpacked)) != hash - @warn "tarball content does not match git-tree-sha1" - url_success = false - end - url_success || continue + computed_hash = GitTools.tree_hash(unpacked) + if SHA1(computed_hash) != hash + @warn "Downloaded package content does not match expected hash (git-tree-sha1); skipping this source" package = name url = url expected = hash computed = computed_hash + url_success = false end + url_success || continue + # Move content to version path - !isdir(version_path) && mkpath(version_path) - mv(unpacked, version_path; force=true) + !isdir(dirname(version_path)) && mkpath(dirname(version_path)) + mv_temp_dir_retries(unpacked, version_path; set_permissions = false) + break # successful install end # Clean up and exit - foreach(x -> Base.rm(x; force=true, recursive=true), tmp_objects) + foreach(x -> Base.rm(x; force = true, recursive = true), tmp_objects) return url_success end -const refspecs = ["+refs/*:refs/remotes/cache/*"] +const refspecs = ["+refs/*:refs/cache/*"] function install_git( - io::IO, - uuid::UUID, - name::String, - hash::SHA1, - urls::Set{String}, - version_path::String -)::Nothing + io::IO, + uuid::UUID, + name::String, + hash::SHA1, + urls::Set{String}, + version_path::String + )::Nothing + if isempty(urls) + pkgerror( + "Package $name [$uuid] has no repository URL available. This could happen if:\n" * + " - The package is not registered in any configured registry\n" * + " - The package exists in a registry but lacks repository information\n" * + " - Registry files are corrupted or incomplete\n" * + " - Network issues prevented registry updates\n" * + "Please check that the package name is correct and that your registries are up to date." 
+ ) + end + repo = nothing tree = nothing # TODO: Consolidate this with some of the repo handling in Types.jl try clones_dir = joinpath(depots1(), "clones") ispath(clones_dir) || mkpath(clones_dir) + create_cachedir_tag(clones_dir) repo_path = joinpath(clones_dir, string(uuid)) - repo = GitTools.ensure_clone(io, repo_path, first(urls); isbare=true, - header = "[$uuid] $name from $(first(urls))") + first_url = first(urls) + repo = GitTools.ensure_clone( + io, repo_path, first_url; isbare = true, + header = "[$uuid] $name from $first_url", depth = 1 + ) git_hash = LibGit2.GitHash(hash.bytes) for url in urls - try LibGit2.with(LibGit2.GitObject, repo, git_hash) do g + try + LibGit2.with(LibGit2.GitObject, repo, git_hash) do g end break # object was found, we can stop catch err err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow() end - GitTools.fetch(io, repo, url, refspecs=refspecs) + GitTools.fetch(io, repo, url, refspecs = refspecs, depth = LibGit2.Consts.FETCH_DEPTH_UNSHALLOW) end tree = try LibGit2.GitObject(repo, git_hash) @@ -778,6 +1246,7 @@ function install_git( tree isa LibGit2.GitTree || error("$name: git object $(string(hash)) should be a tree, not $(typeof(tree))") mkpath(version_path) + create_cachedir_tag(dirname(dirname(version_path))) GitTools.checkout_tree_to_path(repo, tree, version_path) return finally @@ -786,9 +1255,9 @@ function install_git( end end -function collect_artifacts(pkg_root::String; platform::AbstractPlatform=HostPlatform()) +function collect_artifacts(pkg_root::String; platform::AbstractPlatform = HostPlatform(), include_lazy::Bool = false) # Check to see if this package has an (Julia)Artifacts.toml - artifacts_tomls = Tuple{String,Base.TOML.TOMLDict}[] + artifacts_tomls = Tuple{String, Base.TOML.TOMLDict}[] for f in artifact_names artifacts_toml = joinpath(pkg_root, f) if isfile(artifacts_toml) @@ -799,18 +1268,19 @@ function collect_artifacts(pkg_root::String; platform::AbstractPlatform=HostPlat # Despite 
the fact that we inherit the project, since the in-memory manifest # has not been updated yet, if we try to load any dependencies, it may fail. # Therefore, this project inheritance is really only for Preferences, not dependencies. - select_cmd = Cmd(`$(gen_build_code(selector_path; inherit_project=true)) --compile=min -t1 --startup-file=no $(triplet(platform))`) + # We only guarantee access to the `stdlib`, which is why we set `add_stdlib` here. + select_cmd = Cmd(`$(gen_build_code(selector_path; inherit_project=true, add_stdlib=true)) --compile=min -t1 --startup-file=no $(triplet(platform))`) meta_toml = String(read(select_cmd)) res = TOML.tryparse(meta_toml) if res isa TOML.ParserError - errstr = sprint(showerror, res; context=stderr) + errstr = sprint(showerror, res; context = stderr) pkgerror("failed to parse TOML output from running $(repr(selector_path)), got: \n$errstr") else push!(artifacts_tomls, (artifacts_toml, TOML.parse(meta_toml))) end else # Otherwise, use the standard selector from `Artifacts` - artifacts = select_downloadable_artifacts(artifacts_toml; platform) + artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy) push!(artifacts_tomls, (artifacts_toml, artifacts)) end break @@ -827,28 +1297,42 @@ mutable struct DownloadState const bar::MiniProgressBar end -function download_artifacts(ctx::Context; - platform::AbstractPlatform=HostPlatform(), - julia_version = VERSION, - verbose::Bool=false) +function download_artifacts( + ctx::Context, pkgs; + platform::AbstractPlatform = HostPlatform(), + julia_version = VERSION, + verbose::Bool = false, + io::IO = stderr_f(), + include_lazy::Bool = false + ) env = ctx.env io = ctx.io fancyprint = can_fancyprint(io) - pkg_roots = String[] + pkg_info = Tuple{String, Union{Base.UUID, Nothing}}[] + pkg_uuids = Set(pkg.uuid for pkg in pkgs) for (uuid, pkg) in env.manifest + uuid in pkg_uuids || continue pkg = manifest_info(env.manifest, uuid) pkg_root = source_path(env.manifest_file, 
pkg, julia_version) - pkg_root === nothing || push!(pkg_roots, pkg_root) + pkg_root === nothing || push!(pkg_info, (pkg_root, uuid)) end - push!(pkg_roots, dirname(env.project_file)) + push!(pkg_info, (dirname(env.project_file), env.pkg !== nothing ? env.pkg.uuid : nothing)) download_jobs = Dict{SHA1, Function}() + # Check what registries the current pkg server tracks + # Disable if precompiling to not access internet + server_registry_info = if Base.JLOptions().incremental == 0 + Registry.pkg_server_registry_info() + else + nothing + end + print_lock = Base.ReentrantLock() # for non-fancyprint printing download_states = Dict{SHA1, DownloadState}() errors = Channel{Any}(Inf) - is_done = false + is_done = Ref{Bool}(false) ansi_moveup(n::Int) = string("\e[", n, "A") ansi_movecol1 = "\e[1G" ansi_cleartoend = "\e[0J" @@ -856,49 +1340,59 @@ function download_artifacts(ctx::Context; ansi_enablecursor = "\e[?25h" ansi_disablecursor = "\e[?25l" - all_collected_artifacts = reduce(vcat, map(pkg_root -> collect_artifacts(pkg_root; platform), pkg_roots)) - used_artifact_tomls = Set{String}(map(first, all_collected_artifacts)) - longest_name_length = maximum(all_collected_artifacts; init=0) do (artifacts_toml, artifacts) - maximum(textwidth, keys(artifacts); init=0) + all_collected_artifacts = reduce( + vcat, map( + ((pkg_root, pkg_uuid),) -> + map(ca -> (ca[1], ca[2], pkg_uuid), collect_artifacts(pkg_root; platform, include_lazy)), pkg_info + ) + ) + used_artifact_tomls = Set{String}(map(ca -> ca[1], all_collected_artifacts)) + longest_name_length = maximum(all_collected_artifacts; init = 0) do (artifacts_toml, artifacts, pkg_uuid) + maximum(textwidth, keys(artifacts); init = 0) end - for (artifacts_toml, artifacts) in all_collected_artifacts + for (artifacts_toml, artifacts, pkg_uuid) in all_collected_artifacts # For each Artifacts.toml, install each artifact we've collected from it for name in keys(artifacts) local rname = rpad(name, longest_name_length) - local hash = 
SHA1(artifacts[name]["git-tree-sha1"]) - local bar = MiniProgressBar(;header=rname, main=false, indent=2, color = Base.info_color(), mode=:data, always_reprint=true) + local hash = SHA1(artifacts[name]["git-tree-sha1"]::String) + local bar = MiniProgressBar(; header = rname, main = false, indent = 2, color = Base.info_color()::Symbol, mode = :data, always_reprint = true) local dstate = DownloadState(:ready, "", time_ns(), Base.ReentrantLock(), bar) - function progress(total, current; status="") + function progress(total, current; status = "") local t = time_ns() if isempty(status) dstate.bar.max = total dstate.bar.current = current end - lock(dstate.status_lock) do + return lock(dstate.status_lock) do dstate.status = status dstate.status_update_time = t end end + # Check if the current package is eligible for PkgServer artifact downloads + local pkg_server_eligible = pkg_uuid !== nothing && Registry.is_pkg_in_pkgserver_registry(pkg_uuid, server_registry_info, ctx.registries) + # returns a string if exists, or function that downloads the artifact if not - local ret = ensure_artifact_installed(name, artifacts[name], artifacts_toml; - verbose, quiet_download=!(usable_io(io)), io, progress) + local ret = ensure_artifact_installed( + name, artifacts[name], artifacts_toml; + pkg_server_eligible, verbose, quiet_download = !(usable_io(io)), io, progress + ) if ret isa Function download_states[hash] = dstate download_jobs[hash] = () -> begin - try - dstate.state = :running - ret() - if !fancyprint - @lock print_lock printpkgstyle(io, :Installed, "artifact $rname $(MiniProgressBars.pkg_format_bytes(dstate.bar.max; sigdigits=1))") - end - catch - dstate.state = :failed - rethrow() - else - dstate.state = :done + try + dstate.state = :running + ret() + if !fancyprint && dstate.bar.max > 1 # if another process downloaded, then max is never set greater than 1 + @lock print_lock printpkgstyle(io, :Installed, "artifact $rname $(MiniProgressBars.pkg_format_bytes(dstate.bar.max; 
sigdigits = 1))") end + catch + dstate.state = :failed + rethrow() + else + dstate.state = :done end + end end end end @@ -908,39 +1402,39 @@ function download_artifacts(ctx::Context; t_print = Threads.@spawn begin try print(io, ansi_disablecursor) - first = true - timer = Timer(0, interval=1/10) + first = Ref(true) + timer = Timer(0, interval = 1 / 10) # TODO: Implement as a new MiniMultiProgressBar - main_bar = MiniProgressBar(; indent=2, header = "Installing artifacts", color = :green, mode = :int, always_reprint=true) + main_bar = MiniProgressBar(; indent = 2, header = "Installing artifacts", color = :green, mode = :int, always_reprint = true) main_bar.max = length(download_states) - while !is_done + while !is_done[] main_bar.current = count(x -> x.state == :done, values(download_states)) - str = sprint(context=io) do iostr - first || print(iostr, ansi_cleartoend) + local str = sprint(context = io) do iostr + first[] || print(iostr, ansi_cleartoend) n_printed = 1 - show_progress(iostr, main_bar; carriagereturn=false) + show_progress(iostr, main_bar; carriagereturn = false) println(iostr) - for dstate in sort!(collect(values(download_states)), by=v->v.bar.max, rev=true) - local status, status_update_time = lock(()->(dstate.status, dstate.status_update_time), dstate.status_lock) + for dstate in sort!(collect(values(download_states)), by = v -> v.bar.max, rev = true) + local status, status_update_time = lock(() -> (dstate.status, dstate.status_update_time), dstate.status_lock) # only update the bar's status message if it is stalled for at least 0.5 s. # If the new status message is empty, go back to showing the bar without waiting. 
if isempty(status) || time_ns() - status_update_time > UInt64(500_000_000) dstate.bar.status = status end dstate.state == :running && (dstate.bar.max > 1000 || !isempty(dstate.bar.status)) || continue - show_progress(iostr, dstate.bar; carriagereturn=false) + show_progress(iostr, dstate.bar; carriagereturn = false) println(iostr) n_printed += 1 end - is_done || print(iostr, ansi_moveup(n_printed), ansi_movecol1) - first = false + is_done[] || print(iostr, ansi_moveup(n_printed), ansi_movecol1) + first[] = false end print(io, str) wait(timer) end print(io, ansi_cleartoend) main_bar.current = count(x -> x[2].state == :done, download_states) - show_progress(io, main_bar; carriagereturn=false) + show_progress(io, main_bar; carriagereturn = false) println(io) catch e e isa InterruptException || rethrow() @@ -953,26 +1447,26 @@ function download_artifacts(ctx::Context; printpkgstyle(io, :Installing, "$(length(download_jobs)) artifacts") end sema = Base.Semaphore(ctx.num_concurrent_downloads) - interrupted = false + interrupted = Ref{Bool}(false) @sync for f in values(download_jobs) - interrupted && break + interrupted[] && break Base.acquire(sema) Threads.@spawn try f() catch e - e isa InterruptException && (interrupted = true) + e isa InterruptException && (interrupted[] = true) put!(errors, e) finally Base.release(sema) end end - is_done = true + is_done[] = true fancyprint && wait(t_print) close(errors) if !isempty(errors) all_errors = collect(errors) - str = sprint(context=io) do iostr + local str = sprint(context = io) do iostr for e in all_errors Base.showerror(iostr, e) length(all_errors) > 1 && println(iostr) @@ -982,12 +1476,22 @@ function download_artifacts(ctx::Context; end end - for f in used_artifact_tomls - write_env_usage(f, "artifact_usage.toml") - end + + return write_env_usage(used_artifact_tomls, "artifact_usage.toml") end -function check_artifacts_downloaded(pkg_root::String; platform::AbstractPlatform=HostPlatform()) +function download_artifacts( + 
ctx::Context; + platform::AbstractPlatform = HostPlatform(), + julia_version = VERSION, + verbose::Bool = false, + io::IO = stderr_f(), + include_lazy::Bool = false + ) + return download_artifacts(ctx, values(ctx.env.manifest); platform, julia_version, verbose, io, include_lazy) +end + +function check_artifacts_downloaded(pkg_root::String; platform::AbstractPlatform = HostPlatform()) for (artifacts_toml, artifacts) in collect_artifacts(pkg_root; platform) for name in keys(artifacts) if !artifact_exists(Base.SHA1(artifacts[name]["git-tree-sha1"])) @@ -1005,7 +1509,7 @@ function find_urls(registries::Vector{Registry.RegistryInstance}, uuid::UUID) for reg in registries reg_pkg = get(reg, uuid, nothing) reg_pkg === nothing && continue - info = Registry.registry_info(reg_pkg) + info = Registry.registry_info(reg, reg_pkg) repo = info.repo repo === nothing && continue push!(urls, repo) @@ -1014,15 +1518,49 @@ function find_urls(registries::Vector{Registry.RegistryInstance}, uuid::UUID) end -function download_source(ctx::Context; readonly=true) - pkgs_to_install = NamedTuple{(:pkg, :urls, :path), Tuple{PackageEntry, Set{String}, String}}[] - for pkg in values(ctx.env.manifest) +download_source(ctx::Context; readonly::Bool = true) = download_source(ctx, collect(values(ctx.env.manifest)); readonly) + +function count_artifacts(pkg_root::String; platform::AbstractPlatform = HostPlatform()) + for f in artifact_names + artifacts_toml = joinpath(pkg_root, f) + if isfile(artifacts_toml) + eager = select_downloadable_artifacts(artifacts_toml; platform, include_lazy = false) + all_matching = select_downloadable_artifacts(artifacts_toml; platform, include_lazy = true) + return (length(eager), length(all_matching) - length(eager)) + end + end + return nothing +end + +function artifact_suffix(artifact_counts) + artifact_counts === nothing && return "" + n_eager, n_lazy = artifact_counts + n_eager + n_lazy == 0 && return " (no artifacts on this platform)" + return "" +end + +function 
download_source(ctx::Context, pkgs; readonly::Bool = true) + pidfile_stale_age = 10 # recommended value is about 3-5x an estimated normal download time (i.e. 2-3s) + pkgs_to_install = NamedTuple{(:pkg, :urls, :path), Tuple{eltype(pkgs), Set{String}, String}}[] + for pkg in pkgs tracking_registered_version(pkg, ctx.julia_version) || continue path = source_path(ctx.env.manifest_file, pkg, ctx.julia_version) path === nothing && continue - ispath(path) && continue + if ispath(path) && iswritable(path) + pidfile = path * ".pid" + else + # If the path is not writable, we cannot create a pidfile there so use one in the first depot. + # (pidlocking probably isn't needed as in this case the package source logically is already installed + # in the readonly depot, but keep the pidfile logic for consistency) + dir = joinpath(depots1(), "packages", pkg.name) + mkpath(dir) + iswritable(dir) || pkgerror("The primary depot is not writable") + pidfile = joinpath(dir, basename(path) * ".pid") + end + + FileWatching.mkpidlock(() -> ispath(path), pidfile, stale_age = pidfile_stale_age) && continue urls = find_urls(ctx.registries, pkg.uuid) - push!(pkgs_to_install, (;pkg, urls, path)) + push!(pkgs_to_install, (; pkg, urls, path)) end length(pkgs_to_install) == 0 && return Set{UUID}() @@ -1033,7 +1571,7 @@ function download_source(ctx::Context; readonly=true) missed_packages = eltype(pkgs_to_install)[] widths = [textwidth(pkg.name) for (pkg, _) in pkgs_to_install] - max_name = maximum(widths; init=0) + max_name = maximum(widths; init = 0) # Check what registries the current pkg server tracks # Disable if precompiling to not access internet @@ -1043,7 +1581,8 @@ function download_source(ctx::Context; readonly=true) nothing end - @sync begin + # use eager throw version + Base.Experimental.@sync begin jobs = Channel{eltype(pkgs_to_install)}(ctx.num_concurrent_downloads) results = Channel(ctx.num_concurrent_downloads) @@ -1053,61 +1592,69 @@ function download_source(ctx::Context; 
readonly=true) end end - for i in 1:ctx.num_concurrent_downloads + for i in 1:ctx.num_concurrent_downloads # (default 8) @async begin for (pkg, urls, path) in jobs - if ctx.use_git_for_all_downloads - put!(results, (pkg, false, (urls, path))) - continue - end - try - archive_urls = Pair{String,Bool}[] + mkpath(dirname(path)) # the `packages/Package` dir needs to exist for the pidfile to be created + FileWatching.mkpidlock(path * ".pid", stale_age = pidfile_stale_age) do + if ispath(path) + put!(results, (pkg, nothing, (urls, path))) + return + end + if ctx.use_git_for_all_downloads + put!(results, (pkg, false, (urls, path))) + return + end + archive_urls = Pair{String, Bool}[] # Check if the current package is available in one of the registries being tracked by the pkg server # In that case, download from the package server - if server_registry_info !== nothing + if Registry.is_pkg_in_pkgserver_registry(pkg.uuid, server_registry_info, ctx.registries) server, registry_info = server_registry_info - for reg in ctx.registries - if reg.uuid in keys(registry_info) - if haskey(reg, pkg.uuid) - url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)" - push!(archive_urls, url => true) - break - end - end - end + url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)" + push!(archive_urls, url => true) end for repo_url in urls url = get_archive_url_for_version(repo_url, pkg.tree_hash) url !== nothing && push!(archive_urls, url => false) end - success = install_archive(archive_urls, pkg.tree_hash, path, io=ctx.io) - if success && readonly - set_readonly(path) # In add mode, files should be read-only - end - if ctx.use_only_tarballs_for_downloads && !success - pkgerror("failed to get tarball from $(urls)") + try + success = install_archive(archive_urls, pkg.tree_hash, path; name = pkg.name, io = ctx.io) + if success && readonly + set_readonly(path) # In add mode, files should be read-only + end + if ctx.use_only_tarballs_for_downloads && !success + pkgerror("failed to get tarball 
from $(urls)") + end + put!(results, (pkg, success, (urls, path))) + catch err + put!(results, (pkg, err, catch_backtrace())) end - put!(results, (pkg, success, (urls, path))) - catch err - put!(results, (pkg, err, catch_backtrace())) end end end end - bar = MiniProgressBar(; indent=1, header = "Downloading packages", color = Base.info_color(), - mode=:int, always_reprint=true) + bar = MiniProgressBar(; + indent = 1, header = "Downloading packages", color = Base.info_color(), + mode = :int, always_reprint = true + ) bar.max = length(pkgs_to_install) fancyprint = can_fancyprint(ctx.io) try for i in 1:length(pkgs_to_install) - pkg::PackageEntry, exc_or_success, bt_or_pathurls = take!(results) - exc_or_success isa Exception && pkgerror("Error when installing package $(pkg.name):\n", - sprint(Base.showerror, exc_or_success, bt_or_pathurls)) - success, (urls, path) = exc_or_success, bt_or_pathurls + pkg::eltype(pkgs), exc_or_success_or_nothing, bt_or_pathurls = take!(results) + if exc_or_success_or_nothing isa Exception + exc = exc_or_success_or_nothing + pkgerror("Error when installing package $(pkg.name):\n", sprint(Base.showerror, exc, bt_or_pathurls)) + end + if exc_or_success_or_nothing === nothing + continue # represents when another process did the install + end + success = exc_or_success_or_nothing::Bool + (urls, path) = bt_or_pathurls::Tuple{Set{String}, String} success || push!(missed_packages, (; pkg, urls, path)) bar.current = i - str = sprint(; context=ctx.io) do io + str = sprint(; context = ctx.io) do io if success fancyprint && print_progress_bottom(io) vstr = if pkg.version !== nothing @@ -1116,7 +1663,8 @@ function download_source(ctx::Context; readonly=true) short_treehash = string(pkg.tree_hash)[1:16] "[$short_treehash]" end - printpkgstyle(io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr)) + artifact_str = artifact_suffix(count_artifacts(path)) + printpkgstyle(io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " 
", vstr, artifact_str)) fancyprint && show_progress(io, bar) end end @@ -1132,16 +1680,19 @@ function download_source(ctx::Context; readonly=true) # Use LibGit2 to download any remaining packages # ################################################## for (pkg, urls, path) in missed_packages - uuid = pkg.uuid - install_git(ctx.io, pkg.uuid, pkg.name, pkg.tree_hash, urls, path) - readonly && set_readonly(path) - vstr = if pkg.version !== nothing - "v$(pkg.version)" - else - short_treehash = string(pkg.tree_hash)[1:16] - "[$short_treehash]" + FileWatching.mkpidlock(path * ".pid", stale_age = pidfile_stale_age) do + ispath(path) && return + install_git(ctx.io, pkg.uuid, pkg.name, pkg.tree_hash, urls, path) + readonly && set_readonly(path) + vstr = if pkg.version !== nothing + "v$(pkg.version)" + else + short_treehash = string(pkg.tree_hash)[1:16] + "[$short_treehash]" + end + artifact_str = artifact_suffix(count_artifacts(path)) + printpkgstyle(ctx.io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr, artifact_str)) end - printpkgstyle(ctx.io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr)) end return Set{UUID}(entry.pkg.uuid for entry in pkgs_to_install) @@ -1150,7 +1701,6 @@ end ################################ # Manifest update and pruning # ################################ -project_rel_path(env::EnvCache, path::String) = normpath(joinpath(dirname(env.manifest_file), path)) function prune_manifest(env::EnvCache) # if project uses another manifest, only prune project entry in manifest @@ -1192,10 +1742,11 @@ function prune_deps(iterator, keep::Set{UUID}) end clean && break end + return end function record_project_hash(env::EnvCache) - env.manifest.other["project_hash"] = Types.workspace_resolve_hash(env) + return env.manifest.other["project_hash"] = Types.workspace_resolve_hash(env) end ######### @@ -1232,60 +1783,71 @@ function any_package_not_installed(manifest::Manifest) return false end -function build(ctx::Context, 
uuids::Set{UUID}, verbose::Bool) +function build(ctx::Context, uuids::Set{UUID}, verbose::Bool; allow_reresolve::Bool = true) if any_package_not_installed(ctx.env.manifest) || !isfile(ctx.env.manifest_file) Pkg.instantiate(ctx, allow_build = false, allow_autoprecomp = false) end all_uuids = get_deps(ctx.env, uuids) - build_versions(ctx, all_uuids; verbose) + return build_versions(ctx, all_uuids; verbose, allow_reresolve) end -function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID,Int} - order = Dict{UUID,Int}() +function dependency_order_visit!( + order::Dict{UUID, Int}, seen::Vector{UUID}, counter::Base.RefValue{Int}, + env::EnvCache, uuid::UUID + ) + uuid in seen && return @warn("Dependency graph not a DAG, linearizing anyway") + haskey(order, uuid) && return + push!(seen, uuid) + deps = if Types.is_project_uuid(env, uuid) + values(env.project.deps) + else + entry = manifest_info(env.manifest, uuid) + values(entry.deps) + end + for dep in deps + dependency_order_visit!(order, seen, counter, env, dep) + end + pop!(seen) + counter[] += 1 + order[uuid] = counter[] + return +end + +function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID, Int} + order = Dict{UUID, Int}() seen = UUID[] - k::Int = 0 - function visit(uuid::UUID) - uuid in seen && - return @warn("Dependency graph not a DAG, linearizing anyway") - haskey(order, uuid) && return - push!(seen, uuid) - if Types.is_project_uuid(env, uuid) - deps = values(env.project.deps) - else - entry = manifest_info(env.manifest, uuid) - deps = values(entry.deps) - end - foreach(visit, deps) - pop!(seen) - order[uuid] = k += 1 + counter = Ref(0) + for uuid in uuids + dependency_order_visit!(order, seen, counter, env, uuid) end - visit(uuid::String) = visit(UUID(uuid)) - foreach(visit, uuids) return order end -function gen_build_code(build_file::String; inherit_project::Bool = false) +function gen_build_code(build_file::String; inherit_project::Bool = false, add_stdlib::Bool = 
false) code = """ - $(Base.load_path_setup_code(false)) - cd($(repr(dirname(build_file)))) - include($(repr(build_file))) - """ + $(Base.load_path_setup_code(false)) + if $(add_stdlib) + push!(Base.LOAD_PATH, "@stdlib") + end + cd($(repr(dirname(build_file)))) + include($(repr(build_file))) + """ # This will make it so that running Pkg.build runs the build in a session with --startup=no # *unless* the parent julia session is started with --startup=yes explicitly. startup_flag = Base.JLOptions().startupfile == 1 ? "yes" : "no" return ``` - $(Base.julia_cmd()) -O0 --color=no --history-file=no - --startup-file=$startup_flag - $(inherit_project ? `--project=$(Base.active_project())` : ``) - --eval $code - ``` + $(Base.julia_cmd()) -O0 --color=no --history-file=no + --startup-file=$startup_flag + $(inherit_project ? `--project=$(Base.active_project())` : ``) + --eval $code + ``` end with_load_path(f::Function, new_load_path::String) = with_load_path(f, [new_load_path]) function with_load_path(f::Function, new_load_path::Vector{String}) old_load_path = copy(Base.LOAD_PATH) copy!(Base.LOAD_PATH, new_load_path) - try + return try f() finally copy!(LOAD_PATH, old_load_path) @@ -1297,9 +1859,9 @@ pkg_scratchpath() = joinpath(depots1(), "scratchspaces", PkgUUID) builddir(source_path::String) = joinpath(source_path, "deps") buildfile(source_path::String) = joinpath(builddir(source_path), "build.jl") -function build_versions(ctx::Context, uuids::Set{UUID}; verbose=false) +function build_versions(ctx::Context, uuids::Set{UUID}; verbose = false, allow_reresolve::Bool = true) # collect builds for UUIDs with `deps/build.jl` files - builds = Tuple{UUID,String,String,VersionNumber}[] + builds = Tuple{UUID, String, String, VersionNumber}[] for uuid in uuids is_stdlib(uuid) && continue if Types.is_project_uuid(ctx.env, uuid) @@ -1324,84 +1886,95 @@ function build_versions(ctx::Context, uuids::Set{UUID}; verbose=false) # toposort builds by dependencies order = 
dependency_order_uuids(ctx.env, UUID[first(build) for build in builds]) sort!(builds, by = build -> order[first(build)]) - max_name = maximum(build->textwidth(build[2]), builds; init=0) + max_name = maximum(build -> textwidth(build[2]), builds; init = 0) - bar = MiniProgressBar(; indent=2, header = "Building packages", color = Base.info_color(), - mode=:int, always_reprint=true) + bar = MiniProgressBar(; + indent = 2, header = "Building packages", color = Base.info_color(), + mode = :int, always_reprint = true + ) bar.max = length(builds) fancyprint = can_fancyprint(ctx.io) fancyprint && start_progress(ctx.io, bar) # build each package versions in a child process try - for (n, (uuid, name, source_path, version)) in enumerate(builds) - pkg = PackageSpec(;uuid=uuid, name=name, version=version) - build_file = buildfile(source_path) - # compatibility shim - local build_project_override, build_project_preferences - if isfile(projectfile_path(builddir(source_path))) - build_project_override = nothing - with_load_path([builddir(source_path), Base.LOAD_PATH...]) do - build_project_preferences = Base.get_preferences() - end - else - build_project_override = gen_target_project(ctx, pkg, source_path, "build") - with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do - build_project_preferences = Base.get_preferences() + for (n, (uuid, name, source_path, version)) in enumerate(builds) + pkg = PackageSpec(; uuid = uuid, name = name, version = version) + build_file = buildfile(source_path) + # compatibility shim + local build_project_override + build_project_preferences = if isfile(projectfile_path(builddir(source_path))) + build_project_override = nothing + with_load_path([builddir(source_path), Base.LOAD_PATH...]) do + Base.get_preferences() + end + else + build_project_override = gen_target_project(ctx, pkg, source_path, "build") + with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do + Base.get_preferences() + end end - end 
- # Put log output in Pkg's scratchspace if the package is content addressed - # by tree sha and in the build directory if it is tracked by path etc. - entry = manifest_info(ctx.env.manifest, uuid) - if entry !== nothing && entry.tree_hash !== nothing - key = string(entry.tree_hash) - scratch = joinpath(pkg_scratchpath(), key) - mkpath(scratch) - log_file = joinpath(scratch, "build.log") - # Associate the logfile with the package being built - dict = Dict{String,Any}(scratch => [ - Dict{String,Any}("time" => Dates.now(), "parent_projects" => [projectfile_path(source_path)]) - ]) - open(joinpath(depots1(), "logs", "scratch_usage.toml"), "a") do io - TOML.print(io, dict) + # Put log output in Pkg's scratchspace if the package is content addressed + # by tree sha and in the build directory if it is tracked by path etc. + entry = manifest_info(ctx.env.manifest, uuid) + if entry !== nothing && entry.tree_hash !== nothing + key = string(entry.tree_hash) + scratch = joinpath(pkg_scratchpath(), key) + mkpath(scratch) + create_cachedir_tag(joinpath(depots1(), "scratchspaces")) + log_file = joinpath(scratch, "build.log") + # Associate the logfile with the package being built + dict = Dict{String, Any}() + inner_dict = Dict{String, Any}() + inner_dict["time"] = Dates.now() + inner_dict["parent_projects"] = [projectfile_path(source_path)] + dict[scratch] = [inner_dict] + open(joinpath(depots1(), "logs", "scratch_usage.toml"), "a") do io + TOML.print(io, dict) + end + else + log_file = splitext(build_file)[1] * ".log" end - else - log_file = splitext(build_file)[1] * ".log" - end - - fancyprint && print_progress_bottom(ctx.io) - printpkgstyle(ctx.io, :Building, - rpad(name * " ", max_name + 1, "─") * "→ " * pathrepr(log_file)) - bar.current = n-1 + fancyprint && print_progress_bottom(ctx.io) - fancyprint && show_progress(ctx.io, bar) - - let log_file=log_file - sandbox(ctx, pkg, builddir(source_path), build_project_override; preferences=build_project_preferences) do - 
flush(ctx.io) - ok = open(log_file, "w") do log - std = verbose ? ctx.io : log - success(pipeline(gen_build_code(buildfile(source_path)), - stdout=std, stderr=std)) - end - ok && return - n_lines = isinteractive() ? 100 : 5000 - # TODO: Extract last n lines more efficiently - log_lines = readlines(log_file) - log_show = join(log_lines[max(1, length(log_lines) - n_lines):end], '\n') - full_log_at, last_lines = - if length(log_lines) > n_lines - "\n\nFull log at $log_file", - ", showing the last $n_lines of log" - else - "", "" + printpkgstyle( + ctx.io, :Building, + rpad(name * " ", max_name + 1, "─") * "→ " * pathrepr(log_file) + ) + bar.current = n - 1 + + fancyprint && show_progress(ctx.io, bar) + + let log_file = log_file + sandbox(ctx, pkg, builddir(source_path), build_project_override; preferences = build_project_preferences, allow_reresolve) do + flush(ctx.io) + ok = open(log_file, "w") do log + std = verbose ? ctx.io : log + success( + pipeline( + gen_build_code(buildfile(source_path)), + stdout = std, stderr = std + ) + ) + end + ok && return + n_lines = isinteractive() ? 
100 : 5000 + # TODO: Extract last n lines more efficiently + log_lines = readlines(log_file) + log_show = join(log_lines[max(1, length(log_lines) - n_lines):end], '\n') + full_log_at, last_lines = + if length(log_lines) > n_lines + "\n\nFull log at $log_file", + ", showing the last $n_lines of log" + else + "", "" + end + pkgerror("Error building `$(pkg.name)`$last_lines: \n$log_show$full_log_at") end - pkgerror("Error building `$(pkg.name)`$last_lines: \n$log_show$full_log_at") end end - end finally fancyprint && end_progress(ctx.io, bar) end @@ -1482,47 +2055,53 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode::PackageMode) record_project_hash(ctx.env) # update project & manifest write_env(ctx.env) - show_update(ctx.env, ctx.registries; io=ctx.io) + return show_update(ctx.env, ctx.registries; io = ctx.io) end -update_package_add(ctx::Context, pkg::PackageSpec, ::Nothing, source_path, source_repo, is_dep::Bool) = pkg -function update_package_add(ctx::Context, pkg::PackageSpec, entry::PackageEntry, source_path, source_repo, is_dep::Bool) +update_package_add(ctx::Context, pkg::PackageSpec, ::Nothing, is_dep::Bool) = pkg +function update_package_add(ctx::Context, pkg::PackageSpec, entry::PackageEntry, is_dep::Bool) if entry.pinned if pkg.version == VersionSpec() println(ctx.io, "`$(pkg.name)` is pinned at `v$(entry.version)`: maintaining pinned version") end - return PackageSpec(; uuid=pkg.uuid, name=pkg.name, pinned=true, - version=entry.version, tree_hash=entry.tree_hash, - path=entry.path, repo=entry.repo) + return PackageSpec(; + uuid = pkg.uuid, name = pkg.name, pinned = true, + version = entry.version, tree_hash = entry.tree_hash, + path = entry.path, repo = entry.repo + ) end if entry.path !== nothing || entry.repo.source !== nothing || pkg.repo.source !== nothing return pkg # overwrite everything, nothing to copy over end - if is_stdlib(pkg.uuid) + if is_stdlib(pkg.uuid, ctx.julia_version) return pkg # stdlibs are not versioned like other packages 
- elseif is_dep && ((isa(pkg.version, VersionNumber) && entry.version == pkg.version) || - (!isa(pkg.version, VersionNumber) && entry.version ∈ pkg.version)) + elseif is_dep && ( + (isa(pkg.version, VersionNumber) && entry.version == pkg.version) || + (!isa(pkg.version, VersionNumber) && entry.version ∈ pkg.version) + ) # leave the package as is at the installed version - return PackageSpec(; uuid=pkg.uuid, name=pkg.name, version=entry.version, - tree_hash=entry.tree_hash) + return PackageSpec(; + uuid = pkg.uuid, name = pkg.name, version = entry.version, + tree_hash = entry.tree_hash + ) end # adding a new version not compatible with the old version, so we just overwrite return pkg end # Update registries AND read them back in. -function update_registries(ctx::Context; force::Bool=true, kwargs...) +function update_registries(ctx::Context; force::Bool = true, kwargs...) OFFLINE_MODE[] && return !force && UPDATED_REGISTRY_THIS_SESSION[] && return - Registry.update(; io=ctx.io, kwargs...) + Registry.update(; io = ctx.io, kwargs...) copy!(ctx.registries, Registry.reachable_registries()) - UPDATED_REGISTRY_THIS_SESSION[] = true + return UPDATED_REGISTRY_THIS_SESSION[] = true end function is_all_registered(registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}) pkgs = filter(tracking_registered_version, pkgs) for pkg in pkgs - if !any(r->haskey(r, pkg.uuid), registries) + if !any(r -> haskey(r, pkg.uuid), registries) return pkg end end @@ -1530,9 +2109,32 @@ function is_all_registered(registries::Vector{Registry.RegistryInstance}, pkgs:: end function check_registered(registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}) + if isempty(registries) && !isempty(pkgs) + registry_pkgs = filter(tracking_registered_version, pkgs) + if !isempty(registry_pkgs) + pkgerror("no registries have been installed. 
Cannot resolve the following packages:\n$(join(map(pkg -> " " * err_rep(pkg), registry_pkgs), "\n"))") + end + end pkg = is_all_registered(registries, pkgs) if pkg isa PackageSpec - pkgerror("expected package $(err_rep(pkg)) to be registered") + msg = "expected package $(err_rep(pkg)) to be registered" + # check if the name exists in the registry with a different uuid + if pkg.name !== nothing + reg_uuid = Pair{String, Vector{UUID}}[] + for reg in registries + uuids = Registry.uuids_from_name(reg, pkg.name) + if !isempty(uuids) + push!(reg_uuid, reg.name => uuids) + end + end + if !isempty(reg_uuid) + msg *= "\n You may have provided the wrong UUID for package $(pkg.name).\n Found the following UUIDs for that name:" + for (reg, uuids) in reg_uuid + msg *= "\n - $(join(uuids, ", ")) from registry: $reg" + end + end + end + pkgerror(msg) end return nothing end @@ -1544,29 +2146,38 @@ function assert_can_add(ctx::Context, pkgs::Vector{PackageSpec}) # package with the same name exist in the project: assert that they have the same uuid existing_uuid = get(ctx.env.project.deps, pkg.name, pkg.uuid) existing_uuid == pkg.uuid || - pkgerror("""Refusing to add package $(err_rep(pkg)). - Package `$(pkg.name)=$(existing_uuid)` with the same name already exists as a direct dependency. - To remove the existing package, use `import Pkg; Pkg.rm("$(pkg.name)")`. - """) + pkgerror( + """Refusing to add package $(err_rep(pkg)). + Package `$(pkg.name)=$(existing_uuid)` with the same name already exists as a direct dependency. + To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm $(pkg.name)""" : """import Pkg; Pkg.rm("$(pkg.name)")""")`. + """ + ) # package with the same uuid exist in the project: assert they have the same name name = findfirst(==(pkg.uuid), ctx.env.project.deps) name === nothing || name == pkg.name || - pkgerror("""Refusing to add package $(err_rep(pkg)). - Package `$name=$(pkg.uuid)` with the same UUID already exists as a direct dependency. 
- To remove the existing package, use `import Pkg; Pkg.rm("$name")`. - """) + pkgerror( + """Refusing to add package $(err_rep(pkg)). + Package `$name=$(pkg.uuid)` with the same UUID already exists as a direct dependency. + To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm $name""" : """import Pkg; Pkg.rm("$name")""")`. + """ + ) # package with the same uuid exist in the manifest: assert they have the same name entry = get(ctx.env.manifest, pkg.uuid, nothing) entry === nothing || entry.name == pkg.name || - pkgerror("""Refusing to add package $(err_rep(pkg)). - Package `$(entry.name)=$(pkg.uuid)` with the same UUID already exists in the manifest. - To remove the existing package, use `import Pkg; Pkg.rm(Pkg.PackageSpec(uuid="$(pkg.uuid)"); mode=Pkg.PKGMODE_MANIFEST)`. - """) + pkgerror( + """Refusing to add package $(err_rep(pkg)). + Package `$(entry.name)=$(pkg.uuid)` with the same UUID already exists in the manifest. + To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm --manifest $(entry.name)=$(pkg.uuid)""" : """import Pkg; Pkg.rm(Pkg.PackageSpec(uuid="$(pkg.uuid)"); mode=Pkg.PKGMODE_MANIFEST)""")`. 
+ """ + ) end + return end -function tiered_resolve(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version, - try_all_installed::Bool) +function tiered_resolve( + env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version, + try_all_installed::Bool + ) if try_all_installed try # do not modify existing subgraph and only add installed versions of the new packages @debug "tiered_resolve: trying PRESERVE_ALL_INSTALLED" @@ -1609,29 +2220,92 @@ function targeted_resolve(env::EnvCache, registries::Vector{Registry.RegistryIns return pkgs, deps_map end -function _resolve(io::IO, env::EnvCache, registries::Vector{Registry.RegistryInstance}, - pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version) - printpkgstyle(io, :Resolving, "package versions...") - if preserve == PRESERVE_TIERED_INSTALLED - tiered_resolve(env, registries, pkgs, julia_version, true) - elseif preserve == PRESERVE_TIERED - tiered_resolve(env, registries, pkgs, julia_version, false) +function _resolve( + io::IO, env::EnvCache, registries::Vector{Registry.RegistryInstance}, + pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version + ) + usingstrategy = preserve != PRESERVE_TIERED ? 
" using $preserve" : "" + printpkgstyle(io, :Resolving, "package versions$(usingstrategy)...") + return try + if preserve == PRESERVE_TIERED_INSTALLED + tiered_resolve(env, registries, pkgs, julia_version, true) + elseif preserve == PRESERVE_TIERED + tiered_resolve(env, registries, pkgs, julia_version, false) + else + targeted_resolve(env, registries, pkgs, preserve, julia_version) + end + catch err + + if err isa Resolve.ResolverError + yanked_pkgs = filter(pkg -> is_pkgversion_yanked(pkg, registries), load_all_deps(env)) + if !isempty(yanked_pkgs) + indent = " "^(Pkg.pkgstyle_indent) + yanked_str = join(map(pkg -> indent * " - " * err_rep(pkg, quotes = false) * " " * string(pkg.version), yanked_pkgs), "\n") + printpkgstyle(io, :Warning, """The following package versions were yanked from their registry and \ + are not resolvable:\n$yanked_str""", color = Base.warn_color()) + end + end + rethrow() + end +end + +function can_skip_resolve_for_add(pkg::PackageSpec, entry::Union{PackageEntry, Nothing}) + # Can't skip if package not in manifest + entry === nothing && return false + + # Can't skip if pinned (needs special handling in resolution) + entry.pinned && return false + + # Can't skip if tracking path or repo + (entry.path !== nothing || entry.repo.source !== nothing || pkg.repo.source !== nothing) && return false + + # Check if requested version is compatible with installed version + version_compatible = if isa(pkg.version, VersionNumber) + entry.version == pkg.version + elseif pkg.version == VersionSpec() + # No version specified, current version is acceptable + true else - targeted_resolve(env, registries, pkgs, preserve, julia_version) + # VersionSpec range specified, check if current version is in range + entry.version ∈ pkg.version end + + return version_compatible +end + +function add_compat_entries!(ctx::Context, pkgs::Vector{PackageSpec}) + # Only add compat entries if env is a package + ctx.env.pkg === nothing && return + + compat_names = String[] + for 
pkg in pkgs + haskey(ctx.env.project.compat, pkg.name) && continue + v = ctx.env.manifest[pkg.uuid].version + v === nothing && continue + pkgversion = string(Base.thispatch(v)) + set_compat(ctx.env.project, pkg.name, pkgversion) + push!(compat_names, pkg.name) + end + if !isempty(compat_names) + printpkgstyle(ctx.io, :Compat, "entries added for $(join(compat_names, ", "))") + end + return end -function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}(); - allow_autoprecomp::Bool=true, preserve::PreserveLevel=default_preserve(), platform::AbstractPlatform=HostPlatform(), - target::Symbol=:deps) +function add( + ctx::Context, pkgs::Vector{PackageSpec}, new_git = Set{UUID}(); + allow_autoprecomp::Bool = true, preserve::PreserveLevel = default_preserve(), platform::AbstractPlatform = HostPlatform(), + target::Symbol = :deps + ) assert_can_add(ctx, pkgs) # load manifest data + pkg_entries = Tuple{PackageSpec, Union{PackageEntry, Nothing}, Bool}[] for (i, pkg) in pairs(pkgs) delete!(ctx.env.project.weakdeps, pkg.name) entry = manifest_info(ctx.env.manifest, pkg.uuid) is_dep = any(uuid -> uuid == pkg.uuid, [uuid for (name, uuid) in ctx.env.project.deps]) - source_path, source_repo = get_path_repo(ctx.env.project, pkg.name) - pkgs[i] = update_package_add(ctx, pkg, entry, source_path, source_repo, is_dep) + push!(pkg_entries, (pkg, entry, is_dep)) + pkgs[i] = update_package_add(ctx, pkg, entry, is_dep) end names = (p.name for p in pkgs) @@ -1645,38 +2319,47 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}(); pkgerror("Unrecognized target $(target)") end + # Check if we can skip resolution for all packages + can_skip_all = target == :deps && all(pkg_entries) do (pkg, entry, _) + can_skip_resolve_for_add(pkg, entry) + end + + if can_skip_all + # All packages are already in manifest with compatible versions + # Just promote to direct dependencies without resolving + foreach(pkg -> target_field[pkg.name] = pkg.uuid, pkgs) # update set of 
deps/weakdeps/extras + + # if env is a package add compat entries + add_compat_entries!(ctx, pkgs) + + record_project_hash(ctx.env) + write_env(ctx.env) + show_update(ctx.env, ctx.registries; io = ctx.io) + + return + end + foreach(pkg -> target_field[pkg.name] = pkg.uuid, pkgs) # update set of deps/weakdeps/extras if target == :deps # nothing to resolve/install if it's weak or extras # resolve man_pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, preserve, ctx.julia_version) - update_manifest!(ctx.env, man_pkgs, deps_map, ctx.julia_version) + update_manifest!(ctx.env, man_pkgs, deps_map, ctx.julia_version, ctx.registries) new_apply = download_source(ctx) - fixups_from_projectfile!(ctx.env) + fixups_from_projectfile!(ctx) # After downloading resolutionary packages, search for (Julia)Artifacts.toml files # and ensure they are all downloaded and unpacked as well: - download_artifacts(ctx, platform=platform, julia_version=ctx.julia_version) + download_artifacts(ctx, platform = platform, julia_version = ctx.julia_version) # if env is a package add compat entries - if ctx.env.project.name !== nothing && ctx.env.project.uuid !== nothing - compat_names = String[] - for pkg in pkgs - haskey(ctx.env.project.compat, pkg.name) && continue - v = ctx.env.manifest[pkg.uuid].version - v === nothing && continue - pkgversion = string(Base.thispatch(v)) - set_compat(ctx.env.project, pkg.name, pkgversion) - push!(compat_names, pkg.name) - end - printpkgstyle(ctx.io, :Compat, """entries added for $(join(compat_names, ", "))""") - end + add_compat_entries!(ctx, pkgs) record_project_hash(ctx.env) # compat entries changed the hash after it was last recorded in update_manifest! 
write_env(ctx.env) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io) + show_update(ctx.env, ctx.registries; io = ctx.io) build_versions(ctx, union(new_apply, new_git)) - allow_autoprecomp && Pkg._auto_precompile(ctx) + allow_autoprecomp && Pkg._auto_precompile(ctx, pkgs) else record_project_hash(ctx.env) write_env(ctx.env) @@ -1687,8 +2370,10 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}(); end # Input: name, uuid, and path -function develop(ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID}; - preserve::PreserveLevel=default_preserve(), platform::AbstractPlatform=HostPlatform()) +function develop( + ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID}; + preserve::PreserveLevel = default_preserve(), platform::AbstractPlatform = HostPlatform() + ) assert_can_add(ctx, pkgs) # no need to look at manifest.. dev will just nuke whatever is there before for pkg in pkgs @@ -1697,13 +2382,13 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID}; end # resolve & apply package versions pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, preserve, ctx.julia_version) - update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version) + update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version, ctx.registries) new_apply = download_source(ctx) - fixups_from_projectfile!(ctx.env) - download_artifacts(ctx; platform=platform, julia_version=ctx.julia_version) + fixups_from_projectfile!(ctx) + download_artifacts(ctx; platform = platform, julia_version = ctx.julia_version) write_env(ctx.env) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io) - build_versions(ctx, union(new_apply, new_git)) + show_update(ctx.env, ctx.registries; io = ctx.io) + return build_versions(ctx, union(new_apply, new_git)) end # load version constraint @@ -1714,8 +2399,10 @@ function up_load_versions!(ctx::Context, pkg::PackageSpec, entry::PackageEntry, entry.version !== nothing 
|| return false # no version to set if entry.pinned || level == UPLEVEL_FIXED pkg.version = entry.version - pkg.tree_hash = entry.tree_hash - elseif entry.repo.source !== nothing || source_repo.source !== nothing # repo packages have a version but are treated specially + if pkg.path === nothing + pkg.tree_hash = entry.tree_hash + end + elseif source_path === nothing && pkg.path === nothing && (entry.repo.source !== nothing || source_repo.source !== nothing) # repo packages have a version but are treated specially if source_repo.source !== nothing pkg.repo = source_repo else @@ -1739,7 +2426,7 @@ function up_load_versions!(ctx::Context, pkg::PackageSpec, entry::PackageEntry, r = level == UPLEVEL_PATCH ? VersionRange(ver.major, ver.minor) : level == UPLEVEL_MINOR ? VersionRange(ver.major) : level == UPLEVEL_MAJOR ? VersionRange() : - error("unexpected upgrade level: $level") + error("unexpected upgrade level: $level") pkg.version = VersionSpec(r) end return false @@ -1748,19 +2435,23 @@ end up_load_manifest_info!(pkg::PackageSpec, ::Nothing) = nothing function up_load_manifest_info!(pkg::PackageSpec, entry::PackageEntry) pkg.name = entry.name # TODO check name is same - if pkg.repo == GitRepo() + # Only restore repo from manifest if we don't already have a path set + if pkg.repo == GitRepo() && pkg.path === nothing pkg.repo = entry.repo # TODO check that repo is same end - if pkg.path === nothing + # Only set path if tree_hash is not already set (to avoid invalid state where both are set) + if pkg.path === nothing && pkg.repo == GitRepo() && pkg.tree_hash === nothing pkg.path = entry.path end - pkg.pinned = entry.pinned + return pkg.pinned = entry.pinned # `pkg.version` and `pkg.tree_hash` is set by `up_load_versions!` end -function load_manifest_deps_up(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_ALL) +function load_manifest_deps_up( + env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = 
PRESERVE_ALL + ) manifest = env.manifest project = env.project explicit_upgraded = Set(pkg.uuid for pkg in pkgs) @@ -1795,33 +2486,40 @@ function load_manifest_deps_up(env::EnvCache, pkgs::Vector{PackageSpec}=PackageS end # The rest of the packages get fixed - push!(pkgs, PackageSpec( - uuid = uuid, - name = entry.name, - path = entry.path, - pinned = entry.pinned, - repo = entry.repo, - tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? - version = something(entry.version, VersionSpec()) - )) + push!( + pkgs, PackageSpec( + uuid = uuid, + name = entry.name, + path = entry.path, + pinned = entry.pinned, + repo = entry.repo, + tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? + version = something(entry.version, VersionSpec()), + ) + ) end return pkgs end function targeted_resolve_up(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version) - pkgs = load_manifest_deps_up(env, pkgs; preserve=preserve) + pkgs = load_manifest_deps_up(env, pkgs; preserve = preserve) check_registered(registries, pkgs) deps_map = resolve_versions!(env, registries, pkgs, julia_version, preserve == PRESERVE_ALL_INSTALLED) return pkgs, deps_map end -function up(ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel; - skip_writing_project::Bool=false, preserve::Union{Nothing,PreserveLevel}=nothing) +function up( + ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel; + skip_writing_project::Bool = false, preserve::Union{Nothing, PreserveLevel} = nothing + ) + + requested_pkgs = pkgs + new_git = Set{UUID}() # TODO check all pkg.version == VersionSpec() # set version constraints according to `level` for pkg in pkgs - source_path, source_repo = get_path_repo(ctx.env.project, pkg.name) + source_path, source_repo = get_path_repo(ctx.env.project, ctx.env.project_file, ctx.env.manifest_file, pkg.name) entry = manifest_info(ctx.env.manifest, pkg.uuid) new = up_load_versions!(ctx, 
pkg, entry, source_path, source_repo, level) new && push!(new_git, pkg.uuid) #TODO put download + push! in utility function @@ -1838,26 +2536,55 @@ function up(ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel; check_registered(ctx.registries, pkgs) deps_map = resolve_versions!(ctx.env, ctx.registries, pkgs, ctx.julia_version, false) end - update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version) + update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version, ctx.registries) new_apply = download_source(ctx) - fixups_from_projectfile!(ctx.env) - download_artifacts(ctx, julia_version=ctx.julia_version) + fixups_from_projectfile!(ctx) + download_artifacts(ctx, julia_version = ctx.julia_version) write_env(ctx.env; skip_writing_project) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io, hidden_upgrades_info = true) - build_versions(ctx, union(new_apply, new_git)) + show_update(ctx.env, ctx.registries; io = ctx.io, hidden_upgrades_info = true) + + if length(requested_pkgs) == 1 + pkg = only(requested_pkgs) + entry = manifest_info(ctx.env.manifest, pkg.uuid) + if entry === nothing || (entry.path === nothing && entry.repo.source === nothing) + # Get current version after the update + current_version = entry !== nothing ? entry.version : nothing + original_entry = manifest_info(ctx.env.original_manifest, pkg.uuid) + original_version = original_entry !== nothing ? original_entry.version : nothing + + # Check if version didn't change and there's a newer version available + if current_version == original_version && current_version !== nothing + temp_pkg = PackageSpec(name = pkg.name, uuid = pkg.uuid, version = current_version) + cinfo = status_compat_info(temp_pkg, ctx.env, ctx.registries) + if cinfo !== nothing + packages_holding_back, max_version, max_version_compat = cinfo + if current_version < max_version + printpkgstyle( + ctx.io, :Info, "$(pkg.name) can be updated but at the cost of upgrading/downgrading other packages. 
" * + "To force upgrade to the latest version, try `add $(pkg.name)@$(max_version)`", color = Base.info_color() + ) + end + end + end + end + end + + return build_versions(ctx, union(new_apply, new_git)) end -function update_package_pin!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, entry::Union{Nothing, PackageEntry}) +function update_package_pin!(ctx::Context, pkg::PackageSpec, entry::Union{Nothing, PackageEntry}) if entry === nothing - pkgerror("package $(err_rep(pkg)) not found in the manifest, run `Pkg.resolve()` and retry.") + cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()" + pkgerror("package $(err_rep(pkg)) not found in the manifest, run `$cmd` and retry.") end + registries = ctx.registries #if entry.pinned && pkg.version == VersionSpec() # println(ctx.io, "package $(err_rep(pkg)) already pinned") #end # update pinned package pkg.pinned = true - if is_stdlib(pkg.uuid) + if is_stdlib(pkg.uuid, ctx.julia_version) return nothing # nothing left to do elseif pkg.version == VersionSpec() pkg.version = entry.version # pin at current version @@ -1878,19 +2605,19 @@ end is_fully_pinned(ctx::Context) = !isempty(ctx.env.manifest.deps) && all(kv -> last(kv).pinned, ctx.env.manifest.deps) function pin(ctx::Context, pkgs::Vector{PackageSpec}) - foreach(pkg -> update_package_pin!(ctx.registries, pkg, manifest_info(ctx.env.manifest, pkg.uuid)), pkgs) + foreach(pkg -> update_package_pin!(ctx, pkg, manifest_info(ctx.env.manifest, pkg.uuid)), pkgs) pkgs = load_direct_deps(ctx.env, pkgs) # TODO: change pin to not take a version and just have it pin on the current version. 
Then there is no need to resolve after a pin pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, PRESERVE_TIERED, ctx.julia_version) - update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version) + update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version, ctx.registries) new = download_source(ctx) - fixups_from_projectfile!(ctx.env) - download_artifacts(ctx; julia_version=ctx.julia_version) + fixups_from_projectfile!(ctx) + download_artifacts(ctx; julia_version = ctx.julia_version) write_env(ctx.env) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io) - build_versions(ctx, new) + show_update(ctx.env, ctx.registries; io = ctx.io) + return build_versions(ctx, new) end function update_package_free!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, entry::PackageEntry, err_if_free::Bool) @@ -1910,62 +2637,71 @@ function update_package_free!(registries::Vector{Registry.RegistryInstance}, pkg return # -> name, uuid end if err_if_free - pkgerror("expected package $(err_rep(pkg)) to be pinned, tracking a path,", - " or tracking a repository") + pkgerror( + "expected package $(err_rep(pkg)) to be pinned, tracking a path,", + " or tracking a repository" + ) end return end # TODO: this is two technically different operations with the same name # split into two subfunctions ... -function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free=true) +function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free = true) for pkg in pkgs entry = manifest_info(ctx.env.manifest, pkg.uuid) delete!(ctx.env.project.sources, pkg.name) update_package_free!(ctx.registries, pkg, entry, err_if_free) end - if any(pkg -> pkg.version == VersionSpec(), pkgs) + return if any(pkg -> pkg.version == VersionSpec(), pkgs) pkgs = load_direct_deps(ctx.env, pkgs) check_registered(ctx.registries, pkgs) # TODO: change free to not take a version and just have it pin on the current version. 
Then there is no need to resolve after a pin pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, PRESERVE_TIERED, ctx.julia_version) - update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version) + update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version, ctx.registries) new = download_source(ctx) - fixups_from_projectfile!(ctx.env) + fixups_from_projectfile!(ctx) download_artifacts(ctx) write_env(ctx.env) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io) + show_update(ctx.env, ctx.registries; io = ctx.io) build_versions(ctx, new) else foreach(pkg -> manifest_info(ctx.env.manifest, pkg.uuid).pinned = false, pkgs) write_env(ctx.env) - show_update(ctx.env, ctx.registries; io=ctx.io) + show_update(ctx.env, ctx.registries; io = ctx.io) end end function gen_test_code(source_path::String; test_args::Cmd) test_file = testfile(source_path) return """ - $(Base.load_path_setup_code(false)) - cd($(repr(dirname(test_file)))) - append!(empty!(ARGS), $(repr(test_args.exec))) - include($(repr(test_file))) - """ + $(Base.load_path_setup_code(false)) + cd($(repr(dirname(test_file)))) + append!(empty!(ARGS), $(repr(test_args.exec))) + include($(repr(test_file))) + """ end function get_threads_spec() - if Threads.nthreads(:interactive) > 0 + return if haskey(ENV, "JULIA_NUM_THREADS") + if isempty(ENV["JULIA_NUM_THREADS"]) + throw(ArgumentError("JULIA_NUM_THREADS is set to an empty string. 
It is not clear what Pkg.test should set for `-t` on the test worker.")) + end + # if set, prefer JULIA_NUM_THREADS because this is passed to the test worker via --threads + # which takes precedence in the worker + ENV["JULIA_NUM_THREADS"] + elseif Threads.nthreads(:interactive) > 0 "$(Threads.nthreads(:default)),$(Threads.nthreads(:interactive))" else "$(Threads.nthreads(:default))" end end -function gen_subprocess_flags(source_path::String; coverage, julia_args) +function gen_subprocess_flags(source_path::String; coverage, julia_args::Cmd) coverage_arg = if coverage isa Bool # source_path is the package root, not "src" so "ext" etc. is included coverage ? string("@", source_path) : "none" @@ -1977,7 +2713,6 @@ function gen_subprocess_flags(source_path::String; coverage, julia_args) return ``` --code-coverage=$(coverage_arg) --color=$(Base.have_color === nothing ? "auto" : Base.have_color ? "yes" : "no") - --check-bounds=yes --warn-overwrite=yes --depwarn=$(Base.JLOptions().depwarn == 2 ? "error" : "yes") --inline=$(Bool(Base.JLOptions().can_inline) ? 
"yes" : "no") @@ -1990,7 +2725,7 @@ end function with_temp_env(fn::Function, temp_env::String) load_path = copy(LOAD_PATH) active_project = Base.ACTIVE_PROJECT[] - try + return try push!(empty!(LOAD_PATH), "@", temp_env) Base.ACTIVE_PROJECT[] = nothing fn() @@ -2005,8 +2740,10 @@ function sandbox_preserve(env::EnvCache, target::PackageSpec, test_project::Stri env = deepcopy(env) # include root in manifest (in case any dependencies point back to it) if env.pkg !== nothing - env.manifest[env.pkg.uuid] = PackageEntry(;name=env.pkg.name, path=dirname(env.project_file), - deps=env.project.deps) + env.manifest[env.pkg.uuid] = PackageEntry(; + name = env.pkg.name, path = dirname(env.project_file), + deps = env.project.deps + ) end # if the source manifest is an old format, upgrade the manifest_format so # that warnings aren't thrown for the temp sandbox manifest @@ -2025,7 +2762,7 @@ end function abspath!(env::EnvCache, manifest::Manifest) for (uuid, entry) in manifest if entry.path !== nothing - entry.path = project_rel_path(env, entry.path) + entry.path = manifest_rel_path(env, entry.path) end end return manifest @@ -2034,40 +2771,96 @@ end function abspath!(env::EnvCache, project::Project) for (key, entry) in project.sources if haskey(entry, "path") - entry["path"] = project_rel_path(env, entry["path"]) + # Paths in project sources are project-relative, so join with project_file dir, not manifest_file dir + entry["path"] = normpath(joinpath(dirname(env.project_file), entry["path"])) end end return project end +function sandbox_with_temp_env( + fn::Function, ctx::Context, target::PackageSpec, tmp::String, + has_sandbox_project::Bool, sandbox_env::EnvCache; + force_latest_compatible_version::Bool, + allow_earlier_backwards_compatible_versions::Bool, + allow_reresolve::Bool + ) + return with_temp_env(tmp) do + temp_ctx = Context() + if has_sandbox_project + abspath!(sandbox_env, temp_ctx.env.project) + end + temp_ctx.env.project.deps[target.name] = target.uuid + + if 
force_latest_compatible_version + apply_force_latest_compatible_version!( + temp_ctx; + target_name = target.name, + allow_earlier_backwards_compatible_versions, + ) + end + + try + Pkg.resolve(temp_ctx; io = devnull, skip_writing_project = true) + @debug "Using _parent_ dep graph" + catch err # TODO + err isa Resolve.ResolverError || rethrow() + allow_reresolve || rethrow() + @debug err + msg = string( + "Could not use exact versions of packages in manifest, re-resolving. ", + "Note: if you do not check your manifest file into source control, ", + "then you can probably ignore this message. ", + "However, if you do check your manifest file into source control, ", + "then you probably want to pass the `allow_reresolve = false` kwarg ", + "when calling the `Pkg.test` function.", + ) + printpkgstyle(ctx.io, :Test, msg, color = Base.warn_color()) + Pkg.update(temp_ctx; skip_writing_project = true, update_registry = false, io = ctx.io) + printpkgstyle(ctx.io, :Test, "Successfully re-resolved") + @debug "Using _clean_ dep graph" + end + + reset_all_compat!(temp_ctx.env.project) + write_env(temp_ctx.env, update_undo = false) + + # Run sandboxed code + path_sep = Sys.iswindows() ? 
';' : ':' + withenv(fn, "JULIA_LOAD_PATH" => "@$(path_sep)$(tmp)", "JULIA_PROJECT" => nothing) + end +end + # ctx + pkg used to compute parent dep graph -function sandbox(fn::Function, ctx::Context, target::PackageSpec, - sandbox_path::String, sandbox_project_override; - preferences::Union{Nothing,Dict{String,Any}} = nothing, - force_latest_compatible_version::Bool=false, - allow_earlier_backwards_compatible_versions::Bool=true, - allow_reresolve::Bool=true) +function sandbox( + fn::Function, ctx::Context, target::PackageSpec, + sandbox_path::String, sandbox_project_override_in; + preferences::Union{Nothing, Dict{String, Any}} = nothing, + force_latest_compatible_version::Bool = false, + allow_earlier_backwards_compatible_versions::Bool = true, + allow_reresolve::Bool = true + ) sandbox_project = projectfile_path(sandbox_path) - mktempdir() do tmp - tmp_project = projectfile_path(tmp) + return mktempdir() do tmp + sandbox_project_override_local = sandbox_project_override_in + tmp_project = projectfile_path(tmp) tmp_manifest = manifestfile_path(tmp) tmp_preferences = joinpath(tmp, first(Base.preferences_names)) # Copy env info over to temp env has_sandbox_project = false - if sandbox_project_override === nothing + if sandbox_project_override_local === nothing if isfile(sandbox_project) - sandbox_project_override = read_project(sandbox_project) + sandbox_project_override_local = read_project(sandbox_project) has_sandbox_project = true else - sandbox_project_override = Project() + sandbox_project_override_local = Project() end end if !has_sandbox_project - abspath!(ctx.env, sandbox_project_override) + abspath!(ctx.env, sandbox_project_override_local) end - Types.write_project(sandbox_project_override, tmp_project) + Types.write_project(sandbox_project_override_local, tmp_project) # create merged manifest # - copy over active subgraph @@ -2102,41 +2895,12 @@ function sandbox(fn::Function, ctx::Context, target::PackageSpec, end # sandbox - with_temp_env(tmp) do - 
temp_ctx = Context() - if has_sandbox_project - abspath!(sandbox_env, temp_ctx.env.project) - end - temp_ctx.env.project.deps[target.name] = target.uuid - - if force_latest_compatible_version - apply_force_latest_compatible_version!( - temp_ctx; - target_name = target.name, - allow_earlier_backwards_compatible_versions, - ) - end - - try - Pkg.resolve(temp_ctx; io=devnull, skip_writing_project=true) - @debug "Using _parent_ dep graph" - catch err# TODO - err isa Resolve.ResolverError || rethrow() - allow_reresolve || rethrow() - @debug err - printpkgstyle(ctx.io, :Test, "Could not use exact versions of packages in manifest. Re-resolving dependencies", color=Base.warn_color()) - Pkg.update(temp_ctx; skip_writing_project=true, update_registry=false, io=ctx.io) - printpkgstyle(ctx.io, :Test, "Successfully re-resolved") - @debug "Using _clean_ dep graph" - end - - reset_all_compat!(temp_ctx.env.project) - write_env(temp_ctx.env, update_undo = false) - - # Run sandboxed code - path_sep = Sys.iswindows() ? 
';' : ':' - withenv(fn, "JULIA_LOAD_PATH" => "@$(path_sep)$(tmp)", "JULIA_PROJECT" => nothing) - end + sandbox_with_temp_env( + fn, ctx, target, tmp, has_sandbox_project, sandbox_env; + force_latest_compatible_version, + allow_earlier_backwards_compatible_versions, + allow_reresolve, + ) end end @@ -2164,7 +2928,7 @@ function gen_target_project(ctx::Context, pkg::PackageSpec, source_path::String, env = ctx.env registries = ctx.registries test_project = Types.Project() - if projectfile_path(source_path; strict=true) === nothing + if projectfile_path(source_path; strict = true) === nothing # no project file, assuming this is an old REQUIRE package test_project.deps = copy(env.manifest[pkg.uuid].deps) if target == "test" @@ -2172,10 +2936,10 @@ function gen_target_project(ctx::Context, pkg::PackageSpec, source_path::String, if isfile(test_REQUIRE_path) @warn "using test/REQUIRE files is deprecated and current support is lacking in some areas" test_pkgs = parse_REQUIRE(test_REQUIRE_path) - package_specs = [PackageSpec(name=pkg) for pkg in test_pkgs] + package_specs = [PackageSpec(name = pkg) for pkg in test_pkgs] registry_resolve!(registries, package_specs) stdlib_resolve!(package_specs) - ensure_resolved(ctx, env.manifest, package_specs, registry=true) + ensure_resolved(ctx, env.manifest, package_specs, registry = true) for spec in package_specs test_project.deps[spec.name] = spec.uuid end @@ -2211,12 +2975,61 @@ end testdir(source_path::String) = joinpath(source_path, "test") testfile(source_path::String) = joinpath(testdir(source_path), "runtests.jl") -function test(ctx::Context, pkgs::Vector{PackageSpec}; - coverage=false, julia_args::Cmd=``, test_args::Cmd=``, - test_fn=nothing, - force_latest_compatible_version::Bool=false, - allow_earlier_backwards_compatible_versions::Bool=true, - allow_reresolve::Bool=true) + +function run_test_subprocess(io::IO, flags::Cmd, source_path::String, test_args::Cmd; with_threads::Bool) + code = gen_test_code(source_path; test_args) 
+ threads_arg = with_threads ? `--threads=$(get_threads_spec())` : `` + cmd = `$(Base.julia_cmd()) $threads_arg $(flags) --eval $code` + return subprocess_handler(cmd, io, "Tests interrupted. Exiting the test process") +end + +function run_test_subprocess_in_env(io::IO, flags::Cmd, source_path::String, test_args::Cmd) + path_sep = Sys.iswindows() ? ';' : ':' + return withenv("JULIA_LOAD_PATH" => "@$(path_sep)$(testdir(source_path))", "JULIA_PROJECT" => nothing) do + run_test_subprocess(io, flags, source_path, test_args; with_threads = false) + end +end + +function run_sandboxed_tests!( + ctx::Context, pkg::PackageSpec, source_path::String, test_args::Cmd, + coverage::Union{Bool, AbstractString}, julia_args::Cmd, test_fn, + pkgs_errored::Vector{Tuple{String, Base.Process}} + ) + test_fn !== nothing && test_fn() + sandbox_ctx = Context(; io = ctx.io) + status( + sandbox_ctx.env, sandbox_ctx.registries; + mode = PKGMODE_COMBINED, + io = sandbox_ctx.io, + ignore_indent = false, + show_usagetips = false, + ) + flags = gen_subprocess_flags(source_path; coverage, julia_args) + + if should_autoprecompile() + cacheflags = parse(CacheFlags, read(`$(Base.julia_cmd()) $(flags) --eval 'show(Base.CacheFlags())'`, String)) + Pkg.precompile(sandbox_ctx; io = sandbox_ctx.io, configs = flags => cacheflags) + end + + printpkgstyle(ctx.io, :Testing, "Running tests...") + flush(ctx.io) + p, interrupted = run_test_subprocess(ctx.io, flags, source_path, test_args; with_threads = true) + if success(p) + printpkgstyle(ctx.io, :Testing, pkg.name * " tests passed ") + elseif !interrupted + push!(pkgs_errored, (pkg.name, p)) + end + return +end + +function test( + ctx::Context, pkgs::Vector{PackageSpec}; + coverage = false, julia_args::Cmd = ``, test_args::Cmd = ``, + test_fn = nothing, + force_latest_compatible_version::Bool = false, + allow_earlier_backwards_compatible_versions::Bool = true, + allow_reresolve::Bool = true + ) Pkg.instantiate(ctx; allow_autoprecomp = false) # do precomp 
later within sandbox # load manifest data @@ -2237,16 +3050,18 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; # See if we can find the test files for all packages missing_runtests = String[] - source_paths = String[] # source_path is the package root (not /src) + source_paths = String[] # source_path is the package root (not /src) for pkg in pkgs - sourcepath = project_rel_path(ctx.env, source_path(ctx.env.manifest_file, pkg, ctx.julia_version)) # TODO + sourcepath = source_path(ctx.env.manifest_file, pkg, ctx.julia_version) !isfile(testfile(sourcepath)) && push!(missing_runtests, pkg.name) push!(source_paths, sourcepath) end if !isempty(missing_runtests) - pkgerror(length(missing_runtests) == 1 ? "Package " : "Packages ", - join(missing_runtests, ", "), - " did not provide a `test/runtests.jl` file") + pkgerror( + length(missing_runtests) == 1 ? "Package " : "Packages ", + join(missing_runtests, ", "), + " did not provide a `test/runtests.jl` file" + ) end # sandbox @@ -2257,25 +3072,23 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; if testdir(source_path) in dirname.(keys(ctx.env.workspace)) proj = Base.locate_project_file(abspath(testdir(source_path))) env = EnvCache(proj) - # Instantiate test env - Pkg.instantiate(Context(env=env); allow_autoprecomp = false) - status(env, ctx.registries; mode=PKGMODE_COMBINED, io=ctx.io, ignore_indent = false, show_usagetips = false) + # Use a Context pointing at the test env so that instantiate and + # precompile operate on the test project rather than the parent. 
+ test_ctx = Context(env = env; io = ctx.io) + Pkg.instantiate(test_ctx; allow_autoprecomp = false) + status(env, ctx.registries; mode = PKGMODE_COMBINED, io = ctx.io, ignore_indent = false, show_usagetips = false) flags = gen_subprocess_flags(source_path; coverage, julia_args) if should_autoprecompile() - cacheflags = Base.CacheFlags(parse(UInt8, read(`$(Base.julia_cmd()) $(flags) --eval 'show(ccall(:jl_cache_flags, UInt8, ()))'`, String))) - Pkg.precompile(; io=ctx.io, configs = flags => cacheflags) + cacheflags = parse(CacheFlags, read(`$(Base.julia_cmd()) $(flags) --eval 'show(Base.CacheFlags())'`, String)) + # Don't warn about already loaded packages, since we are going to run tests in a new + # subprocess anyway. + Pkg.precompile(test_ctx; io = ctx.io, warn_loaded = false, configs = flags => cacheflags) end printpkgstyle(ctx.io, :Testing, "Running tests...") flush(ctx.io) - code = gen_test_code(source_path; test_args) - cmd = `$(Base.julia_cmd()) $(flags) --eval $code` - - path_sep = Sys.iswindows() ? ';' : ':' - p, interrupted = withenv("JULIA_LOAD_PATH" => "@$(path_sep)$(testdir(source_path))", "JULIA_PROJECT" => nothing) do - subprocess_handler(cmd, ctx.io, "Tests interrupted. 
Exiting the test process") - end + p, interrupted = run_test_subprocess_in_env(ctx.io, flags, source_path, test_args) if success(p) printpkgstyle(ctx.io, :Testing, pkg.name * " tests passed ") elseif !interrupted @@ -2285,47 +3098,37 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; end # compatibility shim between "targets" and "test/Project.toml" - local test_project_preferences, test_project_override - if isfile(projectfile_path(testdir(source_path))) + local test_project_override + test_project_preferences = if isfile(projectfile_path(testdir(source_path))) test_project_override = nothing with_load_path([testdir(source_path), Base.LOAD_PATH...]) do - test_project_preferences = Base.get_preferences() + Base.get_preferences() end else test_project_override = gen_target_project(ctx, pkg, source_path, "test") with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do - test_project_preferences = Base.get_preferences() + Base.get_preferences() end end # now we sandbox printpkgstyle(ctx.io, :Testing, pkg.name) - sandbox(ctx, pkg, testdir(source_path), test_project_override; preferences=test_project_preferences, force_latest_compatible_version, allow_earlier_backwards_compatible_versions, allow_reresolve) do - test_fn !== nothing && test_fn() - sandbox_ctx = Context(;io=ctx.io) - status(sandbox_ctx.env, sandbox_ctx.registries; mode=PKGMODE_COMBINED, io=sandbox_ctx.io, ignore_indent = false, show_usagetips = false) - flags = gen_subprocess_flags(source_path; coverage,julia_args) - - if should_autoprecompile() - cacheflags = Base.CacheFlags(parse(UInt8, read(`$(Base.julia_cmd()) $(flags) --eval 'show(ccall(:jl_cache_flags, UInt8, ()))'`, String))) - Pkg.precompile(sandbox_ctx; io=sandbox_ctx.io, configs = flags => cacheflags) - end - - printpkgstyle(ctx.io, :Testing, "Running tests...") - flush(ctx.io) - code = gen_test_code(source_path; test_args) - cmd = `$(Base.julia_cmd()) $(flags) --threads=$(get_threads_spec()) --eval $code` - p, 
interrupted = subprocess_handler(cmd, ctx.io, "Tests interrupted. Exiting the test process") - if success(p) - printpkgstyle(ctx.io, :Testing, pkg.name * " tests passed ") - elseif !interrupted - push!(pkgs_errored, (pkg.name, p)) - end + sandbox( + ctx, pkg, testdir(source_path), test_project_override; + preferences = test_project_preferences, + force_latest_compatible_version, + allow_earlier_backwards_compatible_versions, + allow_reresolve, + ) do + run_sandboxed_tests!( + ctx, pkg, source_path, test_args, + coverage, julia_args, test_fn, pkgs_errored, + ) end end # TODO: Should be included in Base function signal_name(signal::Integer) - if signal == Base.SIGHUP + return if signal == Base.SIGHUP "HUP" elseif signal == Base.SIGINT "INT" @@ -2343,9 +3146,9 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; end # report errors - if !isempty(pkgs_errored) + return if !isempty(pkgs_errored) function reason(p) - if Base.process_signaled(p) + return if Base.process_signaled(p) " (received signal: " * signal_name(p.termsignal) * ")" elseif Base.process_exited(p) && p.exitcode != 1 " (exit code: " * string(p.exitcode) * ")" @@ -2394,7 +3197,7 @@ end # Display -function stat_rep(x::PackageSpec; name=true) +function stat_rep(x::PackageSpec; name = true) name = name ? "$(x.name)" : "" version = x.version == VersionSpec() ? "" : "v$(x.version)" rev = "" @@ -2405,7 +3208,7 @@ function stat_rep(x::PackageSpec; name=true) repo = Operations.is_tracking_repo(x) ? "`$(x.repo.source)$(subdir_str)#$(rev)`" : "" path = Operations.is_tracking_path(x) ? "$(pathrepr(x.path))" : "" pinned = x.pinned ? 
"⚲" : "" - return join(filter(!isempty, [name,version,repo,path,pinned]), " ") + return join(filter(!isempty, [name, version, repo, path, pinned]), " ") end print_single(io::IO, pkg::PackageSpec) = print(io, stat_rep(pkg)) @@ -2413,20 +3216,20 @@ print_single(io::IO, pkg::PackageSpec) = print(io, stat_rep(pkg)) is_instantiated(::Nothing) = false is_instantiated(x::PackageSpec) = x.version != VersionSpec() || is_stdlib(x.uuid) # Compare an old and new node of the dependency graph and print a single line to summarize the change -function print_diff(io::IO, old::Union{Nothing,PackageSpec}, new::Union{Nothing,PackageSpec}) - if !is_instantiated(old) && is_instantiated(new) - printstyled(io, "+ $(stat_rep(new))"; color=:light_green) +function print_diff(io::IO, old::Union{Nothing, PackageSpec}, new::Union{Nothing, PackageSpec}) + return if !is_instantiated(old) && is_instantiated(new) + printstyled(io, "+ $(stat_rep(new))"; color = :light_green) elseif !is_instantiated(new) - printstyled(io, "- $(stat_rep(old))"; color=:light_red) + printstyled(io, "- $(stat_rep(old))"; color = :light_red) elseif is_tracking_registry(old) && is_tracking_registry(new) && - new.version isa VersionNumber && old.version isa VersionNumber && new.version != old.version + new.version isa VersionNumber && old.version isa VersionNumber && new.version != old.version if new.version > old.version - printstyled(io, "↑ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_yellow) + printstyled(io, "↑ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_yellow) else - printstyled(io, "↓ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_magenta) + printstyled(io, "↓ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_magenta) end else - printstyled(io, "~ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_yellow) + printstyled(io, "~ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_yellow) end end @@ -2438,15 +3241,15 @@ 
function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist for reg in regs reg_pkg = get(reg, pkg.uuid, nothing) reg_pkg === nothing && continue - info = Registry.registry_info(reg_pkg) - reg_compat_info = Registry.compat_info(info) - versions = keys(reg_compat_info) + info = Registry.registry_info(reg, reg_pkg) + # Get versions directly from version_info + versions = keys(info.version_info) versions = filter(v -> !Registry.isyanked(info, v), versions) - max_version_reg = maximum(versions; init=v"0") + max_version_reg = maximum(versions; init = v"0") max_version = max(max_version, max_version_reg) compat_spec = get_compat_workspace(env, pkg.name) - versions_in_compat = filter(in(compat_spec), keys(reg_compat_info)) - max_version_in_compat = max(max_version_in_compat, maximum(versions_in_compat; init=v"0")) + versions_in_compat = filter(in(compat_spec), versions) + max_version_in_compat = max(max_version_in_compat, maximum(versions_in_compat; init = v"0")) end max_version == v"0" && return nothing pkg.version >= max_version && return nothing @@ -2478,11 +3281,9 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist for reg in regs reg_pkg = get(reg, uuid, nothing) reg_pkg === nothing && continue - info = Registry.registry_info(reg_pkg) - reg_compat_info = Registry.compat_info(info) - compat_info_v = get(reg_compat_info, dep_info.version, nothing) - compat_info_v === nothing && continue - compat_info_v_uuid = get(compat_info_v, pkg.uuid, nothing) + info = Registry.registry_info(reg, reg_pkg) + # Query compressed deps and compat for the specific dependency version (optimized: only fetch this pkg's compat) + compat_info_v_uuid = Registry.query_compat_for_version(info, dep_info.version, pkg.uuid) compat_info_v_uuid === nothing && continue if !(max_version in compat_info_v_uuid) push!(packages_holding_back, dep_pkg.name) @@ -2495,15 +3296,11 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, 
regs::Vector{Regist for reg in regs reg_pkg = get(reg, pkg.uuid, nothing) reg_pkg === nothing && continue - info = Registry.registry_info(reg_pkg) - reg_compat_info = Registry.compat_info(info) - compat_info_v = get(reg_compat_info, pkg.version, nothing) - versions = keys(reg_compat_info) - for v in versions - compat_info_v = get(reg_compat_info, v, nothing) - compat_info_v === nothing && continue - compat_info_v_uuid = compat_info_v[JULIA_UUID] - if VERSION in compat_info_v_uuid + info = Registry.registry_info(reg, reg_pkg) + # Check all versions for Julia compatibility (optimized: only fetch Julia compat) + for v in keys(info.version_info) + julia_vspec = Registry.query_compat_for_version(info, v, JULIA_UUID) + if julia_vspec !== nothing && VERSION in julia_vspec push!(julia_compatible_versions, v) end end @@ -2515,7 +3312,7 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist return sort!(unique!(packages_holding_back)), max_version, max_version_in_compat end -function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifest=true, workspace=false) +function diff_array(old_env::Union{EnvCache, Nothing}, new_env::EnvCache; manifest = true, workspace = false) function index_pkgs(pkgs, uuid) idx = findfirst(pkg -> pkg.uuid == uuid, pkgs) return idx === nothing ? nothing : pkgs[idx] @@ -2527,9 +3324,9 @@ function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifes new = manifest ? load_all_deps_loadable(new_env) : load_project_deps(new_env.project, new_env.project_file, new_env.manifest, new_env.manifest_file) end - T, S = Union{UUID,Nothing}, Union{PackageSpec,Nothing} + T, S = Union{UUID, Nothing}, Union{PackageSpec, Nothing} if old_env === nothing - return Tuple{T,S,S}[(pkg.uuid, nothing, pkg)::Tuple{T,S,S} for pkg in new] + return Tuple{T, S, S}[(pkg.uuid, nothing, pkg)::Tuple{T, S, S} for pkg in new] end if workspace old = manifest ? 
load_all_deps(old_env) : load_direct_deps(old_env) @@ -2538,13 +3335,12 @@ function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifes end # merge old and new into single array all_uuids = union(T[pkg.uuid for pkg in old], T[pkg.uuid for pkg in new]) - return Tuple{T,S,S}[(uuid, index_pkgs(old, uuid), index_pkgs(new, uuid))::Tuple{T,S,S} for uuid in all_uuids] + return Tuple{T, S, S}[(uuid, index_pkgs(old, uuid), index_pkgs(new, uuid))::Tuple{T, S, S} for uuid in all_uuids] end -function is_package_downloaded(manifest_file::String, pkg::PackageSpec; platform=HostPlatform()) +function is_package_downloaded(manifest_file::String, pkg::PackageSpec; platform = HostPlatform()) sourcepath = source_path(manifest_file, pkg) - identifier = pkg.name !== nothing ? pkg.name : pkg.uuid - (sourcepath === nothing) && pkgerror("Could not locate the source code for the $(identifier) package. Are you trying to use a manifest generated by a different version of Julia?") + sourcepath === nothing && return false isdir(sourcepath) || return false check_artifacts_downloaded(sourcepath; platform) || return false return true @@ -2564,11 +3360,13 @@ function status_ext_info(pkg::PackageSpec, env::EnvCache) # Note: `get_extension` returns nothing for stdlibs that are loaded via `require_stdlib` ext_loaded = (Base.get_extension(Base.PkgId(pkg.uuid, pkg.name), Symbol(ext)) !== nothing) # Check if deps are loaded - extdeps_info= Tuple{String, Bool}[] + extdeps_info = Tuple{String, Bool}[] for extdep in extdeps if !(haskey(weakdepses, extdep) || haskey(depses, extdep)) - pkgerror(isnothing(pkg.name) ? "M" : "$(pkg.name) has a malformed Project.toml, ", - "the extension package $extdep is not listed in [weakdeps] or [deps]") + pkgerror( + isnothing(pkg.name) ? 
"M" : "$(pkg.name) has a malformed Project.toml, ", + "the extension package $extdep is not listed in [weakdeps] or [deps]" + ) end uuid = get(weakdepses, extdep, nothing) if uuid === nothing @@ -2598,35 +3396,67 @@ struct PackageStatusData compat_data::Union{Nothing, Tuple{Vector{String}, VersionNumber, VersionNumber}} changed::Bool extinfo::Union{Nothing, Vector{ExtInfo}} + deprecation_info::Union{Nothing, Dict{String, Any}} end -function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registries::Vector{Registry.RegistryInstance}, header::Symbol, - uuids::Vector, names::Vector; manifest=true, diff=false, ignore_indent::Bool, workspace::Bool, outdated::Bool, extensions::Bool, io::IO, - mode::PackageMode, hidden_upgrades_info::Bool, show_usagetips::Bool=true) - not_installed_indicator = sprint((io, args) -> printstyled(io, args...; color=Base.error_color()), "→", context=io) - upgradable_indicator = sprint((io, args) -> printstyled(io, args...; color=:green), "⌃", context=io) - heldback_indicator = sprint((io, args) -> printstyled(io, args...; color=Base.warn_color()), "⌅", context=io) +function print_status( + env::EnvCache, old_env::Union{Nothing, EnvCache}, registries::Vector{Registry.RegistryInstance}, header::Symbol, + uuids::Vector, names::Vector; manifest = true, diff = false, ignore_indent::Bool, workspace::Bool, outdated::Bool, deprecated::Bool, extensions::Bool, io::IO, + mode::PackageMode, hidden_upgrades_info::Bool, show_usagetips::Bool = true + ) + not_installed_indicator = sprint((io, args) -> printstyled(io, args...; color = Base.error_color()), "→", context = io) + upgradable_indicator = sprint((io, args) -> printstyled(io, args...; color = :green), "⌃", context = io) + heldback_indicator = sprint((io, args) -> printstyled(io, args...; color = Base.warn_color()), "⌅", context = io) filter = !isempty(uuids) || !isempty(names) # setup xs = diff_array(old_env, env; manifest, workspace) # filter and return early if possible if isempty(xs) 
&& !diff - printpkgstyle(io, header, "$(pathrepr(manifest ? env.manifest_file : env.project_file)) (empty " * - (manifest ? "manifest" : "project") * ")", ignore_indent) + printpkgstyle( + io, header, "$(pathrepr(manifest ? env.manifest_file : env.project_file)) (empty " * + (manifest ? "manifest" : "project") * ")", ignore_indent + ) return nothing end - no_changes = all(p-> p[2] == p[3], xs) + no_changes = all(p -> p[2] == p[3], xs) if no_changes - printpkgstyle(io, Symbol("No packages added to or removed from"), "$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent) + if manifest + printpkgstyle(io, :Manifest, "No packages added to or removed from $(pathrepr(env.manifest_file))", ignore_indent; color = Base.info_color()) + else + printpkgstyle(io, :Project, "No packages added to or removed from $(pathrepr(env.project_file))", ignore_indent; color = Base.info_color()) + end else - xs = !filter ? xs : eltype(xs)[(id, old, new) for (id, old, new) in xs if (id in uuids || something(new, old).name in names)] + if filter + # Find packages matching the filter + matching_ids = Set{UUID}() + for (id, old, new) in xs + if (id in uuids || something(new, old).name in names) + push!(matching_ids, id) + end + end + # In manifest mode, also include all dependencies of matching packages + if manifest && !isempty(matching_ids) + deps_to_add = Set{UUID}() + for id in matching_ids + entry = get(env.manifest, id, nothing) + if entry !== nothing + union!(deps_to_add, values(entry.deps)) + end + end + union!(matching_ids, deps_to_add) + end + xs = eltype(xs)[(id, old, new) for (id, old, new) in xs if id in matching_ids] + end if isempty(xs) - printpkgstyle(io, Symbol("No Matches"), - "in $(diff ? "diff for " : "")$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent) + printpkgstyle( + io, Symbol("No Matches"), + "in $(diff ? "diff for " : "")$(pathrepr(manifest ? 
env.manifest_file : env.project_file))", ignore_indent + ) return nothing end # main print - printpkgstyle(io, header, pathrepr(manifest ? env.manifest_file : env.project_file), ignore_indent) + readonly_suffix = env.project.readonly ? " (readonly)" : "" + printpkgstyle(io, header, pathrepr(manifest ? env.manifest_file : env.project_file) * readonly_suffix, ignore_indent) if workspace && !manifest for (path, _) in env.workspace relative_path = Types.relative_project_path(env.project_file, path) @@ -2675,14 +3505,27 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie continue end + # Deprecated info + deprecation_info = nothing + pkg_deprecated = false + if !isnothing(new) + pkg_spec = something(new, old) + deprecation_info = get_pkg_deprecation_info(pkg_spec, registries) + pkg_deprecated = deprecation_info !== nothing + end + + # if we are running with deprecated, only show packages that are deprecated + if deprecated && !pkg_deprecated + continue + end # TODO: Show extension deps for project as well? 
pkg_downloaded = !is_instantiated(new) || is_package_downloaded(env.manifest_file, new) new_ver_avail = !latest_version && !Operations.is_tracking_repo(new) && !Operations.is_tracking_path(new) - pkg_upgradable = new_ver_avail && isempty(cinfo[1]) - pkg_heldback = new_ver_avail && !isempty(cinfo[1]) + pkg_upgradable = new_ver_avail && cinfo !== nothing && isempty(cinfo[1]) + pkg_heldback = new_ver_avail && cinfo !== nothing && !isempty(cinfo[1]) if !pkg_downloaded && (pkg_upgradable || pkg_heldback) # allow space in the gutter for two icons on a single line @@ -2693,12 +3536,12 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie no_visible_packages_heldback &= (!changed || !pkg_heldback) no_packages_heldback &= !pkg_heldback - push!(package_statuses, PackageStatusData(uuid, old, new, pkg_downloaded, pkg_upgradable, pkg_heldback, cinfo, changed, ext_info)) + push!(package_statuses, PackageStatusData(uuid, old, new, pkg_downloaded, pkg_upgradable, pkg_heldback, cinfo, changed, ext_info, deprecation_info)) end for pkg in package_statuses - pad = 0 - print_padding(x) = (print(io, x); pad += 1) + pad = Ref(0) + print_padding(x) = (print(io, x); pad[] += 1) if !pkg.downloaded print_padding(not_installed_indicator) @@ -2712,7 +3555,7 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie end # Fill the remaining padding with spaces - while pad < lpadding + while pad[] < lpadding print_padding(" ") end @@ -2720,20 +3563,62 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie diff ? 
print_diff(io, pkg.old, pkg.new) : print_single(io, pkg.new) + # show if package is yanked + pkg_spec = something(pkg.new, pkg.old) + if is_pkgversion_yanked(pkg_spec, registries) + printstyled(io, " [yanked]"; color = :yellow) + end + + # show if package is deprecated + if pkg.deprecation_info !== nothing + printstyled(io, " [deprecated]"; color = :yellow) + end + + # show deprecation details when using --deprecated flag + if deprecated && !diff && pkg.deprecation_info !== nothing + reason = get(pkg.deprecation_info, "reason", nothing) + alternative = get(pkg.deprecation_info, "alternative", nothing) + if reason !== nothing + printstyled(io, " (reason: ", reason, ")"; color = :yellow) + end + if alternative !== nothing + printstyled(io, " (alternative: ", alternative, ")"; color = :yellow) + end + end + if outdated && !diff && pkg.compat_data !== nothing packages_holding_back, max_version, max_version_compat = pkg.compat_data if pkg.new.version !== max_version_compat && max_version_compat != max_version - printstyled(io, " [