diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000000..37459703e9
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,2 @@
+# ran runic on the code base
+a84228360d6cff568a55911733e830cdf1c492da
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000000..c558006ed1
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,11 @@
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "monthly"
+ open-pull-requests-limit: 100
+ labels:
+ - "dependencies"
+ - "github-actions"
+ - "ci"
diff --git a/.github/workflows/backport-label-audit.yml b/.github/workflows/backport-label-audit.yml
new file mode 100644
index 0000000000..cb90223830
--- /dev/null
+++ b/.github/workflows/backport-label-audit.yml
@@ -0,0 +1,62 @@
+name: Backport Label Audit
+# Run this workflow manually to audit backport labels on pull requests
+# and remove labels from PRs that have already been backported.
+# Optionally specify a release version to limit the audit to that version
+
+on:
+ workflow_dispatch:
+ inputs:
+ version:
+ description: 'Release version to audit (e.g., 1.13). Leave empty to audit all versions.'
+ required: false
+ type: string
+ dry_run:
+ description: 'Dry run (only report, do not modify)'
+ required: true
+ type: choice
+ options:
+ - 'true'
+ - 'false'
+ default: 'true'
+
+jobs:
+ audit-backport-labels:
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ steps:
+ - name: Checkout Backporter
+ uses: actions/checkout@v6
+ with:
+ repository: KristofferC/Backporter
+ ref: master
+ path: backporter
+
+ - name: Setup Julia
+ uses: julia-actions/setup-julia@v2
+ with:
+ version: '1'
+
+ - name: Cache Julia packages
+ uses: julia-actions/cache@v3
+ with:
+ cache-name: backporter
+
+ - name: Install dependencies
+ run: |
+ cd backporter
+ julia --project -e 'using Pkg; Pkg.instantiate()'
+
+ - name: Run backport label audit
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ cd backporter
+ ARGS="--audit -r ${{ github.repository }}"
+ if [ -n "${{ inputs.version }}" ]; then
+ ARGS="$ARGS -v ${{ inputs.version }}"
+ fi
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+ ARGS="$ARGS --dry-run"
+ fi
+ julia --project backporter.jl $ARGS
diff --git a/.github/workflows/backport-label-cleanup.yml b/.github/workflows/backport-label-cleanup.yml
new file mode 100644
index 0000000000..4bc9930b24
--- /dev/null
+++ b/.github/workflows/backport-label-cleanup.yml
@@ -0,0 +1,64 @@
+name: Backport Label Cleanup
+# Runs automatically when a pull request to a release branch is merged
+# to remove backport labels from the merged PRs
+
+on:
+ pull_request:
+ types: [closed]
+ branches:
+ - 'release-*'
+
+jobs:
+ remove-backport-labels:
+ if: github.event.pull_request.merged == true
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ steps:
+ - name: Extract version from branch
+ id: extract
+ run: |
+ BRANCH="${{ github.event.pull_request.base.ref }}"
+ if [[ "$BRANCH" =~ ^release-([0-9]+\.[0-9]+)$ ]]; then
+ echo "version=${BASH_REMATCH[1]}" >> "$GITHUB_OUTPUT"
+ else
+ echo "Branch $BRANCH does not match release-X.Y pattern"
+ exit 0
+ fi
+
+ - name: Checkout Backporter
+ if: steps.extract.outputs.version != ''
+ uses: actions/checkout@v6
+ with:
+ repository: KristofferC/Backporter
+ ref: master
+ path: backporter
+
+ - name: Setup Julia
+ if: steps.extract.outputs.version != ''
+ uses: julia-actions/setup-julia@v2
+ with:
+ version: '1'
+
+ - name: Cache Julia packages
+ if: steps.extract.outputs.version != ''
+ uses: julia-actions/cache@v3
+ with:
+ cache-name: backporter
+
+ - name: Install dependencies
+ if: steps.extract.outputs.version != ''
+ run: |
+ cd backporter
+ julia --project -e 'using Pkg; Pkg.instantiate()'
+
+ - name: Run backport label cleanup
+ if: steps.extract.outputs.version != ''
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ cd backporter
+ julia --project backporter.jl --audit \
+ -v ${{ steps.extract.outputs.version }} \
+ -r ${{ github.repository }} \
+ --cleanup-pr ${{ github.event.pull_request.number }}
diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml
new file mode 100644
index 0000000000..f555558d5c
--- /dev/null
+++ b/.github/workflows/check.yml
@@ -0,0 +1,30 @@
+name: Code checks
+
+on:
+ pull_request:
+ push:
+ branches: ["master"]
+
+jobs:
+
+ pre-commit:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+ - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1
+ env:
+ # Skip runic-pre-commit since we use runic-action below instead
+ SKIP: runic
+
+ runic:
+ name: "Runic"
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+ - uses: julia-actions/setup-julia@v2
+ with:
+ version: '1.11'
+ - uses: julia-actions/cache@v3
+ - uses: fredrikekre/runic-action@v1
+ with:
+ version: "1.4" # Keep version in sync with .pre-commit-config.yaml
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 11f1643502..0173d2722d 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,9 +1,6 @@
name: Run tests
on:
pull_request:
- branches:
- - 'master'
- - 'release-*'
push:
branches:
- 'master'
@@ -55,15 +52,15 @@ jobs:
julia-version: 'nightly'
pkg-server: "pkg.julialang.org"
steps:
- - name: Set git to use LF and fix TEMP on windows
- if: matrix.os == 'windows-latest'
+      - name: Set git to use LF and fix TEMP (windows)
+ if: runner.os == 'Windows'
run: |
git config --global core.autocrlf false
git config --global core.eol lf
# See https://github.com/actions/virtual-environments/issues/712
echo "TMP=${USERPROFILE}\AppData\Local\Temp" >> ${GITHUB_ENV}
echo "TEMP=${USERPROFILE}\AppData\Local\Temp" >> ${GITHUB_ENV}
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
- uses: julia-actions/setup-julia@v2
with:
version: ${{ matrix.julia-version }}
@@ -71,10 +68,11 @@ jobs:
- uses: julia-actions/julia-runtest@v1
with:
coverage: true
+ depwarn: error
env:
JULIA_PKG_SERVER: ${{ matrix.pkg-server }}
JULIA_TEST_VERBOSE_LOGS_DIR: ${{ github.workspace }}
- - uses: actions/upload-artifact@v4
+ - uses: actions/upload-artifact@v7
if: ${{ always() }}
with:
name: ${{ join(matrix.*, '-') }}_Pkg.log
@@ -82,21 +80,29 @@ jobs:
- uses: julia-actions/julia-processcoverage@v1
env:
JULIA_PKG_SERVER: ${{ matrix.pkg-server }}
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v6
with:
- file: lcov.info
+ files: lcov.info
+ token: ${{ secrets.CODECOV_TOKEN }}
docs:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
- uses: julia-actions/setup-julia@v2
with:
# version: '1.6'
version: 'nightly'
+ - uses: julia-actions/cache@v3
- name: Generate docs
run: |
julia --project --color=yes -e 'using Pkg; Pkg.activate("docs"); Pkg.instantiate();'
julia --project=docs --color=yes docs/make.jl pdf
env:
DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }}
+ - name: Upload documentation artifacts
+ uses: actions/upload-artifact@v7
+ if: always()
+ with:
+ name: pkg-docs
+ path: docs/build/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000..68066c2cc2
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+repos:
+ - repo: 'https://github.com/pre-commit/pre-commit-hooks'
+ rev: v5.0.0
+ hooks:
+ - id: check-added-large-files
+ - id: check-case-conflict
+ # - id: check-toml # we have tomls with invalid syntax for tests
+ - id: check-yaml
+ - id: end-of-file-fixer
+ - id: mixed-line-ending
+ - id: trailing-whitespace
+ - repo: 'https://github.com/fredrikekre/runic-pre-commit'
+ rev: v2.0.1
+ hooks:
+ - id: runic
+ additional_dependencies:
+          - 'Runic@1.4' # Keep version in sync with .github/workflows/check.yml
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 056a6f1f36..d6c8707f29 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,9 +1,82 @@
+Pkg v1.14 Release Notes
+=======================
+
+- During package source installation, Pkg now reports when a package has an Artifacts.toml but no artifacts match the
+ current platform. ([#4646])
+
+Pkg v1.13 Release Notes
+=======================
+
+- `Pkg.test` now respects the `--check-bounds` setting from the parent Julia session instead of forcing `--check-bounds=yes`.
+
+- Interactive precompilation now supports keyboard controls: `d`/`q`/`]` to detach (letting precompilation continue
+ silently in the background while returning to the REPL), `c` to cancel, `i` for a profile peek, `v` to toggle
+ verbose mode, `?`/`h` for help, and `Ctrl-C` to interrupt. After detaching, use `pkg> precompile --monitor` to
+ reattach, `--stop` to stop gracefully, or `--cancel` to cancel immediately. ([#4602])
+- Project.toml environments now support a `readonly` field to mark environments as read-only, preventing modifications.
+ ([#4284])
+- `Pkg.build` now supports an `allow_reresolve` keyword argument to control whether the build process can re-resolve
+ package versions, similar to the existing option for `Pkg.test`. ([#3329])
+- Packages are now automatically added to `[sources]` when they are added by url or devved. ([#4225])
+- Packages added via URL now honor nested `[sources]` entries, allowing private dependency chains to resolve without registry metadata. ([#4366])
+- `update` now shows a helpful tip when trying to upgrade a specific package that can be upgraded but is held back
+ because it's part of a less optimal resolver solution ([#4266])
+- `Pkg.status` now displays yanked packages with a `[yanked]` indicator and shows a warning when yanked packages are
+ present. `Pkg.resolve` errors also display warnings about yanked packages that are not resolvable. ([#4310])
+- Added `pkg> compat --current` command to automatically populate missing compat entries with the currently resolved
+ package versions. Use `pkg> compat --current` for all packages or `pkg> compat Foo --current` for specific packages.
+ ([#3266])
+- Added `Pkg.precompile() do` block syntax to delay autoprecompilation until after multiple operations complete,
+ improving efficiency when performing several environment changes. ([#4262])
+- Added `Pkg.autoprecompilation_enabled(state::Bool)` to globally enable or disable automatic precompilation for Pkg
+ operations. ([#4262])
+- Implemented atomic TOML writes to prevent data corruption when Pkg operations are interrupted or multiple processes
+ write simultaneously. All TOML files are now written atomically using temporary files and atomic moves. ([#4293])
+- Implemented lazy loading for RegistryInstance to significantly improve startup performance for operations that don't
+ require full registry data. This reduces `Pkg.instantiate()` time by approximately 60% in many cases. ([#4304])
+- Added support for directly adding git submodules via `Pkg.add(path="/path/to/git-submodule.jl")`. ([#3344])
+- Enhanced REPL user experience by automatically detecting and stripping accidental leading `]` characters in commands.
+ ([#3122])
+- Improved tip messages to show REPL mode syntax when operating in REPL mode. ([#3854])
+- Enhanced error handling with more descriptive error messages when operations fail on empty URLs during git repository
+ installation or registry discovery. ([#4282])
+- Improved error messages for invalid compat entries to provide better guidance for fixing them. ([#4302])
+- Added warnings when attempting to add local paths that contain dirty git repositories. ([#4309])
+- Enhanced package parsing to better handle complex URLs and paths with branch/tag/subdir specifiers. ([#4299])
+- Improved artifact download behavior to only attempt downloads from the Pkg server when the package is registered on
+ that server's registries. ([#4297])
+- Added comprehensive documentation page about depots, including depot layouts and configuration. ([#2245])
+- Enhanced error handling for packages missing from registries or manifests with more informative messages. ([#4303])
+- Added more robust error handling when packages have revisions but no source information. ([#4311])
+- Enhanced registry status reporting with more detailed information. ([#4300])
+- Fixed various edge cases in package resolution and manifest handling. ([#4307], [#4308], [#4312])
+- Improved handling of path separators across different operating systems. ([#4305])
+- Added better error messages when accessing private PackageSpec.repo field. ([#4170])
+
Pkg v1.12 Release Notes
=======================
- Pkg now has support for "workspaces" which is a way to resolve multiple project files into a single manifest.
- The functions `Pkg.status`, `Pkg.why`, `Pkg.instantiate`, `Pkg.precompile` (and their REPL variants) have been updated
- to take a `workspace` option. Read more about this feature in the manual about the TOML-files.
+ The functions `Pkg.status`, `Pkg.why`, `Pkg.instantiate`, `Pkg.precompile` (and their REPL variants) have been
+ updated to take a `workspace` option, with fixes for workspace path collection and package resolution in workspace
+ environments. Read more about this feature in the manual about the TOML-files. ([#3841], [#4229])
+- Pkg now supports "apps" which are Julia packages that can be run directly from the terminal after installation.
+ Apps can be defined in a package's Project.toml and installed via Pkg. Apps now support multiple apps per package
+ via submodules, allowing packages to define multiple command-line applications, with enhanced functionality including
+ update capabilities and better handling of already installed apps. ([#3772], [#4277], [#4263])
+- `status` now shows when different versions/sources of dependencies are loaded than that which is expected by the
+ manifest ([#4109])
+- When adding or developing a package that exists in the `[weakdeps]` section, it is now automatically removed from
+ weak dependencies and added as a regular dependency. ([#3865])
+- Enhanced fuzzy matching algorithm for package name suggestions with improved multi-factor scoring for better package
+ name suggestions. ([#4287])
+- The Pkg REPL now supports GitHub pull request URLs, allowing direct package installation from PRs via
+ `pkg> add https://github.com/Org/Package.jl/pull/123` ([#4295])
+- Improved git repository cloning performance by changing from `refs/*` to `refs/heads/*` to speed up operations on
+ repositories with many branches. ([#2330])
+- Improved REPL command parsing to handle leading whitespace with comma-separated packages. ([#4274])
+- Improved error messages when providing incorrect package UUIDs. ([#4270])
+- Added confirmation prompts before removing compat entries to prevent accidental deletions. ([#4254])
Pkg v1.11 Release Notes
=======================
@@ -21,7 +94,7 @@ Pkg v1.10 Release Notes
=======================
Pkg v1.9 Release Notes
-=======================
+======================
- New functionality: `Pkg.why` and `pkg> why` to show why a package is inside the environment (shows all "paths" to a package starting at the direct dependencies).
- When code coverage tracking is enabled for `Pkg.test` the new path-specific code-coverage option is used to limit coverage
@@ -83,6 +156,16 @@ Pkg v1.7 Release Notes
- The `mode` keyword for `PackageSpec` has been removed ([#2454]).
+[#4225]: https://github.com/JuliaLang/Pkg.jl/issues/4225
+[#4284]: https://github.com/JuliaLang/Pkg.jl/issues/4284
+[#3526]: https://github.com/JuliaLang/Pkg.jl/issues/3526
+[#3708]: https://github.com/JuliaLang/Pkg.jl/issues/3708
+[#3732]: https://github.com/JuliaLang/Pkg.jl/issues/3732
+[#3772]: https://github.com/JuliaLang/Pkg.jl/issues/3772
+[#3783]: https://github.com/JuliaLang/Pkg.jl/issues/3783
+[#3841]: https://github.com/JuliaLang/Pkg.jl/issues/3841
+[#3865]: https://github.com/JuliaLang/Pkg.jl/issues/3865
+[#4109]: https://github.com/JuliaLang/Pkg.jl/issues/4109
[#2284]: https://github.com/JuliaLang/Pkg.jl/issues/2284
[#2431]: https://github.com/JuliaLang/Pkg.jl/issues/2431
[#2432]: https://github.com/JuliaLang/Pkg.jl/issues/2432
@@ -101,3 +184,36 @@ Pkg v1.7 Release Notes
[#2995]: https://github.com/JuliaLang/Pkg.jl/issues/2995
[#3002]: https://github.com/JuliaLang/Pkg.jl/issues/3002
[#3021]: https://github.com/JuliaLang/Pkg.jl/issues/3021
+[#3266]: https://github.com/JuliaLang/Pkg.jl/pull/3266
+[#4266]: https://github.com/JuliaLang/Pkg.jl/pull/4266
+[#4310]: https://github.com/JuliaLang/Pkg.jl/pull/4310
+[#3329]: https://github.com/JuliaLang/Pkg.jl/pull/3329
+[#4262]: https://github.com/JuliaLang/Pkg.jl/pull/4262
+[#4293]: https://github.com/JuliaLang/Pkg.jl/pull/4293
+[#4304]: https://github.com/JuliaLang/Pkg.jl/pull/4304
+[#3344]: https://github.com/JuliaLang/Pkg.jl/pull/3344
+[#2330]: https://github.com/JuliaLang/Pkg.jl/pull/2330
+[#3122]: https://github.com/JuliaLang/Pkg.jl/pull/3122
+[#3854]: https://github.com/JuliaLang/Pkg.jl/pull/3854
+[#4282]: https://github.com/JuliaLang/Pkg.jl/pull/4282
+[#4302]: https://github.com/JuliaLang/Pkg.jl/pull/4302
+[#4309]: https://github.com/JuliaLang/Pkg.jl/pull/4309
+[#4299]: https://github.com/JuliaLang/Pkg.jl/pull/4299
+[#4295]: https://github.com/JuliaLang/Pkg.jl/pull/4295
+[#4277]: https://github.com/JuliaLang/Pkg.jl/pull/4277
+[#4297]: https://github.com/JuliaLang/Pkg.jl/pull/4297
+[#2245]: https://github.com/JuliaLang/Pkg.jl/pull/2245
+[#4303]: https://github.com/JuliaLang/Pkg.jl/pull/4303
+[#4254]: https://github.com/JuliaLang/Pkg.jl/pull/4254
+[#4270]: https://github.com/JuliaLang/Pkg.jl/pull/4270
+[#4263]: https://github.com/JuliaLang/Pkg.jl/pull/4263
+[#4229]: https://github.com/JuliaLang/Pkg.jl/pull/4229
+[#4274]: https://github.com/JuliaLang/Pkg.jl/pull/4274
+[#4311]: https://github.com/JuliaLang/Pkg.jl/pull/4311
+[#4300]: https://github.com/JuliaLang/Pkg.jl/pull/4300
+[#4307]: https://github.com/JuliaLang/Pkg.jl/pull/4307
+[#4308]: https://github.com/JuliaLang/Pkg.jl/pull/4308
+[#4312]: https://github.com/JuliaLang/Pkg.jl/pull/4312
+[#4305]: https://github.com/JuliaLang/Pkg.jl/pull/4305
+[#4170]: https://github.com/JuliaLang/Pkg.jl/pull/4170
+[#4287]: https://github.com/JuliaLang/Pkg.jl/pull/4287
diff --git a/Project.toml b/Project.toml
index 4ddbbefd00..4a2f27d233 100644
--- a/Project.toml
+++ b/Project.toml
@@ -3,7 +3,7 @@ uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
keywords = ["package management"]
license = "MIT"
desc = "The next-generation Julia package manager."
-version = "1.12.0"
+version = "1.14.0"
[workspace]
projects = ["test", "docs"]
@@ -23,6 +23,7 @@ SHA = "ea8e919c-243c-51af-8825-aaa63cd721ce"
TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
Tar = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e"
UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
+Zstd_jll = "3161d3a3-bdf6-5164-811a-617609db77b4"
p7zip_jll = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0"
[weakdeps]
@@ -32,4 +33,21 @@ REPL = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
REPLExt = "REPL"
[compat]
+Artifacts = "1.11"
+Dates = "1.11"
+Downloads = "1.6"
+FileWatching = "1.11"
+LibGit2 = "1.11"
+Libdl = "1.11"
+Logging = "1.11"
+Markdown = "1.11"
+Printf = "1.11"
+REPL = "1.11"
+Random = "1.11"
+SHA = "0.7, 1"
+TOML = "1"
+Tar = "1.10"
+UUIDs = "1.11"
+Zstd_jll = "1.5.7"
julia = "1.12"
+p7zip_jll = "17.5"
diff --git a/README.md b/README.md
index 5cc370c4c9..ad4ec9f25c 100644
--- a/README.md
+++ b/README.md
@@ -13,12 +13,29 @@ If you want to develop this package do the following steps:
- Make a fork and then clone the repo locally on your computer
- Change the current directory to the Pkg repo you just cloned and start julia with `julia --project`.
- `import Pkg` will now load the files in the cloned repo instead of the Pkg stdlib.
-- To test your changes, simply do `include("test/runtests.jl")`.
+- To test your changes, simply do `Pkg.test()`.
If you need to build Julia from source with a Git checkout of Pkg, then instead use `make DEPS_GIT=Pkg` when building Julia. The `Pkg` repo is in `stdlib/Pkg`, and created initially with a detached `HEAD`. If you're doing this from a pre-existing Julia repository, you may need to `make clean` beforehand.
If you need to build Julia from source with Git checkouts of two or more stdlibs, please see the instructions in the [`Building Julia from source with a Git checkout of a stdlib`](https://github.com/JuliaLang/julia/blob/master/doc/src/devdocs/build/build.md#building-julia-from-source-with-a-git-checkout-of-a-stdlib) section of the [`doc/src/devdocs/build/build.md`](https://github.com/JuliaLang/julia/blob/master/doc/src/devdocs/build/build.md) file within the Julia devdocs.
+## Pre-commit hooks
+
+This repository uses pre-commit hooks to automatically check and format code before commits. The hooks perform various checks including:
+
+- File size and case conflict validation
+- YAML syntax checking
+- Trailing whitespace removal and line ending fixes
+- Julia code formatting with Runic
+
+To install and use the pre-commit hooks:
+
+1. Install pre-commit: `pip install pre-commit` (or use your system's package manager)
+2. Install the hooks: `pre-commit install` from the root of the repository
+3. Run on all files: `pre-commit run --all-files` from the root of the repository
+
+Once installed, the hooks will run automatically on each commit. You can also run them manually anytime with `pre-commit run`.
+
## Synchronization with the Julia repo
To check which commit julia master uses see [JuliaLang/julia/stdlib/Pkg.version](https://github.com/JuliaLang/julia/blob/master/stdlib/Pkg.version).
diff --git a/contrib/list_missing_pkg_tags.jl b/contrib/list_missing_pkg_tags.jl
new file mode 100644
index 0000000000..93309889aa
--- /dev/null
+++ b/contrib/list_missing_pkg_tags.jl
@@ -0,0 +1,89 @@
+using LibGit2
+
+const JULIA_REPO_URL = "https://github.com/JuliaLang/julia.git"
+const JULIA_REPO_DIR = "julia"
+const PKG_VERSION_PATH = "stdlib/Pkg.version"
+const PKG_REPO_URL = "https://github.com/JuliaLang/Pkg.jl.git"
+const PKG_REPO_DIR = "Pkg.jl"
+
+function checkout_or_update_repo(url, dir)
+ return if isdir(dir)
+ println("Updating existing repository: $dir")
+ repo = LibGit2.GitRepo(dir)
+ LibGit2.fetch(repo)
+ else
+ println("Cloning repository: $url")
+ LibGit2.clone(url, dir)
+ end
+end
+
+function get_tags(repo)
+ refs = LibGit2.ref_list(repo)
+ tags = filter(ref -> startswith(ref, "refs/tags/"), refs)
+ return sort!(replace.(tags, "refs/tags/" => ""))
+end
+
+function is_stable_v1_release(tag)
+ return occursin(r"^v\d+\.\d+\.\d+$", tag) && VersionNumber(tag) >= v"1.0.0"
+end
+
+function extract_pkg_sha1(text::AbstractString)
+ m = match(r"PKG_SHA1\s*=\s*([a-f0-9]{40})", text)
+ return m !== nothing ? m[1] : nothing
+end
+
+function get_commit_hash_for_pkg_version(repo, tag)
+ return try
+ tag_ref = LibGit2.GitReference(repo, "refs/tags/" * tag)
+ LibGit2.checkout!(repo, string(LibGit2.GitHash(LibGit2.peel(tag_ref))))
+ version_file = joinpath(JULIA_REPO_DIR, PKG_VERSION_PATH)
+ if isfile(version_file)
+ return extract_pkg_sha1(readchomp(version_file))
+ else
+ println("Warning: Pkg.version file missing for tag $tag")
+ return nothing
+ end
+ catch
+ println("Error processing tag $tag")
+ rethrow()
+ end
+end
+
+tempdir = mktempdir()
+cd(tempdir) do
+ # Update Julia repo
+ checkout_or_update_repo(JULIA_REPO_URL, JULIA_REPO_DIR)
+ julia_repo = LibGit2.GitRepo(JULIA_REPO_DIR)
+
+ # Get Julia tags, filtering only stable releases
+ julia_tags = filter(is_stable_v1_release, get_tags(julia_repo))
+ version_commit_map = Dict{String, String}()
+
+ for tag in julia_tags
+ println("Processing Julia tag: $tag")
+ commit_hash = get_commit_hash_for_pkg_version(julia_repo, tag)
+ if commit_hash !== nothing
+ version_commit_map[tag] = commit_hash
+ end
+ end
+
+ # Update Pkg.jl repo
+ checkout_or_update_repo(PKG_REPO_URL, PKG_REPO_DIR)
+ pkg_repo = LibGit2.GitRepo(PKG_REPO_DIR)
+
+ # Get existing tags in Pkg.jl
+ pkg_tags = Set(get_tags(pkg_repo))
+
+ # Filter out versions that already exist
+ missing_versions = filter(v -> v ∉ pkg_tags, collect(keys(version_commit_map)))
+
+ # Sort versions numerically
+ sort!(missing_versions, by = VersionNumber)
+
+ # Generate `git tag` commands
+ println("\nGit tag commands for missing Pkg.jl versions:")
+ for version in missing_versions
+ commit = version_commit_map[version]
+ println("git tag $version $commit")
+ end
+end
diff --git a/docs/NEWS-update.jl b/docs/NEWS-update.jl
index 3812e9e437..d0ca10d391 100644
--- a/docs/NEWS-update.jl
+++ b/docs/NEWS-update.jl
@@ -7,11 +7,11 @@ s = read(NEWS, String)
m = match(r"\[#[0-9]+\]:", s)
if m !== nothing
- s = s[1:m.offset-1]
+ s = s[1:(m.offset - 1)]
end
footnote(n) = "[#$n]: https://github.com/JuliaLang/Pkg.jl/issues/$n"
-N = map(m -> parse(Int,m.captures[1]), eachmatch(r"\[#([0-9]+)\]", s))
+N = map(m -> parse(Int, m.captures[1]), eachmatch(r"\[#([0-9]+)\]", s))
foots = join(map(footnote, sort!(unique(N))), "\n")
open(NEWS, "w") do f
diff --git a/docs/generate.jl b/docs/generate.jl
index fa4af617ef..3d227f1374 100644
--- a/docs/generate.jl
+++ b/docs/generate.jl
@@ -4,38 +4,42 @@
function generate(io, command)
cmd_nospace = replace(command, " " => "-")
- println(io, """
- ```@raw html
-
-
-
- ```
- ```@eval
- using Pkg
- Dict(Pkg.REPLMode.canonical_names())["$(command)"].help
- ```
- ```@raw html
-
-
- ```
- """)
+ return println(
+ io, """
+ ```@raw html
+
+
+
+ ```
+ ```@eval
+ using Pkg
+ Dict(Pkg.REPLMode.canonical_names())["$(command)"].help
+ ```
+ ```@raw html
+
+
+ ```
+ """
+ )
end
function generate()
io = IOBuffer()
- println(io, """
+ println(
+ io, """
# [**11.** REPL Mode Reference](@id REPL-Mode-Reference)
This section describes available commands in the Pkg REPL.
The Pkg REPL mode is mostly meant for interactive use,
and for non-interactive use it is recommended to use the
functional API, see [API Reference](@ref API-Reference).
- """)
+ """
+ )
# list commands
println(io, "## `package` commands")
foreach(command -> generate(io, command), ["add", "build", "compat", "develop", "free", "generate", "pin", "remove", "test", "update"])
diff --git a/docs/make.jl b/docs/make.jl
index be6905de5a..6b38dad0d7 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -9,7 +9,7 @@ const formats = Any[
Documenter.HTML(
prettyurls = get(ENV, "CI", nothing) == "true",
canonical = "https://julialang.github.io/Pkg.jl/v1/",
- assets = ["assets/custom.css"],
+ assets = ["assets/custom.css", "assets/favicon.ico"],
),
]
if "pdf" in ARGS
@@ -17,7 +17,7 @@ if "pdf" in ARGS
end
# setup for doctesting
-DocMeta.setdocmeta!(Pkg.BinaryPlatforms, :DocTestSetup, :(using Base.BinaryPlatforms); recursive=true)
+DocMeta.setdocmeta!(Pkg.BinaryPlatforms, :DocTestSetup, :(using Base.BinaryPlatforms); recursive = true)
# Run doctests first and disable them in makedocs
Documenter.doctest(joinpath(@__DIR__, "src"), [Pkg])
@@ -35,6 +35,7 @@ makedocs(
"managing-packages.md",
"environments.md",
"creating-packages.md",
+ "apps.md",
"compatibility.md",
"registries.md",
"artifacts.md",
@@ -42,6 +43,8 @@ makedocs(
"toml-files.md",
"repl.md",
"api.md",
+ "protocol.md",
+ "depots.md",
],
)
diff --git a/docs/src/api.md b/docs/src/api.md
index 61979453b9..ed3c15b20b 100644
--- a/docs/src/api.md
+++ b/docs/src/api.md
@@ -1,4 +1,4 @@
-# [**12.** API Reference](@id API-Reference)
+# [**13.** API Reference](@id API-Reference)
This section describes the functional API for interacting with Pkg.jl.
It is recommended to use the functional API, rather than the Pkg REPL mode,
@@ -39,6 +39,7 @@ Pkg.gc
Pkg.status
Pkg.compat
Pkg.precompile
+Pkg.autoprecompilation_enabled
Pkg.offline
Pkg.why
Pkg.dependencies
@@ -47,6 +48,7 @@ Pkg.project
Pkg.undo
Pkg.redo
Pkg.setprotocol!
+Pkg.readonly
PackageSpec
PackageMode
UpgradeLevel
@@ -79,3 +81,10 @@ Pkg.Artifacts.ensure_artifact_installed
Pkg.Artifacts.ensure_all_artifacts_installed
Pkg.Artifacts.archive_artifact
```
+
+## [Package Server Authentication Hooks](@id Package-Server-Authentication-Hooks)
+
+```@docs
+Pkg.PlatformEngines.register_auth_error_handler
+Pkg.PlatformEngines.deregister_auth_error_handler
+```
diff --git a/docs/src/apps.md b/docs/src/apps.md
new file mode 100644
index 0000000000..0606f9db52
--- /dev/null
+++ b/docs/src/apps.md
@@ -0,0 +1,160 @@
+# [**6.** Apps](@id Apps)
+
+!!! note
+ The app support in Pkg is currently considered experimental and some functionality and API may change.
+
+ Some inconveniences that can be encountered are:
+    - You need to manually make `~/.julia/bin` available on the `PATH` environment variable.
+ - The path to the julia executable used is the same as the one used to install the app. If this
+ julia installation gets removed, you might need to reinstall the app.
+
+Apps are Julia packages that are intended to be run as "standalone programs" (by e.g. typing the name of the app in the terminal possibly together with some arguments or flags/options).
+This is in contrast to most Julia packages that are used as "libraries" and are loaded by other files or in the Julia REPL.
+
+## Creating a Julia app
+
+A Julia app is structured similar to a standard Julia library with the following additions:
+
+- A `@main` entry point in the package module (see the [Julia help on `@main`](https://docs.julialang.org/en/v1/manual/command-line-interface/#The-Main.main-entry-point) for details)
+- An `[apps]` section in the `Project.toml` file listing the executable names that the package provides.
+
+A very simple example of an app that prints the reversed input arguments would be:
+
+```julia
+# src/MyReverseApp.jl
+module MyReverseApp
+
+function (@main)(ARGS)
+ for arg in ARGS
+ print(stdout, reverse(arg), " ")
+ end
+ return
+end
+
+end # module
+```
+
+```toml
+# Project.toml
+
+# standard fields here
+
+[apps]
+reverse = {}
+```
+The empty table `{}` leaves room for specifying metadata about the app.
+
+After installing this app one could run:
+
+```
+$ reverse some input string
+ emos tupni gnirts
+```
+
+directly in the terminal.
+
+## Multiple Apps per Package
+
+A single package can define multiple apps by using submodules. Each app can have its own entry point in a different submodule of the package.
+
+```julia
+# src/MyMultiApp.jl
+module MyMultiApp
+
+function (@main)(ARGS)
+ println("Main app: ", join(ARGS, " "))
+end
+
+include("CLI.jl")
+
+end # module
+```
+
+```julia
+# src/CLI.jl
+module CLI
+
+function (@main)(ARGS)
+ println("CLI submodule: ", join(ARGS, " "))
+end
+
+end # module CLI
+```
+
+```toml
+# Project.toml
+
+# standard fields here
+
+[apps]
+main-app = {}
+cli-app = { submodule = "CLI" }
+```
+
+This will create two executables:
+- `main-app` that runs `julia -m MyMultiApp`
+- `cli-app` that runs `julia -m MyMultiApp.CLI`
+
+## Configuring Julia Flags
+
+Apps can specify default Julia command-line flags that will be passed to the Julia process when the app is run. This is useful for configuring performance settings, threading, or other Julia options specific to your application.
+
+### Default Julia Flags
+
+You can specify default Julia flags in the `Project.toml` file using the `julia_flags` field:
+
+```toml
+# Project.toml
+
+[apps]
+myapp = { julia_flags = ["--threads=4", "--optimize=2"] }
+performance-app = { julia_flags = ["--threads=auto", "--startup-file=yes", "--depwarn=no"] }
+debug-app = { submodule = "Debug", julia_flags = ["--check-bounds=yes", "--optimize=0"] }
+```
+
+With this configuration:
+- `myapp` will run with 4 threads and optimization level 2
+- `performance-app` will run with automatic thread detection, startup file enabled, and deprecation warnings disabled
+- `debug-app` will run with bounds checking enabled and no optimization
+
+### Runtime Julia Flags
+
+You can override or add to the default Julia flags at runtime using the `--` separator. Everything before `--` will be passed as flags to Julia, and everything after `--` will be passed as arguments to your app:
+
+```bash
+# Uses default flags from Project.toml
+myapp input.txt output.txt
+
+# Override thread count, keep other defaults
+myapp --threads=8 -- input.txt output.txt
+
+# Add additional flags
+myapp --threads=2 --optimize=3 --check-bounds=yes -- input.txt output.txt
+
+# Only Julia flags, no app arguments
+myapp --threads=1 --
+```
+
+The final Julia command will combine:
+1. Fixed flags (like `--startup-file=no` and `-m ModuleName`)
+2. Default flags from `julia_flags` in Project.toml
+3. Runtime flags specified before `--`
+4. App arguments specified after `--`
+
+### Overriding the Julia Executable
+
+By default, apps run with the same Julia executable that was used to install them. You can override this globally using the `JULIA_APPS_JULIA_CMD` environment variable:
+
+```bash
+# Use a different Julia version for all apps
+export JULIA_APPS_JULIA_CMD=/path/to/different/julia
+myapp input.txt
+
+# On Windows
+set JULIA_APPS_JULIA_CMD=C:\path\to\different\julia.exe
+myapp input.txt
+```
+
+## Installing Julia apps
+
+The installation of Julia apps is similar to [installing Julia libraries](@ref Managing-Packages) but instead of using e.g. `Pkg.add` or `pkg> add` one uses `Pkg.Apps.add` or `pkg> app add` (`develop` is also available).
diff --git a/docs/src/artifacts.md b/docs/src/artifacts.md
index 66a55f99f5..d5fe5f38b7 100644
--- a/docs/src/artifacts.md
+++ b/docs/src/artifacts.md
@@ -1,4 +1,4 @@
-# [**8.** Artifacts](@id Artifacts)
+# [**9.** Artifacts](@id Artifacts)
`Pkg` can install and manage containers of data that are not Julia packages. These containers can contain platform-specific binaries, datasets, text, or any other kind of data that would be convenient to place within an immutable, life-cycled datastore.
These containers, (called "Artifacts") can be created locally, hosted anywhere, and automatically downloaded and unpacked upon installation of your Julia package.
@@ -230,7 +230,7 @@ This is deduced automatically by the `artifacts""` string macro, however, if you
!!! compat "Julia 1.7"
Pkg's extended platform selection requires at least Julia 1.7, and is considered experimental.
-New in Julia 1.6, `Platform` objects can have extended attributes applied to them, allowing artifacts to be tagged with things such as CUDA driver version compatibility, microarchitectural compatibility, julia version compatibility and more!
+New in Julia 1.7, `Platform` objects can have extended attributes applied to them, allowing artifacts to be tagged with things such as CUDA driver version compatibility, microarchitectural compatibility, julia version compatibility and more!
Note that this feature is considered experimental and may change in the future.
If you as a package developer find yourself needing this feature, please get in contact with us so it can evolve for the benefit of the whole ecosystem.
In order to support artifact selection at `Pkg.add()` time, `Pkg` will run the specially-named file `/.pkg/select_artifacts.jl`, passing the current platform triplet as the first argument.
diff --git a/docs/src/assets/favicon.ico b/docs/src/assets/favicon.ico
new file mode 100644
index 0000000000..eeb1edd944
Binary files /dev/null and b/docs/src/assets/favicon.ico differ
diff --git a/docs/src/basedocs.md b/docs/src/basedocs.md
index 7d51728ffe..9e07aa4ca9 100644
--- a/docs/src/basedocs.md
+++ b/docs/src/basedocs.md
@@ -4,7 +4,7 @@ EditURL = "https://github.com/JuliaLang/Pkg.jl/blob/master/docs/src/basedocs.md"
# Pkg
-Pkg is Julia's builtin package manager, and handles operations
+Pkg is Julia's built-in package manager, and handles operations
such as installing, updating and removing packages.
!!! note
diff --git a/docs/src/compatibility.md b/docs/src/compatibility.md
index bc1c58e3e9..dee8b05841 100644
--- a/docs/src/compatibility.md
+++ b/docs/src/compatibility.md
@@ -1,4 +1,4 @@
-# [**6.** Compatibility](@id Compatibility)
+# [**7.** Compatibility](@id Compatibility)
Compatibility refers to the ability to restrict the versions of the dependencies that your project is compatible with.
If the compatibility for a dependency is not given, the project is assumed to be compatible with all versions of that dependency.
@@ -22,7 +22,7 @@ The format of the version specifier is described in detail below.
The rules below apply to the `Project.toml` file; for registries, see [Registry Compat.toml](@ref).
!!! info
- Note that registration into Julia's General Registry requires each dependency to have a `[compat`] entry with an upper bound.
+ Note that registration into Julia's General Registry requires each dependency to have a `[compat]` entry with an upper bound.
## Version specifier format
@@ -97,7 +97,7 @@ PkgA = "~1.2.3" # [1.2.3, 1.3.0)
PkgB = "~1.2" # [1.2.0, 1.3.0)
PkgC = "~1" # [1.0.0, 2.0.0)
PkgD = "~0.2.3" # [0.2.3, 0.3.0)
-PkgE = "~0.0.3" # [0.0.3, 0.0.4)
+PkgE = "~0.0.3" # [0.0.3, 0.1.0)
PkgF = "~0.0" # [0.0.0, 0.1.0)
PkgG = "~0" # [0.0.0, 1.0.0)
```
@@ -164,7 +164,7 @@ PkgA = "0.2 - 0" # 0.2.0 - 0.*.* = [0.2.0, 1.0.0)
```
-## Fixing conflicts
+## [Fixing conflicts](@id Fixing-conflicts)
Version conflicts were introduced previously with an [example](@ref conflicts)
of a conflict arising in a package `D` used by two other packages, `B` and `C`.
diff --git a/docs/src/creating-packages.md b/docs/src/creating-packages.md
index 7bb72c2e91..21fedf7b1f 100644
--- a/docs/src/creating-packages.md
+++ b/docs/src/creating-packages.md
@@ -11,7 +11,7 @@
To generate the bare minimum files for a new package, use `pkg> generate`.
```julia-repl
-(@v1.8) pkg> generate HelloWorld
+(@v1.10) pkg> generate HelloWorld
```
This creates a new project `HelloWorld` in a subdirectory by the same name, with the following files (visualized with the external [`tree` command](https://linux.die.net/man/1/tree)):
@@ -118,7 +118,7 @@ describe about public symbols. A public symbol is a symbol that is exported from
package with the `export` keyword or marked as public with the `public` keyword. When you
change the behavior of something that was previously public so that the new
version no longer conforms to the specifications provided in the old version, you should
-adjust your package version number according to [Julia's variant on SemVer](#Version-specifier-format).
+adjust your package version number according to [Julia's variant on SemVer](@ref Version-specifier-format).
If you would like to include a symbol in your public API without exporting it into the
global namespace of folks who call `using YourPackage`, you should mark that symbol as
public with `public that_symbol`. Symbols marked as public with the `public` keyword are
@@ -127,7 +127,7 @@ just as public as those marked as public with the `export` keyword, but when fol
`YourPackage.that_symbol`.
Let's say we would like our `greet` function to be part of the public API, but not the
-`greet_alien` function. We could the write the following and release it as version `1.0.0`.
+`greet_alien` function. We could then write the following and release it as version `1.0.0`.
```julia
module HelloWorld
@@ -275,79 +275,159 @@ test-specific dependencies, are available, see below.
### Test-specific dependencies
-There are two ways of adding test-specific dependencies (dependencies that are not dependencies of the package but will still be available to
-load when the package is tested).
+Test-specific dependencies are dependencies that are not dependencies of the package itself but are available when the package is tested.
-#### `target` based test specific dependencies
+#### Recommended approach: Using workspaces with `test/Project.toml`
-Using this method of adding test-specific dependencies, the packages are added under an `[extras]` section and to a test target,
-e.g. to add `Markdown` and `Test` as test dependencies, add the following to the `Project.toml` file:
-
-```toml
-[extras]
-Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a"
-Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-
-[targets]
-test = ["Markdown", "Test"]
-```
-
-Note that the only supported targets are `test` and `build`, the latter of which (not recommended) can be used
-for any `deps/build.jl` scripts.
-
-#### Alternative approach: `test/Project.toml` file test specific dependencies
+!!! compat
+ Workspaces require Julia 1.12+. For older Julia versions, see the legacy approaches below.
-!!! note
- The exact interaction between `Project.toml`, `test/Project.toml` and their corresponding
- `Manifest.toml`s are not fully worked out and may be subject to change in future versions.
- The older method of adding test-specific dependencies, described in the previous section,
- will therefore be supported throughout all Julia 1.X releases.
+The recommended way to add test-specific dependencies is to use workspaces. This is done by:
-In Julia 1.2 and later test dependencies can be declared in `test/Project.toml`. When running
-tests, Pkg will automatically merge this and the package Projects to create the test environment.
+1. Adding a `[workspace]` section to your package's `Project.toml`:
-!!! note
- If no `test/Project.toml` exists Pkg will use the `target` based test specific dependencies.
+```toml
+[workspace]
+projects = ["test"]
+```
-To add a test-specific dependency, i.e. a dependency that is available only when testing,
-it is thus enough to add this dependency to the `test/Project.toml` project. This can be
-done from the Pkg REPL by activating this environment, and then use `add` as one normally
-does. Let's add the `Test` standard library as a test dependency:
+2. Creating a `test/Project.toml` file with your test dependencies:
```julia-repl
(HelloWorld) pkg> activate ./test
[ Info: activating environment at `~/HelloWorld/test/Project.toml`.
-(test) pkg> add Test
+(HelloWorld/test) pkg> dev . # add current package to test dependencies using its path
+ Resolving package versions...
+ Updating `~/HelloWorld/test/Project.toml`
+ [xxxxxxxx] + HelloWorld v0.1.0 `..`
+
+(HelloWorld/test) pkg> add Test # add other test dependencies
Resolving package versions...
Updating `~/HelloWorld/test/Project.toml`
[8dfed614] + Test
- Updating `~/HelloWorld/test/Manifest.toml`
- [...]
```
-We can now use `Test` in the test script and we can see that it gets installed when testing:
+When using workspaces, the package manager resolves dependencies for all projects in the workspace together, and creates a single `Manifest.toml` next to the base `Project.toml`. This provides better dependency resolution and makes it easier to manage test-specific dependencies.
+Note that dependencies of `HelloWorld` itself are **not** automatically inherited. Any package used directly in tests must also be listed under `[deps]` in `test/Project.toml`.
+
+!!! info
+ Unlike some earlier test dependency workflows, this one explicitly requires adding `HelloWorld` (the parent package) to your `test/Project.toml`.
+
+The resulting `test/Project.toml` will look like:
+
+```toml
+[deps]
+HelloWorld = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" # UUID from HelloWorld's Project.toml
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[sources]
+HelloWorld = {path = ".."}
+```
+
+You can now use `Test` in the test script:
```julia-repl
julia> write("test/runtests.jl",
"""
- using Test
+ using HelloWorld, Test
@test 1 == 1
""");
-(test) pkg> activate .
+(HelloWorld/test) pkg> activate .
(HelloWorld) pkg> test
Testing HelloWorld
Resolving package versions...
- Updating `/var/folders/64/76tk_g152sg6c6t0b4nkn1vw0000gn/T/tmpPzUPPw/Project.toml`
- [d8327f2a] + HelloWorld v0.1.0 [`~/.julia/dev/Pkg/HelloWorld`]
+ Testing HelloWorld tests passed
+```
+
+Workspaces can also be used for other purposes, such as documentation or benchmarks, by adding additional projects to the workspace:
+
+```toml
+[workspace]
+projects = ["test", "docs", "benchmarks"]
+```
+
+See the section on [Workspaces](@ref) in the `Project.toml` documentation for more details.
+
+#### Alternative approach: Using `[sources]` with path-based dependencies
+
+An alternative to workspaces is to use the `[sources]` section in `test/Project.toml` to reference the parent package. The `[sources]` section allows you to specify custom locations (paths or URLs) for dependencies, overriding registry information. This approach creates a **separate manifest** in the `test/` directory (unlike workspaces which create a single shared manifest).
+
+To use this approach:
+
+1. Create a `test/Project.toml` file and add your test dependencies:
+
+```julia-repl
+(HelloWorld) pkg> activate ./test
+[ Info: activating environment at `~/HelloWorld/test/Project.toml`.
+
+(HelloWorld/test) pkg> add Test
+ Resolving package versions...
+ Updating `~/HelloWorld/test/Project.toml`
[8dfed614] + Test
- Updating `/var/folders/64/76tk_g152sg6c6t0b4nkn1vw0000gn/T/tmpPzUPPw/Manifest.toml`
- [d8327f2a] + HelloWorld v0.1.0 [`~/.julia/dev/Pkg/HelloWorld`]
- Testing HelloWorld tests passed```
```
+2. Add the parent package as a dependency using `[sources]` with a relative path:
+
+```toml
+# In test/Project.toml
+[deps]
+HelloWorld = "00000000-0000-0000-0000-000000000000" # Your package UUID
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[sources]
+HelloWorld = {path = ".."}
+```
+
+The `[sources]` section tells Pkg to use the local path for `HelloWorld` instead of looking it up in a registry. This creates a separate `test/Manifest.toml` that tracks the resolved dependencies for your test environment independently from the main package manifest. You can now run tests directly:
+
+```julia-repl
+$ julia --project=test
+julia> using HelloWorld, Test
+
+julia> include("test/runtests.jl")
+```
+
+!!! note "Difference from workspaces"
+ The key difference from workspaces is that this approach uses a **separate manifest file** (`test/Manifest.toml`) for the test environment, while workspaces create a **single shared manifest** (`Manifest.toml`) that resolves all projects together. This means:
+
+ - With `[sources]` + path: Dependencies are resolved independently for each environment
+ - With workspaces: Dependencies are resolved together, ensuring compatibility across all projects in the workspace
+
+ For more details on `[sources]`, see the [`[sources]` section](@ref sources-section) in the Project.toml documentation.
+
+#### Legacy approach: `target` based test specific dependencies
+
+!!! warning
+ This approach is legacy and maintained for compatibility. New packages should use workspaces instead.
+
+Using this method, test-specific dependencies are added under an `[extras]` section and to a test target:
+
+```toml
+[extras]
+Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a"
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[targets]
+test = ["Markdown", "Test"]
+```
+
+Note that the only supported targets are `test` and `build`, the latter of which (not recommended) can be used for any `deps/build.jl` scripts.
+
+#### Legacy approach: `test/Project.toml` without workspace
+
+!!! warning
+ This approach is legacy and maintained for compatibility. New packages should use workspaces instead.
+
+In Julia 1.2 and later, test dependencies can be declared in `test/Project.toml` without using a workspace. When running tests, Pkg will automatically merge the package and test projects to create the test environment.
+
+!!! note
+ If no `test/Project.toml` exists, Pkg will use the `target` based test specific dependencies.
+
+This approach works similarly to the workspace approach, but without the workspace declaration in the main `Project.toml`.
+
## Compatibility on dependencies
Every dependency should in general have a compatibility constraint on it.
@@ -450,9 +530,7 @@ Extensions can have arbitrary names (here `ContourExt`), following the format of
In `Pkg` output, extension names are always shown together with their parent package name.
!!! compat
- Often you will put the extension dependencies into the `test` target so they are loaded when running e.g. `Pkg.test()`. On earlier Julia versions
- this requires you to also put the package in the `[extras]` section. This is unfortunate but the project verifier on older Julia versions will
- complain if this is not done.
+ Often you will want to load extension dependencies when testing your package. The recommended approach is to use workspaces and add the extension dependencies to your `test/Project.toml` (see [Test-specific dependencies](@ref adding-tests-to-packages)). For older Julia versions that don't support workspaces, you can put the extension dependencies into the `test` target, which requires you to also put the package in the `[extras]` section. The project verifier on older Julia versions will complain if this is not done.
!!! note
If you use a manifest generated by a Julia version that does not know about extensions with a Julia version that does
@@ -557,73 +635,18 @@ This is done by making the following changes (using the example above):
In the case where one wants to use an extension (without worrying about the
feature of the extension being available on older Julia versions) while still
-supporting older Julia versions the packages under `[weakdeps]` should be
+supporting older Julia versions without workspace support, the packages under `[weakdeps]` should be
duplicated into `[extras]`. This is an unfortunate duplication, but without
doing this the project verifier under older Julia versions will throw an error
if it finds packages under `[compat]` that is not listed in `[extras]`.
-## Package naming rules
-
-Package names should be sensible to most Julia users, *even to those who are not domain experts*.
-The following rules apply to the `General` registry but may be useful for other package
-registries as well.
-
-Since the `General` registry belongs to the entire community, people may have opinions about
-your package name when you publish it, especially if it's ambiguous or can be confused with
-something other than what it is. Usually, you will then get suggestions for a new name that
-may fit your package better.
-
-1. Avoid jargon. In particular, avoid acronyms unless there is minimal possibility of confusion.
-
- * It's ok to say `USA` if you're talking about the USA.
- * It's not ok to say `PMA`, even if you're talking about positive mental attitude.
-2. Avoid using `Julia` in your package name or prefixing it with `Ju`.
-
- * It is usually clear from context and to your users that the package is a Julia package.
- * Package names already have a `.jl` extension, which communicates to users that `Package.jl` is a Julia package.
- * Having Julia in the name can imply that the package is connected to, or endorsed by, contributors
- to the Julia language itself.
-3. Packages that provide most of their functionality in association with a new type should have pluralized
- names.
-
- * `DataFrames` provides the `DataFrame` type.
- * `BloomFilters` provides the `BloomFilter` type.
- * In contrast, `JuliaParser` provides no new type, but instead new functionality in the `JuliaParser.parse()`
- function.
-4. Err on the side of clarity, even if clarity seems long-winded to you.
-
- * `RandomMatrices` is a less ambiguous name than `RndMat` or `RMT`, even though the latter are shorter.
-5. A less systematic name may suit a package that implements one of several possible approaches to
- its domain.
-
- * Julia does not have a single comprehensive plotting package. Instead, `Gadfly`, `PyPlot`, `Winston`
- and other packages each implement a unique approach based on a particular design philosophy.
- * In contrast, `SortingAlgorithms` provides a consistent interface to use many well-established
- sorting algorithms.
-6. Packages that wrap external libraries or programs can be named after those libraries or programs.
-
- * `CPLEX.jl` wraps the `CPLEX` library, which can be identified easily in a web search.
- * `MATLAB.jl` provides an interface to call the MATLAB engine from within Julia.
-
-7. Avoid naming a package closely to an existing package
- * `Websocket` is too close to `WebSockets` and can be confusing to users. Rather use a new name such as `SimpleWebsockets`.
-
-8. Avoid using a distinctive name that is already in use in a well known, unrelated project.
- * Don't use the names `Tkinter.jl`, `TkinterGUI.jl`, etc. for a package that is unrelated
- to the popular `tkinter` python package, even if it provides bindings to Tcl/Tk.
- A package name of `Tkinter.jl` would only be appropriate if the package used Python's
- library to accomplish its work or was spearheaded by the same community of developers.
- * It's okay to name a package `HTTP.jl` even though it is unrelated to the popular rust
- crate `http` because in most usages the name "http" refers to the hypertext transfer
- protocol, not to the `http` rust crate.
- * It's okay to name a package `OpenSSL.jl` if it provides an interface to the OpenSSL
- library, even without explicit affiliation with the creators of the OpenSSL (provided
- there's no copyright or trademark infringement etc.)
-
-9. Packages should follow the [Stylistic Conventions](https://docs.julialang.org/en/v1/manual/variables/#Stylistic-Conventions).
- * The package name begin with a capital letter and word separation is shown with upper camel case
- * Packages that provide the functionality of a project from another language should use the Julia convention
- * Packages that [provide pre-built libraries and executables](https://docs.binarybuilder.org/stable/jll/) can keep orignal name, but should get `_jll`as a suffix. For example `pandoc_jll` wraps pandoc. However, note that the generation and release of most JLL packages is handled by the [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil) system.
+On Julia versions that support workspaces (Julia 1.12+), using workspaces is recommended and this duplication is not necessary.
+
+## Package naming guidelines
+
+The [package naming guidelines for the `General` registry](https://github.com/JuliaRegistries/General/blob/master/NAMING_GUIDELINES.md) establish clear rules that may be helpful for naming packages even if they are not submitted to the General registry.
+
+For the complete list of rules for automatic merging into the General registry, see [the AutoMerge guidelines](https://juliaregistries.github.io/RegistryCI.jl/stable/guidelines/).
## Registering packages
@@ -631,6 +654,28 @@ Once a package is ready it can be registered with the [General Registry](https:/
Currently, packages are submitted via [`Registrator`](https://juliaregistrator.github.io/).
In addition to `Registrator`, [`TagBot`](https://github.com/marketplace/actions/julia-tagbot) helps manage the process of tagging releases.
+## Creating new package versions
+
+After registering your package, you'll want to release new versions as you add features and fix bugs. The typical workflow is:
+
+1. **Update the version number** in your `Project.toml` file according to [semantic versioning rules](@ref Version-specifier-format). For example:
+ - Increment the patch version (1.2.3 → 1.2.4) for bug fixes
+ - Increment the minor version (1.2.3 → 1.3.0) for new features that don't break existing functionality
+ - Increment the major version (1.2.3 → 2.0.0) for breaking changes
+
+2. **Commit your changes** to your package repository, including the updated version number.
+
+3. **Tag the release** using Registrator. Comment `@JuliaRegistrator register` on a commit or pull request in your GitHub repository.
+
+4. **Automated tagging**: Once you've set up [`TagBot`](https://github.com/marketplace/actions/julia-tagbot), it will automatically create a git tag in your repository when a new version is registered. This keeps your repository tags synchronized with registered versions.
+
+The registration process typically takes a few minutes. Registrator will:
+- Check that your package meets registry requirements (has tests, proper version bounds, etc.)
+- Submit a pull request to the General registry
+- Automated checks will run, and if everything passes, the PR will be automatically merged
+
+For private registries or more advanced workflows, see the documentation for [LocalRegistry.jl](https://github.com/GunnarFarneback/LocalRegistry.jl) and [RegistryCI.jl](https://github.com/JuliaRegistries/RegistryCI.jl).
+
## Best Practices
Packages should avoid mutating their own state (writing to files within their package directory).
@@ -649,3 +694,10 @@ To support the various use cases in the Julia package ecosystem, the Pkg develop
* [`Preferences.jl`](https://github.com/JuliaPackaging/Preferences.jl) allows packages to read and write preferences to the top-level `Project.toml`.
These preferences can be read at runtime or compile-time, to enable or disable different aspects of package behavior.
Packages previously would write out files to their own package directories to record options set by the user or environment, but this is highly discouraged now that `Preferences` is available.
+
+## See Also
+
+- [Managing Packages](@ref Managing-Packages) - Learn how to add, update, and manage package dependencies
+- [Working with Environments](@ref Working-with-Environments) - Understand environments and reproducible development
+- [Compatibility](@ref Compatibility) - Specify version constraints for dependencies
+- [API Reference](@ref) - Functional API for non-interactive package management
diff --git a/docs/src/depots.md b/docs/src/depots.md
new file mode 100644
index 0000000000..94eaf776a1
--- /dev/null
+++ b/docs/src/depots.md
@@ -0,0 +1,306 @@
+# **15.** Depots
+
+The packages installed for a particular environment, defined in the
+files `Project.toml` and `Manifest.toml` within the directory
+structure, are not actually installed within that directory but into a
+"depot". The location of the depots are set by the variable
+[`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH).
+
+For details on the default depot locations and how they vary by installation method,
+see the [`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH) documentation.
+
+Packages which are installed by a user go into the first depot and the Julia
+standard library is in the last depot.
+
+You should not need to manage the user depot directly. Pkg will automatically clean up
+the depots some time after packages are removed. However, you may want to manually
+remove old `.julia/compiled/` subdirectories left over from Julia versions that you no
+longer use, since those versions are no longer run and cannot tidy up after themselves.
+
+## Configuring the depot path with `JULIA_DEPOT_PATH`
+
+The depot path can be configured using the `JULIA_DEPOT_PATH` environment variable,
+which is used to populate the global Julia [`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH) variable
+at startup. For complete details on the behavior of this environment variable,
+see the [environment variables documentation](https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_DEPOT_PATH).
+
+### When to customize the depot path
+
+You may want to change your depot location in several scenarios:
+
+- **Corporate environments**: When your user folder synchronizes with a server (such as with
+ Active Directory roaming profiles), storing thousands of package files in the default depot
+ can cause significant slowdowns during login/logout.
+- **Storage constraints**: When your user directory has limited quota or is on a slow network drive.
+- **Shared computing**: When multiple users need access to the same packages on a shared system.
+- **Custom organization**: When you prefer to organize Julia packages separately from your user directory.
+
+### Platform-specific configuration
+
+`JULIA_DEPOT_PATH` is an **operating system environment variable**, not a Julia REPL command.
+The method for setting it varies by platform:
+
+#### Unix/Linux/macOS
+
+For temporary configuration (current shell session only):
+
+```bash
+export JULIA_DEPOT_PATH="/custom/depot:"
+```
+
+For permanent configuration, add the export command to your shell configuration file
+(e.g., `~/.bashrc`, `~/.zshrc`, or `~/.profile`).
+
+#### Windows
+
+For temporary configuration in **PowerShell** (current session only):
+
+```powershell
+$env:JULIA_DEPOT_PATH = "C:\custom\depot;"
+```
+
+For temporary configuration in **Command Prompt** (current session only):
+
+```cmd
+set JULIA_DEPOT_PATH=C:\custom\depot;
+```
+
+For permanent system-wide or user-level configuration:
+
+1. Press `Win+R` to open the Run dialog
+2. Type `sysdm.cpl` and press Enter
+3. Go to the "Advanced" tab
+4. Click "Environment Variables"
+5. Add a new user or system variable named `JULIA_DEPOT_PATH` with your desired path
+ (e.g., `C:\custom\depot;`)
+
+!!! note
+ The trailing path separator (`:` on Unix, `;` on Windows) is crucial for including
+ the default system depots, which contain the standard library and other bundled
+ resources. Without it, Julia will only use the specified depot and will have to precompile
+ standard library packages, which can be time-consuming and inefficient.
+
+### Alternative configuration methods
+
+Instead of setting an operating system environment variable, you can configure the depot
+path using Julia's `startup.jl` file, which runs automatically when Julia starts:
+
+```julia
+# In ~/.julia/config/startup.jl (Unix) or C:\Users\USERNAME\.julia\config\startup.jl (Windows)
+empty!(DEPOT_PATH)
+push!(DEPOT_PATH, "/custom/depot")
+push!(DEPOT_PATH, joinpath(homedir(), ".julia")) # Include default depot as fallback
+```
+
+This approach provides per-user permanent configuration without requiring operating system
+environment variable changes. However, setting `JULIA_DEPOT_PATH` is generally preferred
+as it takes effect before Julia loads any code.
+
+!!! warning
+ Modifying `DEPOT_PATH` at runtime (in the REPL or in scripts) after Julia has started
+ is generally not recommended, as Julia may have already loaded packages from the
+ original depot locations.
+
+## Shared depots for distributed computing
+
+When using Julia in distributed computing environments, such as high-performance computing
+(HPC) clusters, it's recommended to use a shared depot via `JULIA_DEPOT_PATH`. This allows
+multiple Julia processes to share precompiled packages and reduces redundant compilation.
+
+Since Julia v1.10, multiple processes using the same depot coordinate via pidfile locks
+to ensure only one process precompiles a package while others wait. However, due to
+the caching of native code in pkgimages since v1.9, you may need to set the `JULIA_CPU_TARGET`
+environment variable appropriately to ensure cache compatibility across different
+worker nodes with varying CPU capabilities.
+
+For more details, see the [FAQ section on distributed computing](https://docs.julialang.org/en/v1/manual/faq/#Computing-cluster)
+and the [environment variables documentation](https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_CPU_TARGET).
+
+## Setting up shared depots for multi-user systems
+
+In multi-user environments such as JupyterHub deployments, university computing labs, or shared servers,
+system administrators often want to provide a set of commonly-used packages that are available to all
+users while still allowing individual users to install their own packages. This can be achieved by
+setting up a layered depot structure with a read-only shared depot and user-specific writable depots.
+
+### Overview of the approach
+
+The key concept is to use `JULIA_DEPOT_PATH` to create a layered depot structure where:
+
+1. **User depot** (first in path): User-specific packages and modifications
+2. **Shared depot** (middle in path): Common packages installed by administrators
+3. **System depot** (last in path): Julia standard library and bundled resources
+
+When Julia searches for packages, it looks through depots in order. This allows users to:
+- Access pre-installed packages from the shared depot
+- Install additional packages into their own depot
+- Override shared packages if needed by installing different versions in their user depot
+
+### Administrator setup
+
+#### Step 1: Create the shared depot
+
+As a system administrator, create a shared depot location accessible to all users:
+
+```bash
+# Create shared depot directory
+sudo mkdir -p /opt/julia/shared_depot
+
+# Create a shared user for managing the depot (optional but recommended)
+sudo useradd -r -s /bin/bash -d /opt/julia/shared_depot julia-shared
+
+# Set ownership
+sudo chown -R julia-shared:julia-shared /opt/julia/shared_depot
+```
+
+#### Step 2: Install shared packages
+
+Switch to the shared user account and configure Julia to use the shared depot:
+
+```bash
+sudo su - julia-shared
+export JULIA_DEPOT_PATH="/opt/julia/shared_depot:"
+```
+
+Then install commonly-used packages. You can do this interactively or by instantiating from a Project.toml:
+
+```bash
+# Interactive installation
+julia -e 'using Pkg; Pkg.add(["Plots", "DataFrames", "CSV", "LinearAlgebra"])'
+
+# Or from a Project.toml file
+cd /opt/julia/shared_depot
+# Create or copy your Project.toml and Manifest.toml files here
+julia --project=. -e 'using Pkg; Pkg.instantiate()'
+```
+
+!!! tip
+ Using a `Project.toml` and `Manifest.toml` file to define the shared environment is
+ recommended as it provides reproducibility and version control. You can maintain these
+ files in a git repository for tracking changes.
+
+#### Step 3: Clean the shared depot (optional)
+
+To minimize the shared depot size, you can remove registries from the shared depot:
+
+```bash
+rm -rf /opt/julia/shared_depot/registries
+```
+
+Since Pkg only writes to the first depot in `JULIA_DEPOT_PATH`, users will maintain their own
+registries in their user depots anyway. Removing registries from the shared depot simply avoids
+storing duplicate registry data.
+
+#### Step 4: Set appropriate permissions
+
+Make the shared depot read-only for regular users:
+
+```bash
+# Make shared depot readable by all users
+sudo chmod -R a+rX /opt/julia/shared_depot
+
+# Ensure it's not writable by others
+sudo chmod -R go-w /opt/julia/shared_depot
+```
+
+### User configuration
+
+Each user should configure their `JULIA_DEPOT_PATH` to include both their personal depot and
+the shared depot. The exact syntax depends on where you want the user depot:
+
+#### Using default user depot location
+
+To use the default `~/.julia` as the user depot with the shared depot as a fallback:
+
+```bash
+export JULIA_DEPOT_PATH="~/.julia:/opt/julia/shared_depot:"
+```
+
+The trailing `:` ensures the system depot (with standard library) is still included.
+
+#### Using a custom user depot location
+
+If you want users to have their depot in a different location (e.g., to avoid home directory quotas):
+
+```bash
+export JULIA_DEPOT_PATH="/scratch/$USER/julia_depot:/opt/julia/shared_depot:"
+```
+
+#### System-wide configuration
+
+To configure this for all users automatically, add the export command to system-wide shell
+configuration files:
+
+**On Linux:**
+```bash
+# In /etc/profile.d/julia.sh
+export JULIA_DEPOT_PATH="~/.julia:/opt/julia/shared_depot:"
+```
+
+**On macOS:**
+```bash
+# In /etc/zshrc or /etc/bashrc
+export JULIA_DEPOT_PATH="~/.julia:/opt/julia/shared_depot:"
+```
+
+Users can then further customize their individual depot paths if needed.
+
+### Pre-seeding user environments
+
+In some scenarios (e.g., for student lab computers or container images), you may want to
+pre-seed individual user environments. This can be done by:
+
+1. Creating a template environment with a `Project.toml` and `Manifest.toml`
+2. Copying these files to each user's Julia project directory
+3. Having users (or a startup script) run `Pkg.instantiate()` on first use
+
+Since packages in the shared depot will be found automatically, `instantiate()` will only
+download packages that aren't already available in the shared depot.
+
+```bash
+# As administrator, create template
+mkdir -p /opt/julia/template_project
+# Create Project.toml with desired packages
+julia --project=/opt/julia/template_project -e 'using Pkg; Pkg.add("Example"); Pkg.add("Plots")'
+
+# Users copy the template and instantiate
+cp -r /opt/julia/template_project ~/my_project
+cd ~/my_project
+julia --project=. -e 'using Pkg; Pkg.instantiate()'
+```
+
+### Updating shared packages
+
+To update packages in the shared depot:
+
+1. Switch to the shared user account
+2. Set `JULIA_DEPOT_PATH` to point only to the shared depot
+3. Update packages as needed
+4. Optionally, clean up old package versions to save space
+
+```bash
+sudo su - julia-shared
+export JULIA_DEPOT_PATH="/opt/julia/shared_depot:"
+julia -e 'using Pkg; Pkg.update()'
+```
+
+!!! note
+ Updating packages in the shared depot adds new versions alongside existing ones. Users with
+ Manifest.toml files remain pinned to their specific versions and won't be affected. If you
+ explicitly clean up old package versions to save disk space, users who need those versions
+ can run `Pkg.instantiate()` to download them to their local depot.
+
+### Troubleshooting
+
+**Packages not found despite being in shared depot:**
+Verify that `JULIA_DEPOT_PATH` is set correctly and includes the shared depot. Check that
+the trailing separator is present to include system depots. Use `DEPOT_PATH` in the Julia
+REPL to verify the depot search path.
+
+```julia
+julia> DEPOT_PATH
+3-element Vector{String}:
+ "/home/user/.julia"
+ "/opt/julia/shared_depot"
+ "/usr/local/share/julia"
+```
diff --git a/docs/src/environments.md b/docs/src/environments.md
index 54fa4e9fe9..12fb08641f 100644
--- a/docs/src/environments.md
+++ b/docs/src/environments.md
@@ -1,16 +1,16 @@
-# [**4.** Working with Environment](@id Working-with-Environments)
+# [**4.** Working with Environments](@id Working-with-Environments)
The following discusses Pkg's interaction with environments. For more on the role, environments play in code loading, including the "stack" of environments from which code can be loaded, see [this section in the Julia manual](https://docs.julialang.org/en/v1/manual/code-loading/#Environments-1).
## Creating your own environments
-So far we have added packages to the default environment at `~/.julia/environments/v1.9`. It is however easy to create other, independent, projects.
+So far we have added packages to the default environment at `~/.julia/environments/v1.10`. It is however easy to create other, independent, projects.
This approach has the benefit of allowing you to check in a `Project.toml`, and even a `Manifest.toml` if you wish, into version control (e.g. git) alongside your code.
It should be pointed out that when two projects use the same package at the same version, the content of this package is not duplicated.
In order to create a new project, create a directory for it and then activate that directory to make it the "active project", which package operations manipulate:
```julia-repl
-(@v1.9) pkg> activate MyProject
+(@v1.10) pkg> activate MyProject
Activating new environment at `~/MyProject/Project.toml`
(MyProject) pkg> st
@@ -28,7 +28,7 @@ false
Installed Example ─ v0.5.3
Updating `~/MyProject/Project.toml`
[7876af07] + Example v0.5.3
- Updating `~~/MyProject/Manifest.toml`
+ Updating `~/MyProject/Manifest.toml`
[7876af07] + Example v0.5.3
Precompiling environment...
1 dependency successfully precompiled in 2 seconds
@@ -45,7 +45,7 @@ Example = "7876af07-990d-54b4-ab0e-23690620f79a"
julia> print(read(joinpath("MyProject", "Manifest.toml"), String))
# This file is machine-generated - editing it directly is not advised
-julia_version = "1.9.4"
+julia_version = "1.10.0"
manifest_format = "2.0"
project_hash = "2ca1c6c58cb30e79e021fb54e5626c96d05d5fdc"
@@ -66,7 +66,7 @@ shell> git clone https://github.com/JuliaLang/Example.jl.git
Cloning into 'Example.jl'...
...
-(@v1.12) pkg> activate Example.jl
+(@v1.10) pkg> activate Example.jl
Activating project at `~/Example.jl`
(Example) pkg> instantiate
@@ -82,7 +82,22 @@ If you only have a `Project.toml`, a `Manifest.toml` must be generated by "resol
If you already have a resolved `Manifest.toml`, then you will still need to ensure that the packages are installed and with the correct versions. Again `instantiate` does this for you.
-In short, `instantiate` is your friend to make sure an environment is ready to use. If there's nothing to do, `instantiate` does nothing.
+In short, [`instantiate`](@ref Pkg.instantiate) is your friend to make sure an environment is ready to use. If there's nothing to do, `instantiate` does nothing.
+
+## Returning to the default environment
+
+To return to the default environment after working in a project environment, simply call `activate` with no arguments:
+
+```julia-repl
+(MyProject) pkg> activate
+ Activating project at `~/.julia/environments/v1.10`
+
+(@v1.10) pkg>
+```
+
+This deactivates the current project and returns you to the default shared environment (typically `@v#.#`).
+There is no separate "deactivate" command—calling `activate` with no arguments is how you return to your
+base package setup. This only affects the current Julia session; the change does not persist when you restart Julia.
!!! note "Specifying project on startup"
Instead of using `activate` from within Julia, you can specify the project on startup using
@@ -103,7 +118,7 @@ also want a scratch space to try out a new package, or a sandbox to resolve vers
between several incompatible packages.
```julia-repl
-(@v1.9) pkg> activate --temp # requires Julia 1.5 or later
+(@v1.10) pkg> activate --temp # requires Julia 1.5 or later
Activating new environment at `/var/folders/34/km3mmt5930gc4pzq1d08jvjw0000gn/T/jl_a31egx/Project.toml`
(jl_a31egx) pkg> add Example
@@ -117,18 +132,18 @@ between several incompatible packages.
## Shared environments
-A "shared" environment is simply an environment that exists in `~/.julia/environments`. The default `v1.9` environment is
+A "shared" environment is simply an environment that exists in `~/.julia/environments`. The default `v1.10` environment is
therefore a shared environment:
```julia-repl
-(@v1.9) pkg> st
-Status `~/.julia/environments/v1.9/Project.toml`
+(@v1.10) pkg> st
+Status `~/.julia/environments/v1.10/Project.toml`
```
Shared environments can be activated with the `--shared` flag to `activate`:
```julia-repl
-(@v1.9) pkg> activate --shared mysharedenv
+(@v1.10) pkg> activate --shared mysharedenv
Activating project at `~/.julia/environments/mysharedenv`
(@mysharedenv) pkg>
@@ -151,7 +166,7 @@ or using Pkg's precompile option, which can precompile the entire environment, o
which can be significantly faster than the code-load route above.
```julia-repl
-(@v1.9) pkg> precompile
+(@v1.10) pkg> precompile
Precompiling environment...
23 dependencies successfully precompiled in 36 seconds
```
@@ -165,11 +180,11 @@ By default, any package that is added to a project or updated in a Pkg action wi
with its dependencies.
```julia-repl
-(@v1.9) pkg> add Images
+(@v1.10) pkg> add Images
Resolving package versions...
- Updating `~/.julia/environments/v1.9/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[916415d5] + Images v0.25.2
- Updating `~/.julia/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
...
Precompiling environment...
Progress [===================> ] 45/97
@@ -190,16 +205,73 @@ If a given package version errors during auto-precompilation, Pkg will remember
automatically tries and will skip that package with a brief warning. Manual precompilation can be used to
force these packages to be retried, as `pkg> precompile` will always retry all packages.
-To disable the auto-precompilation, set `ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0`.
-
The indicators next to the package names displayed during precompilation
-indicate the status of that package's precompilation.
+indicate the status of that package's precompilation.
- `[◐, ◓, ◑, ◒]` Animated "clock" characters indicate that the package is currently being precompiled.
- `✓` A green checkmark indicates that the package has been successfully precompiled (after which that package will disappear from the list). If the checkmark is yellow it means that the package is currently loaded so the session will need to be restarted to access the version that was just precompiled.
- `?` A question mark character indicates that a `PrecompilableError` was thrown, indicating that precompilation was disallowed, i.e. `__precompile__(false)` in that package.
- `✗` A cross indicates that the package failed to precompile.
+#### Keyboard Controls and Background Precompilation
+
+!!! compat "Julia 1.14"
+ Keyboard controls and background detach during precompilation are available in Julia 1.14 and later.
+
+In interactive sessions, precompilation displays live progress with keyboard controls available:
+
+- **`d`/`q`/`]`** — Detach. Returns to the REPL while precompilation continues silently in the background.
+- **`c`** — Cancel. Kills all subprocesses; prompts for Enter to confirm.
+- **`i`** — Info. Sends a profiling signal to subprocesses for a profile peek without interrupting compilation.
+- **`v`** — Toggle verbose mode. Shows timing, worker PID, CPU%, and memory per compiling package.
+- **`?`/`h`** — Show keyboard shortcut help.
+- **Ctrl-C** — Interrupt. Sends SIGINT to subprocesses and displays their output.
+
+After pressing `d` to detach, you can manage the background precompilation using:
+
+- `pkg> precompile --monitor`: Reattach to see live progress. Press `d` again to detach.
+- `pkg> precompile --stop`: Gracefully stop background precompilation (waits for active jobs to finish).
+- `pkg> precompile --cancel`: Immediately cancel background precompilation (interrupts active jobs).
+
+#### Controlling Auto-precompilation
+
+Auto-precompilation can be controlled in several ways:
+
+- **Environment variable**: Set `ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0` to disable auto-precompilation globally.
+- **Programmatically**: Use `Pkg.autoprecompilation_enabled(false)` to disable auto-precompilation for the current session, or `Pkg.autoprecompilation_enabled(true)` to re-enable it.
+- **Scoped control**: Use `Pkg.precompile(f, args...; kwargs...)` to execute a function `f` with auto-precompilation temporarily disabled, then automatically trigger precompilation afterward if any packages were modified during the execution.
+
+!!! compat "Julia 1.13"
+ The `Pkg.autoprecompilation_enabled()` function and `Pkg.precompile()` do-block syntax require at least Julia 1.13.
+
+For example, to add multiple packages without triggering precompilation after each one:
+
+```julia-repl
+julia> Pkg.precompile() do
+ Pkg.add("Example")
+ Pkg.dev("JSON")
+ Pkg.update("HTTP")
+ end
+ Resolving package versions...
+ ...
+Precompiling environment...
+ 14 dependencies successfully precompiled in 25 seconds
+```
+
+Or to temporarily disable auto-precompilation:
+
+```julia-repl
+julia> Pkg.autoprecompilation_enabled(false)
+false
+
+julia> Pkg.add("Example") # No precompilation happens
+ Resolving package versions...
+ ...
+
+julia> Pkg.autoprecompilation_enabled(true)
+true
+```
+
### Precompiling new versions of loaded packages
If a package that has been updated is already loaded in the session, the precompilation process will go ahead and precompile
diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md
index 58693bc583..124600049f 100644
--- a/docs/src/getting-started.md
+++ b/docs/src/getting-started.md
@@ -22,18 +22,18 @@ To get back to the Julia REPL, press `Ctrl+C` or backspace (when the REPL cursor
Upon entering the Pkg REPL, you should see the following prompt:
```julia-repl
-(@v1.9) pkg>
+(@v1.10) pkg>
```
To add a package, use `add`:
```julia-repl
-(@v1.9) pkg> add Example
+(@v1.10) pkg> add Example
Resolving package versions...
Installed Example ─ v0.5.3
- Updating `~/.julia/environments/v1.9/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] + Example v0.5.3
- Updating `~/.julia/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] + Example v0.5.3
```
@@ -49,14 +49,14 @@ julia> Example.hello("friend")
We can also specify multiple packages at once to install:
```julia-repl
-(@v1.9) pkg> add JSON StaticArrays
+(@v1.10) pkg> add JSON StaticArrays
```
The `status` command (or the shorter `st` command) can be used to see installed packages.
```julia-repl
-(@v1.9) pkg> st
-Status `~/.julia/environments/v1.6/Project.toml`
+(@v1.10) pkg> st
+Status `~/.julia/environments/v1.10/Project.toml`
[7876af07] Example v0.5.3
[682c06a0] JSON v0.21.3
[90137ffa] StaticArrays v1.5.9
@@ -68,13 +68,13 @@ Status `~/.julia/environments/v1.6/Project.toml`
To remove packages, use `rm` (or `remove`):
```julia-repl
-(@v1.9) pkg> rm JSON StaticArrays
+(@v1.10) pkg> rm JSON StaticArrays
```
Use `up` (or `update`) to update the installed packages
```julia-repl
-(@v1.9) pkg> up
+(@v1.10) pkg> up
```
If you have been following this guide it is likely that the packages installed are at the latest version
@@ -82,13 +82,13 @@ so `up` will not do anything. Below we show the status output in the case where
an old version of the Example package and then upgrade it:
```julia-repl
-(@v1.9) pkg> st
-Status `~/.julia/environments/v1.9/Project.toml`
+(@v1.10) pkg> st
+Status `~/.julia/environments/v1.10/Project.toml`
⌃ [7876af07] Example v0.5.1
Info Packages marked with ⌃ have new versions available and may be upgradable.
-(@v1.9) pkg> up
- Updating `~/.julia/environments/v1.9/Project.toml`
+(@v1.10) pkg> up
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ↑ Example v0.5.1 ⇒ v0.5.3
```
@@ -101,8 +101,8 @@ For more information about managing packages, see the [Managing Packages](@ref M
Up to this point, we have covered basic package management: adding, updating, and removing packages.
-You may have noticed the `(@v1.9)` in the REPL prompt.
-This lets us know that `v1.9` is the **active environment**.
+You may have noticed the `(@v1.10)` in the REPL prompt.
+This lets us know that `v1.10` is the **active environment**.
Different environments can have totally different packages and versions installed from another environment.
The active environment is the environment that will be modified by Pkg commands such as `add`, `rm` and `update`.
@@ -110,7 +110,7 @@ Let's set up a new environment so we may experiment.
To set the active environment, use `activate`:
```julia-repl
-(@v1.9) pkg> activate tutorial
+(@v1.10) pkg> activate tutorial
[ Info: activating new environment at `~/tutorial/Project.toml`.
```
@@ -159,6 +159,17 @@ We can see that the `tutorial` environment now contains `Example` and `JSON`.
Julia. Learning how to use environments effectively will improve your experience with
Julia packages.
+When you're done working in a specific environment and want to return to the default environment, use `activate` with no arguments:
+
+```julia-repl
+(tutorial) pkg> activate
+ Activating project at `~/.julia/environments/v1.10`
+
+(@v1.10) pkg>
+```
+
+This returns you to the default `@v1.10` environment. There is no separate "deactivate" command—`activate` with no arguments serves this purpose.
+
For more information about environments, see the [Working with Environments](@ref Working-with-Environments) section of the documentation.
## Asking for Help
@@ -166,16 +177,16 @@ For more information about environments, see the [Working with Environments](@re
If you are ever stuck, you can ask `Pkg` for help:
```julia-repl
-(@v1.9) pkg> ?
+(@v1.10) pkg> ?
```
You should see a list of available commands along with short descriptions.
You can ask for more detailed help by specifying a command:
```julia-repl
-(@v1.9) pkg> ?develop
+(@v1.10) pkg> ?develop
```
This guide should help you get started with `Pkg`.
-`Pkg` has much more to offer in terms of powerful package management,
-read the full manual to learn more!
+`Pkg` has much more to offer in terms of powerful package management.
+For more advanced topics, see [Managing Packages](@ref Managing-Packages), [Working with Environments](@ref Working-with-Environments), and [Creating Packages](@ref creating-packages-tutorial).
diff --git a/docs/src/glossary.md b/docs/src/glossary.md
index 60e0546039..44c394ef66 100644
--- a/docs/src/glossary.md
+++ b/docs/src/glossary.md
@@ -1,4 +1,4 @@
-# [**9.** Glossary](@id Glossary)
+# [**10.** Glossary](@id Glossary)
**Project:** a source tree with a standard layout, including a `src` directory
for the main body of Julia code, a `test` directory for testing the project,
@@ -14,8 +14,8 @@ may optionally have a manifest file:
- **Manifest file:** a file in the root directory of a project, named
`Manifest.toml` (or `JuliaManifest.toml`), describing a complete dependency graph
and exact versions of each package and library used by a project. The file name may
- also be suffixed by `-v{major}.{minor}.toml` which julia will prefer if the version
- matches `VERSION`, allowing multiple environments to be maintained for different julia
+ also be suffixed by `-v{major}.{minor}.toml` which Julia will prefer if the version
+ matches `VERSION`, allowing multiple environments to be maintained for different Julia
versions.
**Package:** a project which provides reusable functionality that can be used by
@@ -29,6 +29,15 @@ identify the package in projects that depend on it.
to load a package without a project file or UUID from a project with them. Once
you've loaded from a project file, everything needs a project file and UUID.
+!!! note
+ **Packages vs. Modules:** A *package* is a source tree with a `Project.toml` file
+ and other components that Pkg can install and manage. A *module* is a Julia language
+ construct (created with the `module` keyword) that provides a namespace for code.
+ Typically, a package contains a module of the same name (e.g., the `DataFrames` package
+ contains a `DataFrames` module), but they are distinct concepts: the package is the
+ distributable unit that Pkg manages, while the module is the namespace that your code
+ interacts with using `import` or `using`.
+
**Application:** a project which provides standalone functionality not intended
to be reused by other Julia projects. For example a web application or a
command-line utility, or simulation/analytics code accompanying a scientific paper.
@@ -46,7 +55,7 @@ since that could conflict with the configuration of the main application.
**Environment:** the combination of the top-level name map provided by a project
file combined with the dependency graph and map from packages to their entry points
-provided by a manifest file. For more detail see the manual section on code loading.
+provided by a manifest file. For more detail see the [manual section on code loading](https://docs.julialang.org/en/v1/manual/code-loading/).
- **Explicit environment:** an environment in the form of an explicit project
file and an optional corresponding manifest file together in a directory. If the
@@ -107,7 +116,7 @@ Julia's code loading mechanisms, look for registries, installed packages, named
environments, repo clones, cached compiled package images, and configuration
files. The depot path is controlled by the Julia `DEPOT_PATH` global variable
which is populated at startup based on the value of the `JULIA_DEPOT_PATH`
-environment variable. The first entry is the “user depot” and should be writable
+environment variable. The first entry is the "user depot" and should be writable
by and owned by the current user. The user depot is where: registries are
cloned, new package versions are installed, named environments are created and
updated, package repositories are cloned, newly compiled package image files are saved,
@@ -115,3 +124,15 @@ log files are written, development packages are checked out by default, and
global configuration data is saved. Later entries in the depot path are treated
as read-only and are appropriate for registries, packages, etc. installed and
managed by system administrators.
+
+**Materialize:** the process of installing all packages and dependencies specified
+in a manifest file to recreate an exact environment state. When you
+`instantiate` a project, Pkg materializes its environment by downloading and
+installing all the exact package versions recorded in the `Manifest.toml` file.
+This ensures reproducibility across different machines and users.
+
+**Canonical:** refers to a single, authoritative location for each specific
+version of a package. When the same package version is used by multiple
+environments, Pkg stores it in one canonical location and all environments
+reference that same location, rather than duplicating the package files. This
+saves disk space and ensures consistency.
diff --git a/docs/src/index.md b/docs/src/index.md
index e51ffcec71..da1aa13c4f 100644
--- a/docs/src/index.md
+++ b/docs/src/index.md
@@ -4,6 +4,23 @@ Welcome to the documentation for Pkg, [Julia](https://julialang.org)'s package m
The documentation covers many things, for example managing package
installations, developing packages, working with package registries and more.
+```@eval
+import Markdown
+# For Pkg, we need to determine the appropriate Julia version for the PDF
+# Since Pkg docs are versioned by Julia version, we'll use a similar approach to Julia docs
+julia_patch = if VERSION.prerelease == ()
+ "v$(VERSION.major).$(VERSION.minor).$(VERSION.patch)"
+elseif VERSION.prerelease[1] == "DEV"
+ "dev"
+end
+file = "Pkg.jl.pdf"
+url = "https://raw.githubusercontent.com/JuliaLang/Pkg.jl/gh-pages-pdf/$(julia_patch)/$(file)"
+Markdown.parse("""
+!!! note
+ The documentation is also available in PDF format: [$file]($url).
+""")
+```
+
Throughout the manual the REPL interface to Pkg, the Pkg REPL mode, is used in the examples.
There is also a functional API, which is preferred when not working
interactively. This API is documented in the [API Reference](@ref) section.
diff --git a/docs/src/managing-packages.md b/docs/src/managing-packages.md
index b5889221cf..8dbc6fc9e9 100644
--- a/docs/src/managing-packages.md
+++ b/docs/src/managing-packages.md
@@ -10,25 +10,26 @@ The most frequently used is `add` and its usage is described first.
In the Pkg REPL, packages can be added with the `add` command followed by the name of the package, for example:
```julia-repl
-(@v1.8) pkg> add JSON
- Installing known registries into `~/`
+(@v1.13) pkg> add JSON
Resolving package versions...
- Installed Parsers ─ v2.4.0
- Installed JSON ──── v0.21.3
- Updating `~/.julia/environments/v1.8/Project.toml`
- [682c06a0] + JSON v0.21.3
- Updating `~/environments/v1.9/Manifest.toml`
- [682c06a0] + JSON v0.21.3
- [69de0a69] + Parsers v2.4.0
- [ade2ca70] + Dates
- [a63ad114] + Mmap
- [de0858da] + Printf
- [4ec0a83e] + Unicode
-Precompiling environment...
- 2 dependencies successfully precompiled in 2 seconds
-```
-
-Here we added the package `JSON` to the current environment (which is the default `@v1.8` environment).
+ Updating `~/.julia/environments/v1.13/Project.toml`
+ [682c06a0] + JSON v0.21.4
+ Updating `~/.julia/environments/v1.13/Manifest.toml`
+ [682c06a0] + JSON v0.21.4
+ [69de0a69] + Parsers v2.8.3
+ [aea7be01] + PrecompileTools v1.3.2
+ [21216c6a] + Preferences v1.5.0
+ [ade2ca70] + Dates v1.11.0
+ [a63ad114] + Mmap v1.11.0
+ [de0858da] + Printf v1.11.0
+ [9a3f8284] + Random v1.11.0
+ [ea8e919c] + SHA v0.7.0
+ [fa267f1f] + TOML v1.0.3
+ [cf7118a7] + UUIDs v1.11.0
+ [4ec0a83e] + Unicode v1.11.0
+```
+
+Here we added the package `JSON` to the current environment (which is the default `@v1.13` environment).
In this example, we are using a fresh Julia installation,
and this is our first time adding a package using Pkg. By default, Pkg installs the General registry
and uses this registry to look up packages requested for inclusion in the current environment.
@@ -40,42 +41,48 @@ It is possible to add multiple packages in one command as `pkg> add A B C`.
The status output contains the packages you have added yourself, in this case, `JSON`:
```julia-repl
-(@v1.11) pkg> st
- Status `~/.julia/environments/v1.8/Project.toml`
- [682c06a0] JSON v0.21.3
+(@v1.13) pkg> st
+Status `~/.julia/environments/v1.13/Project.toml`
+ [682c06a0] JSON v0.21.4
```
The manifest status shows all the packages in the environment, including recursive dependencies:
```julia-repl
-(@v1.11) pkg> st -m
-Status `~/environments/v1.9/Manifest.toml`
- [682c06a0] JSON v0.21.3
- [69de0a69] Parsers v2.4.0
- [ade2ca70] Dates
- [a63ad114] Mmap
- [de0858da] Printf
- [4ec0a83e] Unicode
-```
-
-Since standard libraries (e.g. ` Dates`) are shipped with Julia, they do not have a version.
+(@v1.13) pkg> st -m
+Status `~/.julia/environments/v1.13/Manifest.toml`
+ [682c06a0] JSON v0.21.4
+ [69de0a69] Parsers v2.8.3
+ [aea7be01] PrecompileTools v1.3.2
+ [21216c6a] Preferences v1.5.0
+ [ade2ca70] Dates v1.11.0
+ [a63ad114] Mmap v1.11.0
+ [de0858da] Printf v1.11.0
+ [9a3f8284] Random v1.11.0
+ [ea8e919c] SHA v0.7.0
+ [fa267f1f] TOML v1.0.3
+ [cf7118a7] UUIDs v1.11.0
+ [4ec0a83e] Unicode v1.11.0
+```
+
+Note that before Julia 1.11, standard libraries (e.g. `Dates`) did not have dedicated version numbers.
To specify that you want a particular version (or set of versions) of a package, use the `compat` command. For example,
to require any patch release of the v0.21 series of JSON after v0.21.4, call `compat JSON 0.21.4`:
```julia-repl
-(@1.11) pkg> compat JSON 0.21.4
+(@v1.10) pkg> compat JSON 0.21.4
Compat entry set:
JSON = "0.21.4"
Resolve checking for compliance with the new compat rules...
Error empty intersection between JSON@0.21.3 and project compatibility 0.21.4 - 0.21
Suggestion Call `update` to attempt to meet the compatibility requirements.
-(@1.11) pkg> update
+(@v1.10) pkg> update
Updating registry at `~/.julia/registries/General.toml`
- Updating `~/.julia/environments/1.11/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[682c06a0] ↑ JSON v0.21.3 ⇒ v0.21.4
- Updating `~/.julia/environments/1.11/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[682c06a0] ↑ JSON v0.21.3 ⇒ v0.21.4
```
@@ -96,11 +103,11 @@ julia> JSON.json(Dict("foo" => [1, "bar"])) |> print
A specific version of a package can be installed by appending a version after a `@` symbol to the package name:
```julia-repl
-(@v1.8) pkg> add JSON@0.21.1
+(@v1.10) pkg> add JSON@0.21.1
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
⌃ [682c06a0] + JSON v0.21.1
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
⌃ [682c06a0] + JSON v0.21.1
⌅ [69de0a69] + Parsers v1.1.2
[ade2ca70] + Dates
@@ -118,12 +125,12 @@ If a branch (or a certain commit) of `Example` has a hotfix that is not yet incl
we can explicitly track that branch (or commit) by appending `#branchname` (or `#commitSHA1`) to the package name:
```julia-repl
-(@v1.8) pkg> add Example#master
+(@v1.10) pkg> add Example#master
Cloning git-repo `https://github.com/JuliaLang/Example.jl.git`
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] + Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master`
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] + Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master`
```
@@ -139,12 +146,12 @@ When updating packages, updates are pulled from that branch.
To go back to tracking the registry version of `Example`, the command `free` is used:
```julia-repl
-(@v1.8) pkg> free Example
+(@v1.10) pkg> free Example
Resolving package versions...
Installed Example ─ v0.5.3
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ~ Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v0.5.3
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] ~ Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v0.5.3
```
@@ -153,12 +160,12 @@ To go back to tracking the registry version of `Example`, the command `free` is
If a package is not in a registry, it can be added by specifying a URL to the Git repository:
```julia-repl
-(@v1.8) pkg> add https://github.com/fredrikekre/ImportMacros.jl
+(@v1.10) pkg> add https://github.com/fredrikekre/ImportMacros.jl
Cloning git-repo `https://github.com/fredrikekre/ImportMacros.jl`
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[92a963f6] + ImportMacros v1.0.0 `https://github.com/fredrikekre/ImportMacros.jl#master`
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[92a963f6] + ImportMacros v1.0.0 `https://github.com/fredrikekre/ImportMacros.jl#master`
```
@@ -167,7 +174,7 @@ For unregistered packages, we could have given a branch name (or commit SHA1) to
If you want to add a package using the SSH-based `git` protocol, you have to use quotes because the URL contains a `@`. For example,
```julia-repl
-(@v1.8) pkg> add "git@github.com:fredrikekre/ImportMacros.jl.git"
+(@v1.10) pkg> add "git@github.com:fredrikekre/ImportMacros.jl.git"
Cloning git-repo `git@github.com:fredrikekre/ImportMacros.jl.git`
Updating registry at `~/.julia/registries/General`
Resolving package versions...
@@ -188,7 +195,7 @@ repository:
pkg> add https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore
Cloning git-repo `https://github.com/timholy/SnoopCompile.jl.git`
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[e2b509da] + SnoopCompileCore v2.9.0 `https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore#master`
- Updating `~/.julia/environments/v1.8/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[e2b509da] + SnoopCompileCore v2.9.0 `https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore#master`
@@ -214,15 +221,15 @@ from that local repo are pulled when packages are updated.
By only using `add` your environment always has a "reproducible state", in other words, as long as the repositories and registries used are still accessible
it is possible to retrieve the exact state of all the dependencies in the environment. This has the advantage that you can send your environment (`Project.toml`
and `Manifest.toml`) to someone else and they can [`Pkg.instantiate`](@ref) that environment in the same state as you had it locally.
-However, when you are developing a package, it is more convenient to load packages at their current state at some path. For this reason, the `dev` command exists.
+However, when you are [developing a package](@ref developing), it is more convenient to load packages at their current state at some path. For this reason, the `dev` command exists.
Let's try to `dev` a registered package:
```julia-repl
-(@v1.8) pkg> dev Example
+(@v1.10) pkg> dev Example
Updating git-repo `https://github.com/JuliaLang/Example.jl.git`
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] + Example v0.5.4 `~/.julia/dev/Example`
- Updating `~/.julia/environments/v1.8/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] + Example v0.5.4 `~/.julia/dev/Example`
@@ -263,9 +270,9 @@ julia> Example.plusone(1)
To stop tracking a path and use the registered version again, use `free`:
```julia-repl
-(@v1.8) pkg> free Example
+(@v1.10) pkg> free Example
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ~ Example v0.5.4 `~/.julia/dev/Example` ⇒ v0.5.3
- Updating `~/.julia/environments/v1.8/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] ~ Example v0.5.4 `~/.julia/dev/Example` ⇒ v0.5.3
@@ -300,29 +307,29 @@ When new versions of packages are released, it is a good idea to update. Simply
to the latest compatible version. Sometimes this is not what you want. You can specify a subset of the dependencies to upgrade by giving them as arguments to `up`, e.g:
```julia-repl
-(@v1.8) pkg> up Example
+(@v1.10) pkg> up Example
```
-This will only allow Example do upgrade. If you also want to allow dependencies of Example to upgrade (with the exception of packages that are in the project) you can pass the `--preserve=direct` flag.
+This will only allow Example to upgrade. If you also want to allow dependencies of Example to upgrade (with the exception of packages that are in the project) you can pass the `--preserve=direct` flag.
```julia-repl
-(@v1.8) pkg> up --preserve=direct Example
+(@v1.10) pkg> up --preserve=direct Example
```
And if you also want to allow dependencies of Example that are also in the project to upgrade, you can use `--preserve=none`:
```julia-repl
-(@v1.8) pkg> up --preserve=none Example
+(@v1.10) pkg> up --preserve=none Example
```
## Pinning a package
A pinned package will never be updated. A package can be pinned using `pin`, for example:
```julia-repl
-(@v1.8) pkg> pin Example
+(@v1.10) pkg> pin Example
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ~ Example v0.5.3 ⇒ v0.5.3 ⚲
- Updating `~/.julia/environments/v1.8/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] ~ Example v0.5.3 ⇒ v0.5.3 ⚲
@@ -331,8 +338,8 @@ A pinned package will never be updated. A package can be pinned using `pin`, for
Note the pin symbol `⚲` showing that the package is pinned. Removing the pin is done using `free`
```julia-repl
-(@v1.8) pkg> free Example
- Updating `~/.julia/environments/v1.8/Project.toml`
+(@v1.10) pkg> free Example
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ~ Example v0.5.3 ⚲ ⇒ v0.5.3
- Updating `~/.julia/environments/v1.8/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] ~ Example v0.5.3 ⚲ ⇒ v0.5.3
@@ -343,7 +350,7 @@ Note the pin symbol `⚲` showing that the package is pinned. Removing the pin i
The tests for a package can be run using `test` command:
```julia-repl
-(@v1.8) pkg> test Example
+(@v1.10) pkg> test Example
...
Testing Example
Testing Example tests passed
@@ -356,7 +363,7 @@ The output of the build process is directed to a file.
To explicitly run the build step for a package, the `build` command is used:
```julia-repl
-(@v1.8) pkg> build IJulia
+(@v1.10) pkg> build IJulia
Building Conda ─→ `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/6e47d11ea2776bc5627421d59cdcc1296c058071/build.log`
Building IJulia → `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/98ab633acb0fe071b671f6c1785c46cd70bb86bd/build.log`
@@ -486,7 +493,31 @@ To fix such errors, you have a number of options:
- remove either `A` or `B` from your environment. Perhaps `B` is left over from something you were previously working on, and you don't need it anymore. If you don't need `A` and `B` at the same time, this is the easiest way to fix the problem.
- try reporting your conflict. In this case, we were able to deduce that `B` requires an outdated version of `D`. You could thus report an issue in the development repository of `B.jl` asking for an updated version.
- try fixing the problem yourself.
- This becomes easier once you understand `Project.toml` files and how they declare their compatibility requirements. We'll return to this example in [Fixing conflicts](@ref).
+ This becomes easier once you understand `Project.toml` files and how they declare their compatibility requirements. We'll return to this example in [Fixing conflicts](@ref Fixing-conflicts).
+
+## Yanked packages
+
+Package registries can mark specific versions of packages as "yanked". A yanked package version
+is one that should no longer be used, typically because it contains serious bugs, security
+vulnerabilities, or other critical issues. When a package version is yanked, it becomes
+unavailable for new installations but remains accessible (e.g. via `instantiate`) to maintain reproducibility
+of existing environments.
+
+When you run `pkg> status`, yanked packages are clearly marked with a warning symbol:
+
+```julia-repl
+(@v1.13) pkg> status
+ Status `~/.julia/environments/v1.13/Project.toml`
+ [682c06a0] JSON v0.21.3
+ [f4259836] Example v1.2.0 [yanked]
+```
+
+The `[yanked]` annotation indicates that version `v1.2.0` of the `Example` package
+has been yanked and should be updated or replaced.
+
+When resolving dependencies, Pkg will warn you if yanked packages are present and may provide
+guidance on how to resolve the situation. It's important to address yanked packages promptly
+to ensure the security and stability of your Julia environment.
## Garbage collecting old, unused packages
@@ -502,7 +533,7 @@ If you are short on disk space and want to clean out as many unused packages and
To run a typical garbage collection with default arguments, simply use the `gc` command at the `pkg>` REPL:
```julia-repl
-(@v1.8) pkg> gc
+(@v1.10) pkg> gc
Active manifests at:
`~/BinaryProvider/Manifest.toml`
...
diff --git a/docs/src/protocol.md b/docs/src/protocol.md
new file mode 100644
index 0000000000..c3503c2d96
--- /dev/null
+++ b/docs/src/protocol.md
@@ -0,0 +1,203 @@
+# [**14.** Package and Storage Server Protocol Reference](@id Pkg-Server-Protocols)
+
+The Julia Package Server Protocol (Pkg Protocol) and the Package Storage Server Protocol (Storage Protocol) define how Julia's package manager, Pkg, obtains and manages packages and their associated resources. They aim to enhance the Julia package ecosystem, making it more efficient, reliable, and user-friendly, avoiding potential points of failure, and ensuring the permanent availability of package versions and artifacts, which is paramount for the stability and reproducibility of Julia projects.
+
+The Pkg client, by default, gets all resources over HTTPS from a single open source service run by the Julia community. This service for serving packages is additionally backed by multiple independent storage services which interface with proprietary origin services (GitHub, etc.) and guarantee persistent availability of resources into the future.
+
+The protocols also aim to address some of the limitations that existed prior to their introduction.
+
+- **Vanishing Resources.** It is possible for authors to delete code repositories of registered Julia packages. Without some kind of package server, no one can install a package which has been deleted. If someone happens to have a current fork of a deleted package, that can be made the new official repository for the package, but the chances of them having no or outdated forks are high. An even worse situation could happen for artifacts since they tend not to be kept in version control and are much more likely to be served from "random" web servers at a fixed URL with content changing over time. Artifact publishers are unlikely to retain all past versions of artifacts, so old versions of packages that depend on specific artifact content will not be reproducible in the future unless we do something to ensure that they are kept around after the publisher has stopped hosting them. By storing all package versions and artifacts in a single place, we can ensure that they are available forever.
+- **Usage Insights.** It is valuable for the Julia community to know how many people are using Julia or what the relative popularity of different packages and operating systems is. Julia uses GitHub to host its ecosystem. GitHub - a commercial, proprietary service - has this information but does not make it available to the Julia community. We are of course using GitHub for free, so we can't complain, but it seems unfortunate that a commercial entity has this valuable information while the open source community remains in the dark. The Julia community really could use insight into who is using Julia and how, so that we can prioritize packages and platforms, and give real numbers when people ask "how many people are using Julia?"
+- **Decoupling from Git and GitHub.** Prior to this, the Julia package ecosystem was very deeply coupled to git and was even specialized on GitHub specifically in many ways. The Pkg and Storage Protocols allowed us to decouple ourselves from git as the primary mechanism for getting packages. Now Julia continues to support using git, but does not require it just to install packages from the default public registry anymore. This decoupling also paves the way for supporting other version control systems in the future, making git no longer so special. Special treatment of GitHub will also go away since we get the benefits of specializing for GitHub (fast tarball downloads) directly from the Pkg protocols.
+- **Firewall problems.** Prior to this, Pkg's need to connect to arbitrary servers using a miscellany of protocols caused several problems with firewalls. A large set of protocols and an unbounded list of servers needed to be whitelisted just to support default Pkg operation. If Pkg only needed to talk to a single service over a single, secure protocol (i.e. HTTPS), then whitelisting Pkg for standard use would be dead simple.
+
+## Protocols & Services
+
+1. **Pkg Protocol:** what Julia Pkg Clients speak to Pkg Servers. The Pkg Server serves all resources that Pkg Clients need to install and use registered packages, including registry data, packages and artifacts. It is designed to be easily horizontally scalable and not to have any hard operational requirements: if service is slow, just start more servers; if a Pkg Server crashes, forget it and boot up a new one.
+2. **Storage Protocol:** what Pkg Servers speak to get resources from Storage Services. Julia clients do not interact with Storage services directly and multiple independent Storage Services can symmetrically (all are treated equally) provide their service to a given Pkg Server. Since Pkg Servers cache what they serve to Clients and handle convenient content presentation, Storage Services can expose a much simpler protocol: all they do is serve up complete versions of registries, packages and artifacts, while guaranteeing persistence and completeness. Persistence means: once a version of a resource has been served, that version can be served forever. Completeness means: if the service serves a registry, it can serve all package versions referenced by that registry; if it serves a package version, it can serve all artifacts used by that package.
+
+Both protocols work over HTTPS, using only GET and HEAD requests. As is normal for HTTP, HEAD requests are used to get information about a resource, including whether it would be served, without actually downloading it. As described in what follows, the Pkg Protocol is client-to-server and may be unauthenticated, use basic auth, or OpenID; the Storage Protocol is server-to-server only and uses mutual authentication with TLS certificates.
+
+The following diagram shows how these services interact with each other and with external services such as GitHub, GitLab and BitBucket for source control, and S3 and HDFS for long-term persistence:
+
+ ┌───────────┐
+
+ │ Amazon S3 │
+
+ │ Storage │
+
+ └───────────┘
+
+ ▲
+
+ ║
+
+ ▼
+
+ Storage ╔═══════════╗ ┌───────────┐
+
+ Pkg Protocol ║ Storage ║ ┌──▶│ GitHub │
+
+ Protocol ┌──▶║ Service A ║───┤ └───────────┘
+
+ ┏━━━━━━━━━━━━┓ ┏━━━━━━━━━━━━┓ │ ╚═══════════╝ │ ┌───────────┐
+
+ ┃ Pkg Client ┃────▶┃ Pkg Server ┃───┤ ╔═══════════╗ ├──▶│ GitLab │
+
+ ┗━━━━━━━━━━━━┛ ┗━━━━━━━━━━━━┛ │ ║ Storage ║ │ └───────────┘
+
+ └──▶║ Service B ║───┤ ┌───────────┐
+
+ ╚═══════════╝ └──▶│ BitBucket │
+
+ ▲ └───────────┘
+
+ ║
+
+ ▼
+
+ ┌───────────┐
+
+ │ HDFS │
+
+ │ Cluster │
+
+ └───────────┘
+
+Each Julia Pkg Client is configured to talk to a Pkg Server. By default, they talk to `pkg.julialang.org`, a public, unauthenticated Pkg Server. If the environment variable `JULIA_PKG_SERVER` is set, the Pkg Client connects to that host instead. For example, if `JULIA_PKG_SERVER` is set to `pkg.company.com` then the Pkg Client will connect to `https://pkg.company.com`. So in typical operation, a Pkg Client will no longer rely on `libgit2` or a git command-line client, both of which have been an ongoing headache, especially behind firewalls and on Windows. In fact, git will only be necessary when working with git-hosted registries and unregistered packages - those will continue to work as they have previously, fetched using git.
+
+While the default Pkg Server at `pkg.julialang.org` is unauthenticated, other parties may host Pkg Server instances elsewhere, authenticated or unauthenticated, public or private, as they wish. People can connect to those servers by setting the `JULIA_PKG_SERVER` variable. There will be a configuration file for providing authentication information to Pkg Servers using either basic auth or OpenID. The Pkg Server implementation will be open source and have minimal operational requirements. Specifically, it needs:
+
+1. The ability to accept incoming connections on port 443;
+2. The ability to connect to a configurable set of Storage Services;
+3. Temporary disk storage for caching resources (registries, packages, artifacts).
+
+A Pkg Service may be backed by more than one actual server, as is typical for web services. The Pkg Service is stateless, so this kind of horizontal scaling is straightforward. Each Pkg Server serves registry, package and artifact resources to Pkg Clients and caches whatever it serves. Each Pkg Server, in turn, gets those resources from one or more Storage Services. Storage services are responsible for fetching resources from code hosting sites like GitHub, GitLab and BitBucket, and for persisting everything that they have ever served to long-term storage systems like Amazon S3, hosted HDFS clusters - or whatever an implementor wants to use. If the original copies of resources vanish, Pkg Servers must always serve up all previously served versions of resources.
+
+The Storage Protocol is designed to be extremely simple so that multiple independent implementations can coexist, and each Pkg Server may be symmetrically backed by multiple different Storage Services, providing both redundant backup and ensuring that no single implementation has a "choke hold" on the ecosystem - anyone can implement a new Storage Service and add it to the set of services backing the default Pkg Server at `pkg.julialang.org`. The simplest possible version of a Storage Service is a static HTTPS site serving files generated from a snapshot of a registry. Although this does not provide adequate long-term backup capabilities, and would need to be regenerated whenever a registry changes, it may be sufficient for some private uses. Having multiple independently operated Storage Services helps ensure that even if one Storage Service becomes unavailable or unreliable - for technical, financial, or political reasons - others will keep operating and so will the Pkg ecosystem.
+
+## The Pkg Protocol
+
+This section describes the protocol used by Pkg Clients to get resources from Pkg Servers, including the latest versions of registries, package source trees, and artifacts. There is also a standard system for asking for diffs of all of these from previous versions, to minimize how much data the client needs to download in order to update itself. There is additionally a bundle mechanism for requesting and receiving a set of resources in a single request.
+
+### Authentication
+
+The authentication scheme between a Pkg client and server will be HTTP authorization with bearer tokens, as standardized in RFC6750. This means that authenticated access is accomplished by the client by making an HTTPS request including an `Authorization: Bearer $access_token` header.
+
+The format of the token, its contents and validation mechanism are not specified by the Pkg Protocol. They are left to the server to define. The server is expected to validate the token and determine whether the client is authorized to access the requested resource. Similarly at the client side, the implementation of the token acquisition is not specified by the Pkg Protocol. However Pkg provides [hooks](#Authentication-Hooks) that can be implemented at the client side to trigger the token acquisition process. Tokens thus acquired are expected to be stored in a local file, the format of which is specified by the Pkg Protocol. Pkg will be able to read the token from this file and include it in the request to the server. Pkg can also, optionally, detect when the token is about to expire and trigger a refresh. The Pkg client also supports automatic token refresh, since bearer tokens are recommended to be short-lived (no more than a day).
+
+The authorization information is saved locally in `$(DEPOT_PATH[1])/servers/$server/auth.toml` which is a TOML file with the following fields:
+
+- `access_token` (REQUIRED): the bearer token used to authorize normal requests
+- `expires_at` (OPTIONAL): an absolute expiration time
+- `expires_in` (OPTIONAL): a relative expiration time
+- `refresh_token` (OPTIONAL): bearer token used to authorize refresh requests
+- `refresh_url` (OPTIONAL): URL to fetch a new token from
+
+The `auth.toml` file may contain other fields (e.g. user name, user email), but they are ignored by Pkg. The two other fields mentioned in RFC6750 are `token_type` and `scope`: these are omitted since only tokens of type `Bearer` are supported currently and the scope is always implicitly to provide access to Pkg protocol URLs. Pkg servers should, however, not send `auth.toml` files with `token_type` or `scope` fields, as these names may be used in the future, e.g. to support other kinds of tokens or to limit the scope of an authorization to a subset of Pkg protocol URLs.
+
+Initially, the user or user agent (IDE) must acquire an `auth.toml` file and save it to the correct location. After that, Pkg will determine whether the access token needs to be refreshed by examining the `expires_at` and/or `expires_in` fields of the auth file. The expiration time is the minimum of `expires_at` and `mtime(auth_file) + expires_in`. When the Pkg client downloads a new `auth.toml` file, if there is a relative `expires_in` field, an absolute `expires_at` value is computed based on the client's current clock time. This combination of policies allows expiration to work gracefully even in the presence of clock skew between the server and the client.
+
+If the access token is expired and there are `refresh_token` and `refresh_url` fields in `auth.toml`, a new auth file is requested by making a request to `refresh_url` with an `Authorization: Bearer $refresh_token` header. Pkg will refuse to make a refresh request unless `refresh_url` is an HTTPS URL. Note that `refresh_url` need not be a URL on the Pkg server: token refresh can be handled by a separate server. If the request is successful and the returned `auth.toml` file is a well-formed TOML file with at least an `access_token` field, it is saved to `$(DEPOT_PATH[1])/servers/$server/auth.toml`.
+
+Checking for access token expiry and refreshing `auth.toml` is done before each Pkg client request to a Pkg server, and if the auth file is updated the new access token is used, so the token should in theory always be up to date. Practice is different from theory, of course, and if the Pkg server considers the access token expired, it may return an HTTP 401 Unauthorized response, and the Pkg client should attempt to refresh the auth token. If, after attempting to refresh the access token, the server still returns HTTP 401 Unauthorized, the Pkg client will present the body of the error response to the user or user agent (IDE).
+
+### Authentication Hooks
+A mechanism to register a hook at the client is provided to allow the user agent to handle an auth failure. It can, for example, present a login page and take the user through the necessary authentication flow to get a new auth token and store it in `auth.toml`.
+
+- A handler can be registered using [`register_auth_error_handler`](@ref Pkg.PlatformEngines.register_auth_error_handler). It returns a function that can be called to deregister the handler.
+- A handler can also be deregistered using [`deregister_auth_error_handler`](@ref Pkg.PlatformEngines.deregister_auth_error_handler).
+
+Example:
+
+```julia
+# register a handler
+dispose = Pkg.PlatformEngines.register_auth_error_handler((url, svr, err) -> begin
+ PkgAuth.authenticate(svr*"/auth")
+ return true, true
+end)
+
+# ... client code ...
+
+# deregister the handler
+dispose()
+# or
+Pkg.PlatformEngines.deregister_auth_error_handler(url, svr)
+```
+
+### Resources
+
+The client can make GET or HEAD requests to the following resources:
+
+- `/registries`: map of registry uuids at this server to their current tree hashes, each line of the response data is of the form `/registry/$uuid/$hash` representing a resource pointing to particular version of a registry
+- `/registry/$uuid/$hash`: tarball of registry uuid at the given tree hash
+- `/package/$uuid/$hash`: tarball of package uuid at the given tree hash
+- `/artifact/$hash`: tarball of an artifact with the given tree hash
+
+Only the `/registries` resource changes - all other resources can be cached forever and the server will indicate this with the appropriate HTTP headers.
+
+### Compression Negotiation
+
+The Pkg protocol supports multiple compression formats.
+
+- **Zstd compression** (current): Modern clients send `Accept-Encoding: zstd, gzip` to request Zstandard-compressed resources with gzip as a fallback.
+- **Gzip compression** (legacy): Older clients that only support gzip send `Accept-Encoding: gzip` or omit the header entirely.
+
+Clients verify the actual compression format by reading file magic bytes after download:
+
+- **Zstd format**: Magic bytes `0x28 0xB5 0x2F 0xFD` (4 bytes) - decompressed with `zstd` (significantly faster)
+- **Gzip format**: Magic bytes `0x1F 0x8B` (2 bytes) - decompressed with 7z
+
+
+### Reference Implementation
+
+A reference implementation of the Pkg Server protocol is available at [PkgServer.jl](https://github.com/JuliaPackaging/PkgServer.jl).
+
+## The Storage Protocol
+
+This section describes the protocol used by Pkg Servers to get resources from Storage Servers, including the latest versions of registries, package source trees, and artifacts. The Pkg Server requests each type of resource when it needs it and caches it for as long as it can, so Storage Services should not have to serve the same resources to the same Pkg Server instance many times.
+
+### Authentication
+
+Since the Storage protocol is a server-to-server protocol, it uses certificate-based mutual authentication: each side of the connection presents certificates of identity to the other. The operator of a Storage Service must issue a client certificate to the operator of a Pkg Service certifying that it is authorized to use the Storage Service.
+
+### Resources
+
+The Storage Protocol is similar to the Pkg Protocol:
+
+- `/registries`: map of registry uuids at this server to their current tree hashes
+- `/registry/$uuid/$hash`: tarball of registry uuid at the given tree hash
+- `/package/$uuid/$hash`: tarball of package uuid at the given tree hash
+- `/artifact/$hash`: tarball of an artifact with the given tree hash
+
+As is the case with the Pkg Server protocol, only the `/registries` resource changes over time—all other resources are permanently cacheable and Pkg Servers are expected to cache resources indefinitely, only deleting them if they need to reclaim storage space.
+
+### Interaction
+
+Fetching resources from a single Storage Server is straightforward: the Pkg Server asks for a version of a registry by UUID and hash and the Storage Server returns a tarball of that registry tree if it knows about that registry and version, or an HTTP 404 error if it doesn't.
+
+Each Pkg Server may use multiple Storage Services for availability and depth of backup. For a given resource, the Pkg Server makes a HEAD request to each Storage Service requesting the resource, and then makes a GET request for the resource to the first Storage Server that replies to the HEAD request with a 200 OK. If no Storage Service responds with a 200 OK in enough time, the Pkg Server should respond to the request for the corresponding resource with a 404 error. Each Storage Service which responds with a 200 OK must behave as if it had served the resource, regardless of whether it does so or not - i.e. persist the resource to long-term storage.
+
+One subtlety is how the Pkg Server determines what the latest version of each registry is. It can get a map from registry UUIDs to version hashes from each Storage Server, but hashes are unordered - if multiple Storage Servers reply with different hashes, which one should the Pkg Server use? When Storage Servers disagree on the latest hash of a registry, the Pkg Server should ask each Storage Server about the hashes that the other servers returned: if Service A knows about Service B's hash but B doesn't know about A's hash, then A's hash is more recent and should be used. If each server doesn't know about the other's hash, then neither hash is strictly newer than the other one and either could be used. The Pkg Server can break the tie any way it wants, e.g. randomly or by using the lexicographically earlier hash.
+
+### Guarantees
+
+The primary guarantee that a Storage Server makes is that if it has ever successfully served a resource—registry tree, package source tree, artifact tree — it must be able to serve that same resource version forever.
+
+It's tempting to also require it to guarantee that if a Storage Server serves a registry tree, it can also serve every package source tree referred to within that registry tree. Similarly, it is tempting to require that if a Storage Server can serve a package source tree that it should be able to serve any artifacts referenced by that version of the package. However, this could fail for reasons entirely beyond the control of the server: what if the registry is published with wrong package hashes? What if someone registers a package version, doesn't git tag it, then force pushes the branch that the version was on? In both of these cases, the Storage Server may not be able to fetch a version of a package through no fault of its own. Similarly, artifact hashes in packages might be incorrect or vanish before the Storage Server can retrieve them.
+
+Therefore, we don't strictly require that Storage Servers guarantee this kind of closure under resource references. We do, however, recommend that Storage Servers proactively fetch resources referred to by other resources as soon as possible. When a new version of a registry is available, the Storage Server should fetch all the new package versions in the registry immediately. When a package version is fetched—for any reason, whether because it was included in a new registry snapshot or because an upstream Pkg Server requested it by hash—all artifacts that it references should be fetched immediately.
+
+## Verification
+
+Since all resources are content addressed, the Pkg Clients and Pkg Server can and should verify that resources that they receive from upstream have the correct content hash. If a resource does not have the right hash, it should not be used and not be served further downstream. Pkg Servers should try to fetch the resource from other Storage Services and serve one that has the correct content. Pkg Clients should error if they get a resource with an incorrect content hash.
+
+Git uses SHA1 for content hashing. There is a pure Julia implementation of git's content hashing algorithm, which is being used to verify artifacts in Julia 1.3 (among other things). The SHA1 hashing algorithm is considered to be cryptographically compromised at this point, and while it's not completely broken, git is already starting to plan how to move away from using SHA1 hashes. To that end, we should consider getting ahead of this problem by using a stronger hash like SHA3-256 in these protocols. Having control over these protocols actually makes this considerably easier than if we were continuing to rely on git for resource acquisition.
+
+The first step to using SHA3-256 instead of SHA1 is to populate registries with additional hashes for package versions. Currently each package version is identified by a git-tree-sha1 entry. We would add git-tree-sha3-256 entries that give the SHA3-256 hashes computed using the same git tree hashing logic. From this origin, the Pkg Client, Pkg Server and Storage Servers all just need to use SHA3-256 hashes rather than SHA1 hashes.
+
+## References
+
+1. Pkg & Storage Protocols [https://github.com/JuliaLang/Pkg.jl/issues/1377](https://github.com/JuliaLang/Pkg.jl/issues/1377)
+2. Authenticated Pkg Client Support: [https://github.com/JuliaLang/Pkg.jl/pull/1538](https://github.com/JuliaLang/Pkg.jl/pull/1538)
+3. Authentication Hooks: [https://github.com/JuliaLang/Pkg.jl/pull/1630](https://github.com/JuliaLang/Pkg.jl/pull/1630)
diff --git a/docs/src/registries.md b/docs/src/registries.md
index 7c50727204..85a2777dc7 100644
--- a/docs/src/registries.md
+++ b/docs/src/registries.md
@@ -1,4 +1,4 @@
-# **7.** Registries
+# **8.** Registries
Registries contain information about packages, such as
available releases and dependencies, and where they can be downloaded.
@@ -99,6 +99,39 @@ are the following files: `Compat.toml`, `Deps.toml`, `Package.toml`,
and `Versions.toml`.
The formats of these files are described below.
+### Registry Package.toml
+
+The `Package.toml` file contains basic metadata about the package, such as its name, UUID, repository URL, and optional metadata.
+
+#### Package metadata
+
+The `[metadata]` table in `Package.toml` provides a location for metadata about the package that doesn't fit into the other registry files. This is an extensible framework for adding package-level metadata.
+
+#### Deprecated packages
+
+One use of the `[metadata]` table is to mark packages as deprecated using `[metadata.deprecated]`. Deprecated packages will:
+- Show as `[deprecated]` in package status output
+- Be excluded from tab-completion suggestions
+- Still be installable and usable
+
+The `[metadata.deprecated]` table can contain arbitrary metadata fields. Two special fields are recognized by Pkg and displayed when using `pkg> status --deprecated`:
+- `reason`: A string explaining why the package is deprecated
+- `alternative`: A string suggesting a replacement package
+
+Example:
+
+```toml
+name = "MyPackage"
+uuid = "..."
+repo = "..."
+
+[metadata.deprecated]
+reason = "This package is no longer maintained"
+alternative = "ReplacementPackage"
+```
+
+Other fields can be added to `[metadata.deprecated]` for use by registries or other tools.
+
### Registry Compat.toml
The `Compat.toml` file has a series of blocks specifying version
@@ -127,6 +160,55 @@ together in these blocks. The interpretation of these ranges is given by the com
So for this package, versions `[0.8.0, 0.8.3]` depend on versions `[0.4.0, 0.6.0)` of `DependencyA` and version `[0.3.0, 0.6.0)` of `DependencyB`.
Meanwhile, it is also true that versions `[0.8.2, 0.8.5]` require specific versions of `DependencyC` (so that all three are required for versions `0.8.2` and `0.8.3`).
+### Registry formats
+
+!!! compat "Julia 1.7"
+ Compressed registry formats are available starting with Julia 1.7.
+
+Registries can be installed in several different formats, each with different tradeoffs:
+
+#### Compressed registries (preferred)
+
+When using a package server (the default), registries are downloaded as compressed tarballs. This is the preferred format for the General registry because it is:
+- **Fast for the initial download**: Only a single compressed file needs to be transferred
+- **Fast to use**: Pkg reads data directly from the packed tarball, avoiding many small filesystem reads
+- **Low disk usage**: The registry can be read directly from the compressed file without extraction
+
+You can check if a registry is compressed by running `Pkg.Registry.status()`, which will describe it as a "packed registry" when it remains in its tarball and an "unpacked registry" when the files have been extracted to disk.
+
+#### Git registries
+
+Registries can also be installed as git clones. This format:
+- **Provides immediate updates**: Running `Pkg.Registry.update()` fetches the latest changes directly from the git repository
+- **Uses more disk space**: The full git history is stored locally
+- **May be slower**: Cloning and updating can take longer than downloading a compressed tarball
+- **Integrates with local tooling**: All registry files are present on disk, so you can inspect or customize them using familiar editors and git workflows
+
+To install a registry as a git clone, use:
+
+```julia
+Pkg.Registry.add(url = "https://github.com/JuliaRegistries/General.git")
+```
+
+#### Converting between formats
+
+To convert an existing registry from git to compressed (or vice versa), remove and re-add it:
+
+```julia-repl
+# Convert to compressed (uses package server if available)
+pkg> registry rm General
+
+pkg> registry add General
+
+# Convert to git
+pkg> registry rm General
+
+pkg> registry add https://github.com/JuliaRegistries/General.git
+```
+
+!!! note
+ The environment variable `JULIA_PKG_SERVER` controls whether package servers are used. Setting it to an empty string (`JULIA_PKG_SERVER=""`) disables package server usage and forces git clones. To force unpacking even when using a package server, set `JULIA_PKG_UNPACK_REGISTRY=true`.
+
### Registry flavors
The default Pkg Server (`pkg.julialang.org`) offers two different "flavors" of registry.
diff --git a/docs/src/toml-files.md b/docs/src/toml-files.md
index 79496e0321..f928e7be61 100644
--- a/docs/src/toml-files.md
+++ b/docs/src/toml-files.md
@@ -1,4 +1,4 @@
-# [**10.** `Project.toml` and `Manifest.toml`](@id Project-and-Manifest)
+# [**11.** `Project.toml` and `Manifest.toml`](@id Project-and-Manifest)
Two files that are central to Pkg are `Project.toml` and `Manifest.toml`. `Project.toml`
and `Manifest.toml` are written in [TOML](https://github.com/toml-lang/toml) (hence the
@@ -22,13 +22,38 @@ are described below.
### The `authors` field
-For a package, the optional `authors` field is a list of strings describing the
-package authors, in the form `NAME <EMAIL>`. For example:
+For a package, the optional `authors` field is a TOML array describing the package authors.
+Entries in the array can either be a string in the form `"NAME"` or `"NAME <EMAIL>"`, or a table with keys following the [Citation File Format schema](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md) for either a
+[`person`](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md#definitionsperson) or an [`entity`](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md#definitionsentity).
+
+For example:
```toml
-authors = ["Some One <someone@email.com>",
- "Foo Bar <foo@bar.com>"]
+authors = [
+ "Some One <someone@email.com>",
+ "Foo Bar <foo@bar.com>",
+ {given-names = "Baz", family-names = "Qux", email = "bazqux@example.com", orcid = "https://orcid.org/0000-0000-0000-0000", website = "https://github.com/bazqux"},
+]
```
+If all authors are specified by tables, it is possible to use [the TOML Array of Tables syntax](https://toml.io/en/v1.0.0#array-of-tables)
+```toml
+[[authors]]
+given-names = "Some"
+family-names = "One"
+email = "someone@email.com"
+
+[[authors]]
+given-names = "Foo"
+family-names = "Bar"
+email = "foo@bar.com"
+
+[[authors]]
+given-names = "Baz"
+family-names = "Qux"
+email = "bazqux@example.com"
+orcid = "https://orcid.org/0000-0000-0000-0000"
+website = "https://github.com/bazqux"
+```
### The `name` field
@@ -39,7 +64,7 @@ name = "Example"
The name must be a valid [identifier](https://docs.julialang.org/en/v1/base/base/#Base.isidentifier)
(a sequence of Unicode characters that does not start with a number and is neither `true` nor `false`).
For packages, it is recommended to follow the
-[package naming rules](@ref Package-naming-rules). The `name` field is mandatory
+[package naming rules](@ref Package-naming-guidelines). The `name` field is mandatory
for packages.
@@ -55,6 +80,13 @@ The `uuid` field is mandatory for packages.
!!! note
It is recommended that `UUIDs.uuid4()` is used to generate random UUIDs.
+#### Why UUIDs are important
+
+UUIDs serve several critical purposes in the Julia package ecosystem:
+
+- **Unique identification**: UUIDs uniquely identify packages across all registries and repositories, preventing naming conflicts. Two different packages can have the same name (e.g., in different registries), but their UUIDs will always be different.
+- **Multiple registries**: UUIDs enable the use of multiple package registries (including private registries) without conflicts, as each package is uniquely identified by its UUID regardless of which registry it comes from.
+
### The `version` field
@@ -77,6 +109,33 @@ Note that Pkg.jl deviates from the SemVer specification when it comes to version
the section on [pre-1.0 behavior](@ref compat-pre-1.0) for more details.
+### The `readonly` field
+
+The `readonly` field is a boolean that, when set to `true`, marks the environment as read-only. This prevents any modifications to the environment, including adding, removing, or updating packages. For example:
+
+```toml
+readonly = true
+```
+
+When an environment is marked as readonly, Pkg will throw an error if any operation that would modify the environment is attempted.
+If the `readonly` field is not present or set to `false` (the default), the environment can be modified normally.
+
+You can also programmatically check and modify the readonly state using the [`Pkg.readonly`](@ref) function:
+
+```julia
+# Check if current environment is readonly
+is_readonly = Pkg.readonly()
+
+# Enable readonly mode
+previous_state = Pkg.readonly(true)
+
+# Disable readonly mode
+Pkg.readonly(false)
+```
+
+When readonly mode is enabled, the status display will show `(readonly)` next to the project name to indicate the environment is protected from modifications.
+
+
### The `[deps]` section
All dependencies of the package/project are listed in the `[deps]` section. Each dependency
@@ -91,24 +150,101 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Typically it is not needed to manually add entries to the `[deps]` section; this is instead
handled by Pkg operations such as `add`.
-### The `[sources]` section
+### [The `[sources]` section](@id sources-section)
-Specifiying a path or repo (+ branch) for a dependency is done in the `[sources]` section.
+Specifying a path or repo (+ branch) for a dependency is done in the `[sources]` section.
These are especially useful for controlling unregistered dependencies without having to bundle a
corresponding manifest file.
+Each entry in the `[sources]` section supports the following keys:
+
+- **`url`**: The URL of the Git repository. Cannot be used with `path`.
+- **`rev`**: The Git revision (branch name, tag, or commit hash) to use. Only valid with `url`.
+- **`subdir`**: A subdirectory within the repository containing the package.
+- **`path`**: A local filesystem path to the package. Cannot be used with `url` or `rev`. This will `dev` the package.
+
+This might in practice look something like:
+
```toml
[sources]
Example = {url = "https://github.com/JuliaLang/Example.jl", rev = "custom_branch"}
+WithinMonorepo = {url = "https://github.com/author/BigProject", subdir = "SubPackage"}
SomeDependency = {path = "deps/SomeDependency.jl"}
```
-Note that this information is only used when this environment is active, i.e. it is not used if this project is a package that is being used as a dependency.
+#### When `[sources]` entries are used
+
+Sources are read and applied in the following situations:
+
+1. **Active environment**: When resolving dependencies for the currently active environment, sources from the environment's `Project.toml` override registry information for direct dependencies.
+
+2. **Automatic addition**: When you add a package by URL (e.g., `pkg> add https://github.com/...`) or develop a package (e.g., `pkg> dev Example`), Pkg automatically adds an entry to `[sources]` for that package in your active environment's `Project.toml`.
+
+3. **Recursive collection**: When a package is added by URL or path, Pkg recursively collects `[sources]` entries from that package's dependencies. This allows private dependency chains to resolve without registry metadata. For example:
+ - If you `add` Package A by URL, and Package A has a `[sources]` entry for Package B
+ - And Package B (also specified by URL in A's sources) has a `[sources]` entry for Package C
+ - Then all three packages' source information will be collected and used during resolution
+
+This recursive behavior is particularly useful for managing chains of unregistered or private packages.
+
+!!! note "Scope of sources"
+ Sources are only used when the environment containing them is the active environment being resolved. If a package is used as a dependency in another project, its `[sources]` section is **not** consulted (except when that package itself was added by URL or path, in which case recursive collection applies as described above).
+
+!!! tip "Test-specific dependencies"
+ A use case for `[sources]` with `path` is in `test/Project.toml` to reference the parent package using `path = ".."`. This allows test dependencies to be managed independently with their own manifest file. See [Test-specific dependencies](@ref) for more details on this and other approaches.
+
+!!! compat
+ Specifying sources requires Julia 1.11+.
+
+### The `[weakdeps]` section
+
+Weak dependencies are optional dependencies that will not automatically install when the package is installed,
+but for which you can still specify compatibility constraints. Weak dependencies are typically used in conjunction
+with package extensions (see [`[extensions]`](@ref extensions-section) below), which allow conditional loading of code
+when the weak dependency is available in the environment.
+
+Example:
+```toml
+[weakdeps]
+SomePackage = "b3785f31-9d33-4cdf-bc73-f646780f1739"
+
+[compat]
+SomePackage = "1.2"
+```
+
+For more details on using weak dependencies and extensions, see the
+[Weak dependencies](@ref Weak-dependencies) section in the Creating Packages guide.
+
+!!! compat
+ Weak dependencies require Julia 1.9+.
+
+### [The `[extensions]` section](@id extensions-section)
+
+Extensions allow packages to provide optional functionality that is only loaded when certain other packages
+(typically listed in `[weakdeps]`) are available. Each entry in the `[extensions]` section maps an extension
+name to one or more package dependencies required to load that extension.
+
+Example:
+```toml
+[weakdeps]
+Contour = "d38c429a-6771-53c6-b99e-75d170b6e991"
+
+[extensions]
+ContourExt = "Contour"
+```
+
+The extension code itself should be placed in an `ext/` directory at the package root, with the file name
+matching the extension name (e.g., `ext/ContourExt.jl`). For more details on creating and using extensions,
+see the [Conditional loading of code in packages (Extensions)](@ref Conditional-loading-of-code-in-packages-(Extensions)) section in the Creating Packages guide.
+
+!!! compat
+ Extensions require Julia 1.9+.
### The `[compat]` section
-Compatibility constraints for the dependencies listed under `[deps]` can be listed in the
-`[compat]` section.
+Compatibility constraints for dependencies can be listed in the `[compat]` section. This applies to
+packages listed under `[deps]`, `[weakdeps]`, and `[extras]`.
+
Example:
```toml
@@ -128,13 +264,17 @@ constraints in detail. It is also possible to list constraints on `julia` itself
julia = "1.1"
```
-### The `[workspace]` section
+### [The `[workspace]` section](@id Workspaces)
A project file can define a workspace by giving a set of projects that is part of that workspace.
Each project in a workspace can include their own dependencies, compatibility information, and even function as full packages.
When the package manager resolves dependencies, it considers the requirements of all the projects in the workspace. The compatible versions identified during this process are recorded in a single manifest file located next to the base project file.
+Note that dependencies of the root package are **not** automatically available in child projects.
+Each child must declare its own `[deps]`. The parent package itself can be included in a child project
+via a `[sources]` path entry. See [Test-specific dependencies](@ref adding-tests-to-packages) for more information.
+
A workspace is defined in the base project by giving a list of the projects in it:
```toml
@@ -142,11 +282,54 @@ A workspace is defined in the base project by giving a list of the projects in i
projects = ["test", "docs", "benchmarks", "PrivatePackage"]
```
-This structure is particularly beneficial for developers using a monorepo approach, where a large number of unregistered packages may be involved. It's also useful for adding documentation or benchmarks to a package by including additional dependencies beyond those of the package itself.
+This structure is particularly beneficial for developers using a monorepo approach, where a large number of unregistered packages may be involved. It's also useful for adding test-specific dependencies to a package by including a `test` project in the workspace (see [Test-specific dependencies](@ref adding-tests-to-packages)), or for adding documentation or benchmarks with their own dependencies.
-Workspace can be nested: a project that itself defines a workspace can also be part of another workspace.
+Workspaces can be nested: a project that itself defines a workspace can also be part of another workspace.
In this case, the workspaces are "merged" with a single manifest being stored alongside the "root project" (the project that doesn't have another workspace including it).
+### The `[extras]` section (legacy)
+
+!!! warning
+ The `[extras]` section is a legacy feature maintained for compatibility. For Julia 1.13+,
+ using [workspaces](@ref Workspaces) is the recommended approach for managing test-specific
+ and other optional dependencies.
+
+The `[extras]` section lists additional dependencies that are not regular dependencies of the package,
+but may be used in specific contexts like testing. These are typically used in conjunction with the
+`[targets]` section.
+
+Example:
+```toml
+[extras]
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a"
+```
+
+For more information, see the [Test-specific dependencies](@ref adding-tests-to-packages) section.
+
+### The `[targets]` section (legacy)
+
+!!! warning
+ The `[targets]` section is a legacy feature maintained for compatibility. For Julia 1.13+,
+ using [workspaces](@ref Workspaces) is the recommended approach for managing test-specific
+ and build dependencies.
+
+The `[targets]` section specifies which packages from `[extras]` should be available in specific
+contexts. The only supported targets are `test` (for test dependencies) and `build` (for build-time
+dependencies used by `deps/build.jl` scripts).
+
+Example:
+```toml
+[extras]
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a"
+
+[targets]
+test = ["Test", "Markdown"]
+```
+
+For more information, see the [Test-specific dependencies](@ref adding-tests-to-packages) section.
+
## `Manifest.toml`
The manifest file is an absolute record of the state of the packages in the environment.
@@ -161,7 +344,7 @@ For the details, see [`Pkg.instantiate`](@ref).
### Different Manifests for Different Julia versions
-Starting from Julia v1.11, there is an option to name manifest files in the format `Manifest-v{major}.{minor}.toml`.
+Starting from Julia v1.10.8, there is an option to name manifest files in the format `Manifest-v{major}.{minor}.toml`.
Julia will then preferentially use the version-specific manifest file if available.
For example, if both `Manifest-v1.11.toml` and `Manifest.toml` exist, Julia 1.11 will prioritize using `Manifest-v1.11.toml`.
However, Julia versions 1.10, 1.12, and all others will default to using `Manifest.toml`.
@@ -185,6 +368,39 @@ This shows the Julia version the manifest was created on, the "format" of the ma
and a hash of the project file, so that it is possible to see when the manifest is stale
compared to the project file.
+#### Manifest format versions
+
+The `manifest_format` field indicates the structure version of the manifest file:
+- `"2.0"`: The standard format for Julia 1.7+
+- `"2.1"`: The current format (requires Julia 1.13+). This format introduced registry tracking in the `[registries]` section.
+
+### The `[registries]` section
+
+!!! compat
+ Registry tracking in manifests requires Julia 1.13+ and manifest format `"2.1"`.
+
+Starting with manifest format `2.1`, the manifest can include a `[registries]` section that tracks
+metadata about the registries from which packages were obtained. This ensures that the exact source
+of each package version can be identified, which is particularly important when using multiple
+registries or private registries.
+
+Each registry entry in the manifest looks like this:
+
+```toml
+[registries.General]
+uuid = "23338594-aafe-5451-b93e-139f81909106"
+url = "https://github.com/JuliaRegistries/General.git"
+```
+
+The registry entries include:
+* **`uuid`** (required): The unique identifier for the registry.
+* **`url`** (optional): The URL where the registry can be found. This enables automatic installation
+ of registries when instantiating an environment on a new machine.
+
+The section key (e.g., `General` in the example above) is the registry name.
+
+### Package entries
+
Each dependency has its own section in the manifest file, and its content varies depending
on how the dependency was added to the environment. Every
dependency section includes a combination of the following entries:
@@ -201,6 +417,11 @@ dependency section includes a combination of the following entries:
or a commit `repo-rev = "66607a62a83cb07ab18c0b35c038fcd62987c9b1"`.
* `git-tree-sha1`: a content hash of the source tree, for example
`git-tree-sha1 = "ca3820cc4e66f473467d912c4b2b3ae5dc968444"`.
+* `registries`: a reference to the registry IDs from which this package version was obtained. This can be either
+ a single string (e.g., `registries = "General"`) or a vector of strings if the package is available in multiple
+ registries (e.g., `registries = ["General", "MyRegistry"]`). All registries containing this package version
+ are recorded. This field is only present in manifest format `2.1` or later, and only for packages that were
+ added from a registry (not for developed or git-tracked packages).
#### Added package
@@ -215,10 +436,12 @@ deps = ["DependencyA", "DependencyB"]
git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
uuid = "7876af07-990d-54b4-ab0e-23690620f79a"
version = "1.2.3"
+registries = "General"
```
Note, in particular, that no `repo-url` is present, since that information is included in
-the registry where this package was found.
+the registry where this package was found. The `registries` field (present in manifest format `2.1`+)
+references an entry in the `[registries]` section that contains the registry metadata.
#### Added package by branch
diff --git a/ext/REPLExt/REPLExt.jl b/ext/REPLExt/REPLExt.jl
index 92e636cd2f..faf7f6cb0c 100644
--- a/ext/REPLExt/REPLExt.jl
+++ b/ext/REPLExt/REPLExt.jl
@@ -1,18 +1,25 @@
module REPLExt
+if Base.get_bool_env("JULIA_PKG_DISALLOW_PKG_PRECOMPILATION", false) == true
+ error("Precompililing Pkg extension REPLExt is disallowed. JULIA_PKG_DISALLOW_PKG_PRECOMPILATION=$(ENV["JULIA_PKG_DISALLOW_PKG_PRECOMPILATION"])")
+end
+
using Markdown, UUIDs, Dates
import REPL
import .REPL: LineEdit, REPLCompletions, TerminalMenus
import Pkg
-import .Pkg: linewrap, pathrepr, compat, can_fancyprint, printpkgstyle, PKGMODE_PROJECT
+import .Pkg: linewrap, pathrepr, can_fancyprint, printpkgstyle, PKGMODE_PROJECT
using .Pkg: Types, Operations, API, Registry, Resolve, REPLMode, safe_realpath
using .REPLMode: Statement, CommandSpec, Command, prepare_cmd, tokenize, core_parse, SPECS, api_options, parse_option, api_options, is_opt, wrap_option
using .Types: Context, PkgError, pkgerror, EnvCache
+using .API: set_current_compat
+import .API: _compat
+
include("completions.jl")
include("compat.jl")
@@ -21,13 +28,32 @@ include("compat.jl")
# REPL mode creation #
######################
+const BRACKET_INSERT_SUPPORTED = hasfield(REPL.Options, :auto_insert_closing_bracket)
+
struct PkgCompletionProvider <: LineEdit.CompletionProvider end
-function LineEdit.complete_line(c::PkgCompletionProvider, s; hint::Bool=false)
+function LineEdit.complete_line(c::PkgCompletionProvider, s; hint::Bool = false)
partial = REPL.beforecursor(s.input_buffer)
full = LineEdit.input_string(s)
ret, range, should_complete = completions(full, lastindex(partial); hint)
- return ret, partial[range], should_complete
+ # Convert to new completion interface format
+ named_completions = map(LineEdit.NamedCompletion, ret)
+ # Convert UnitRange to Region (Pair{Int,Int}) to match new completion interface
+ # range represents character positions in partial string, convert to 0-based byte positions
+ if length(range) == 0 && first(range) > last(range)
+ # Empty backward range like 4:3 means insert at cursor position
+ # The cursor is at position last(range), so insert after it
+ pos = thisind(partial, last(range) + 1) - 1
+ region = pos => pos
+ elseif isempty(range)
+ region = 0 => 0
+ else
+ # Convert 1-based character positions to 0-based byte positions
+ start_pos = thisind(full, first(range)) - 1
+ end_pos = thisind(full, last(range))
+ region = start_pos => end_pos
+ end
+ return named_completions, region, should_complete
end
prev_project_file = nothing
@@ -113,23 +139,27 @@ function on_done(s, buf, ok, repl)
do_cmds(repl, input)
REPL.prepare_next(repl)
REPL.reset_state(s)
- s.current_mode.sticky || REPL.transition(s, main)
+ return s.current_mode.sticky || REPL.transition(s, main)
end
# Set up the repl Pkg REPLMode
function create_mode(repl::REPL.AbstractREPL, main::LineEdit.Prompt)
- pkg_mode = LineEdit.Prompt(promptf;
+ pkg_mode = LineEdit.Prompt(
+ promptf;
prompt_prefix = repl.options.hascolor ? Base.text_colors[:blue] : "",
prompt_suffix = "",
complete = PkgCompletionProvider(),
- sticky = true)
+ sticky = true
+ )
pkg_mode.repl = repl
hp = main.hist
hp.mode_mapping[:pkg] = pkg_mode
pkg_mode.hist = hp
- search_prompt, skeymap = LineEdit.setup_search_keymap(hp)
+ skeymap = if !isdefined(REPL, :History)
+ last(LineEdit.setup_search_keymap(hp)) # TODO: Remove
+ end
prefix_prompt, prefix_keymap = LineEdit.setup_prefix_keymap(hp, pkg_mode)
pkg_mode.on_done = (s, buf, ok) -> Base.@invokelatest(on_done(s, buf, ok, repl))
@@ -145,25 +175,30 @@ function create_mode(repl::REPL.AbstractREPL, main::LineEdit.Prompt)
repl_keymap = Dict()
if shell_mode !== nothing
- let shell_mode=shell_mode
- repl_keymap[';'] = function (s,o...)
- if isempty(s) || position(LineEdit.buffer(s)) == 0
+ let shell_mode = shell_mode
+ repl_keymap[';'] = function (s, o...)
+ return if isempty(s) || position(LineEdit.buffer(s)) == 0
buf = copy(LineEdit.buffer(s))
LineEdit.transition(s, shell_mode) do
LineEdit.state(s, shell_mode).input_buffer = buf
end
else
LineEdit.edit_insert(s, ';')
- LineEdit.check_for_hint(s) && LineEdit.refresh_line(s)
+ LineEdit.check_show_hint(s)
end
end
end
end
- b = Dict{Any,Any}[
- skeymap, repl_keymap, mk, prefix_keymap, LineEdit.history_keymap,
- LineEdit.default_keymap, LineEdit.escape_defaults
- ]
+ b = Dict{Any, Any}[]
+ if !isdefined(REPL, :History)
+ push!(b, skeymap)
+ end
+ push!(b, repl_keymap)
+ if BRACKET_INSERT_SUPPORTED && repl.options.auto_insert_closing_bracket
+ push!(b, LineEdit.bracket_insert_keymap)
+ end
+ push!(b, mk, prefix_keymap, LineEdit.history_keymap, LineEdit.default_keymap, LineEdit.escape_defaults)
pkg_mode.keymap_dict = LineEdit.keymap(b)
return pkg_mode
end
@@ -172,16 +207,20 @@ function repl_init(repl::REPL.LineEditREPL)
main_mode = repl.interface.modes[1]
pkg_mode = create_mode(repl, main_mode)
push!(repl.interface.modes, pkg_mode)
- keymap = Dict{Any,Any}(
- ']' => function (s,args...)
+ keymap = Dict{Any, Any}(
+ ']' => function (s, args...)
if isempty(s) || position(LineEdit.buffer(s)) == 0
buf = copy(LineEdit.buffer(s))
- LineEdit.transition(s, pkg_mode) do
+ return LineEdit.transition(s, pkg_mode) do
LineEdit.state(s, pkg_mode).input_buffer = buf
end
else
- LineEdit.edit_insert(s, ']')
- LineEdit.check_for_hint(s) && LineEdit.refresh_line(s)
+ if BRACKET_INSERT_SUPPORTED && repl.options.auto_insert_closing_bracket
+ return LineEdit.bracket_insert_keymap[']'](s, args...)
+ else
+ LineEdit.edit_insert(s, ']')
+ return LineEdit.check_show_hint(s)
+ end
end
end
)
@@ -201,9 +240,9 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
end
if isempty(ctx.registries)
if !REG_WARNED[]
- printstyled(ctx.io, " │ "; color=:green)
+ printstyled(ctx.io, " │ "; color = :green)
printstyled(ctx.io, "Attempted to find missing packages in package registries but no registries are installed.\n")
- printstyled(ctx.io, " └ "; color=:green)
+ printstyled(ctx.io, " └ "; color = :green)
printstyled(ctx.io, "Use package mode to install a registry. `pkg> registry add` will install the default registries.\n\n")
REG_WARNED[] = true
end
@@ -223,22 +262,22 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
available_pkg_list = length(available_pkgs) == 1 ? String(available_pkgs[1]) : "[$(join(available_pkgs, ", "))]"
msg1 = "Package$(plural1) $(missing_pkg_list) not found, but $(plural2) named $(available_pkg_list) $(plural3) available from a registry."
for line in linewrap(msg1, io = ctx.io, padding = length(" │ "))
- printstyled(ctx.io, " │ "; color=:green)
+ printstyled(ctx.io, " │ "; color = :green)
println(ctx.io, line)
end
- printstyled(ctx.io, " │ "; color=:green)
+ printstyled(ctx.io, " │ "; color = :green)
println(ctx.io, "Install package$(plural4)?")
msg2 = string("add ", join(available_pkgs, ' '))
for (i, line) in pairs(linewrap(msg2; io = ctx.io, padding = length(string(" | ", promptf()))))
- printstyled(ctx.io, " │ "; color=:green)
+ printstyled(ctx.io, " │ "; color = :green)
if i == 1
- printstyled(ctx.io, promptf(); color=:blue)
+ printstyled(ctx.io, promptf(); color = :blue)
else
print(ctx.io, " "^length(promptf()))
end
println(ctx.io, line)
end
- printstyled(ctx.io, " └ "; color=:green)
+ printstyled(ctx.io, " └ "; color = :green)
Base.prompt(stdin, ctx.io, "(y/n/o)", default = "y")
catch err
if err isa InterruptException # if ^C is entered
@@ -254,7 +293,7 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
resp = strip(resp)
lower_resp = lowercase(resp)
if lower_resp in ["y", "yes"]
- API.add(string.(available_pkgs); allow_autoprecomp=false)
+ API.add(string.(available_pkgs); allow_autoprecomp = false)
elseif lower_resp in ["o"]
editable_envs = filter(v -> v != "@stdlib", LOAD_PATH)
option_list = String[]
@@ -273,16 +312,16 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
push!(keybindings, only("$n"))
push!(shown_envs, expanded_env)
end
- menu = TerminalMenus.RadioMenu(option_list, keybindings=keybindings, pagesize=length(option_list))
+ menu = TerminalMenus.RadioMenu(option_list; keybindings = keybindings, pagesize = length(option_list), charset = :ascii)
default = something(
# select the first non-default env by default, if possible
findfirst(!=(Base.active_project()), shown_envs),
1
)
print(ctx.io, "\e[1A\e[1G\e[0J") # go up one line, to the start, and clear it
- printstyled(ctx.io, " └ "; color=:green)
+ printstyled(ctx.io, " └ "; color = :green)
choice = try
- TerminalMenus.request("Select environment:", menu, cursor=default)
+ TerminalMenus.request("Select environment:", menu, cursor = default)
catch err
if err isa InterruptException # if ^C is entered
println(ctx.io)
@@ -292,7 +331,7 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
end
choice == -1 && return false
API.activate(shown_envs[choice]) do
- API.add(string.(available_pkgs); allow_autoprecomp=false)
+ API.add(string.(available_pkgs); allow_autoprecomp = false)
end
elseif (lower_resp in ["n"])
return false
@@ -308,7 +347,6 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
end
-
function __init__()
if isdefined(Base, :active_repl)
if Base.active_repl isa REPL.LineEditREPL
@@ -326,7 +364,7 @@ function __init__()
end
end
end
- if !in(try_prompt_pkg_add, REPL.install_packages_hooks)
+ return if !in(try_prompt_pkg_add, REPL.install_packages_hooks)
push!(REPL.install_packages_hooks, try_prompt_pkg_add)
end
end
diff --git a/ext/REPLExt/compat.jl b/ext/REPLExt/compat.jl
index a9a537cf0f..f51e6b877c 100644
--- a/ext/REPLExt/compat.jl
+++ b/ext/REPLExt/compat.jl
@@ -1,7 +1,9 @@
# TODO: Overload
-function compat(ctx::Context; io = nothing)
+function _compat(ctx::Context; io = nothing, input_io = stdin)
io = something(io, ctx.io)
- can_fancyprint(io) || pkgerror("Pkg.compat cannot be run interactively in this terminal")
+ if input_io isa Base.TTY # testing uses IOBuffer
+ can_fancyprint(io) || pkgerror("Pkg.compat cannot be run interactively in this terminal")
+ end
printpkgstyle(io, :Compat, pathrepr(ctx.env.project_file))
longest_dep_len = max(5, length.(collect(keys(ctx.env.project.deps)))...)
opt_strs = String[]
@@ -9,14 +11,14 @@ function compat(ctx::Context; io = nothing)
compat_str = Operations.get_compat_str(ctx.env.project, "julia")
push!(opt_strs, Operations.compat_line(io, "julia", nothing, compat_str, longest_dep_len, indent = ""))
push!(opt_pkgs, "julia")
- for (dep, uuid) in sort(collect(ctx.env.project.deps); by = x->x.first)
+ for (dep, uuid) in sort(collect(ctx.env.project.deps); by = x -> x.first)
compat_str = Operations.get_compat_str(ctx.env.project, dep)
push!(opt_strs, Operations.compat_line(io, dep, uuid, compat_str, longest_dep_len, indent = ""))
push!(opt_pkgs, dep)
end
- menu = TerminalMenus.RadioMenu(opt_strs, pagesize=length(opt_strs))
+ menu = TerminalMenus.RadioMenu(opt_strs; pagesize = length(opt_strs), charset = :ascii)
choice = try
- TerminalMenus.request(" Select an entry to edit:", menu)
+ TerminalMenus.request(TerminalMenus.default_terminal(in = input_io, out = io), " Select an entry to edit:", menu)
catch err
if err isa InterruptException # if ^C is entered
println(io)
@@ -35,10 +37,12 @@ function compat(ctx::Context; io = nothing)
start_pos = length(prompt) + 2
move_start = "\e[$(start_pos)G"
clear_to_end = "\e[0J"
- ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid},Int32), stdin.handle, true)
+ if input_io isa Base.TTY
+ ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid}, Int32), input_io.handle, true)
+ end
while true
print(io, move_start, clear_to_end, buffer, "\e[$(start_pos + cursor)G")
- inp = TerminalMenus._readkey(stdin)
+ inp = TerminalMenus._readkey(input_io)
if inp == '\r' # Carriage return
println(io)
break
@@ -54,7 +58,7 @@ function compat(ctx::Context; io = nothing)
elseif inp == TerminalMenus.END_KEY
cursor = length(buffer)
elseif inp == TerminalMenus.DEL_KEY
- if cursor == 0
+ if cursor == 0 && !isempty(buffer)
buffer = buffer[2:end]
elseif cursor < length(buffer)
buffer = buffer[1:cursor] * buffer[(cursor + 2):end]
@@ -62,16 +66,18 @@ function compat(ctx::Context; io = nothing)
elseif inp isa TerminalMenus.Key
# ignore all other escaped (multi-byte) keys
elseif inp == '\x7f' # backspace
- if cursor == 1
- buffer = buffer[2:end]
- elseif cursor == length(buffer)
- buffer = buffer[1:end - 1]
- elseif cursor > 0
- buffer = buffer[1:(cursor-1)] * buffer[(cursor + 1):end]
+ if cursor > 0
+ if cursor == 1
+ buffer = buffer[2:end]
+ elseif cursor == length(buffer)
+ buffer = buffer[1:(end - 1)]
+ else
+ buffer = buffer[1:(cursor - 1)] * buffer[(cursor + 1):end]
+ end
+ cursor -= 1
else
continue
end
- cursor -= 1
else
if cursor == 0
buffer = inp * buffer
@@ -85,9 +91,11 @@ function compat(ctx::Context; io = nothing)
end
buffer
finally
- ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid},Int32), stdin.handle, false)
+ if input_io isa Base.TTY
+ ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid}, Int32), input_io.handle, false)
+ end
end
new_entry = strip(resp)
- compat(ctx, dep, string(new_entry))
+ API._compat(ctx, dep, string(new_entry))
return
end
diff --git a/ext/REPLExt/completions.jl b/ext/REPLExt/completions.jl
index eca5e11218..60646bad78 100644
--- a/ext/REPLExt/completions.jl
+++ b/ext/REPLExt/completions.jl
@@ -11,7 +11,7 @@ function _shared_envs()
return possible
end
-function complete_activate(options, partial, i1, i2; hint::Bool)
+function complete_activate(options, partial, i1, i2; hint::Bool, arguments = [])
shared = get(options, :shared, false)
if shared
return _shared_envs()
@@ -34,11 +34,10 @@ function complete_local_dir(s, i1, i2)
end
function complete_expanded_local_dir(s, i1, i2, expanded_user, oldi2)
- cmp = REPL.REPLCompletions.complete_path(s, i2, shell_escape=true)
- cmp2 = cmp[2]
- completions = [REPL.REPLCompletions.completion_text(p) for p in cmp[1]]
- completions = filter!(completions) do x
- Base.isaccessibledir(s[1:prevind(s, first(cmp2)-i1+1)]*x)
+ paths, dir, success = REPL.REPLCompletions.complete_path(s; cmd_escape = true)
+ completions = [REPL.REPLCompletions.completion_text(p) for p in paths]
+ filter!(completions) do x
+ Base.isaccessibledir(joinpath(dir, x))
end
if expanded_user
if length(completions) == 1 && endswith(joinpath(homedir(), ""), first(completions))
@@ -48,83 +47,110 @@ function complete_expanded_local_dir(s, i1, i2, expanded_user, oldi2)
end
return completions, i1:oldi2, true
end
-
- return completions, cmp[2], !isempty(completions)
+ prefix = splitdir(s)[2]
+ startpos = i2 - lastindex(prefix) + 1
+ return completions, startpos:i2, !isempty(completions)
end
const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e")
-function complete_remote_package(partial; hint::Bool)
- found_match = false
- isempty(partial) && return String[]
+
+# Helper function to extract already-specified package names from arguments
+# Used for deduplicating completion suggestions (issue #4098)
+function extract_specified_names(arguments)
+ specified_names = Set{String}()
+ # Exclude the last argument, which is the one currently being completed
+ for i in 1:(length(arguments) - 1)
+ arg = arguments[i]
+ arg_str = arg isa String ? arg : arg.raw
+ # Extract package name (before any @, #, =, or : specifiers)
+ pkg_name = first(split(arg_str, ['@', '#', '=', ':']))
+ push!(specified_names, pkg_name)
+ end
+ return specified_names
+end
+function complete_remote_package!(comps, partial; hint::Bool)
+ isempty(partial) && return true # true means returned early
+ found_match = !isempty(comps)
cmp = Set{String}()
for reg in Registry.reachable_registries()
for (uuid, regpkg) in reg
name = regpkg.name
name in cmp && continue
if startswith(regpkg.name, partial)
- pkg = Registry.registry_info(regpkg)
- compat_info = Registry.compat_info(pkg)
- # Filter versions
- for (v, uncompressed_compat) in compat_info
+ pkg = Registry.registry_info(reg, regpkg)
+ Registry.isdeprecated(pkg) && continue
+ # Check if any non-yanked version is compatible with current Julia
+ found_compatible_version = false
+ for v in keys(pkg.version_info)
Registry.isyanked(pkg, v) && continue
# TODO: Filter based on offline mode
- is_julia_compat = nothing
- for (pkg_uuid, vspec) in uncompressed_compat
- if pkg_uuid == JULIA_UUID
- is_julia_compat = VERSION in vspec
- is_julia_compat && continue
- end
- end
- # Found a compatible version or compat on julia at all => compatible
- if is_julia_compat === nothing || is_julia_compat
- push!(cmp, name)
- # In hint mode the result is only used if there is a single matching entry
- # so we abort the search
- if hint && found_match
- return sort!(collect(cmp))
- end
- found_match = true
+ # Query compressed compat for this version (optimized: only fetch Julia compat)
+ julia_vspec = Pkg.Registry.query_compat_for_version(pkg, v, JULIA_UUID)
+ # Found a compatible version or no julia compat at all => compatible
+ if julia_vspec === nothing || VERSION in julia_vspec
+ found_compatible_version = true
break
end
end
+ if found_compatible_version
+ push!(cmp, name)
+ # In hint mode the result is only used if there is a single matching entry
+ # so we can return no matches in case of more than one match
+ if hint && found_match
+ return true # true means returned early
+ end
+ found_match = true
+ end
end
end
end
- return sort!(collect(cmp))
+ append!(comps, sort!(collect(cmp)))
+ return false # false means performed full search
end
-function complete_help(options, partial; hint::Bool)
+function complete_help(options, partial; hint::Bool, arguments = [])
names = String[]
for cmds in values(SPECS)
- append!(names, [spec.canonical_name for spec in values(cmds)])
+ append!(names, [spec.canonical_name for spec in values(cmds)])
end
return sort!(unique!(append!(names, collect(keys(SPECS)))))
end
-function complete_installed_packages(options, partial; hint::Bool)
- env = try EnvCache()
+function complete_installed_packages(options, partial; hint::Bool, arguments = [])
+ env = try
+ EnvCache()
catch err
err isa PkgError || rethrow()
return String[]
end
mode = get(options, :mode, PKGMODE_PROJECT)
- return mode == PKGMODE_PROJECT ?
+ packages = mode == PKGMODE_PROJECT ?
collect(keys(env.project.deps)) :
unique!([entry.name for (uuid, entry) in env.manifest])
+
+ # Filter out already-specified packages
+ specified_names = extract_specified_names(arguments)
+ return filter(pkg -> !(pkg in specified_names), packages)
end
-function complete_all_installed_packages(options, partial; hint::Bool)
- env = try EnvCache()
+function complete_all_installed_packages(options, partial; hint::Bool, arguments = [])
+ env = try
+ EnvCache()
catch err
err isa PkgError || rethrow()
return String[]
end
- return unique!([entry.name for (uuid, entry) in env.manifest])
+ packages = unique!([entry.name for (uuid, entry) in env.manifest])
+
+ # Filter out already-specified packages
+ specified_names = extract_specified_names(arguments)
+ return filter(pkg -> !(pkg in specified_names), packages)
end
-function complete_installed_packages_and_compat(options, partial; hint::Bool)
- env = try EnvCache()
+function complete_installed_packages_and_compat(options, partial; hint::Bool, arguments = [])
+ env = try
+ EnvCache()
catch err
err isa PkgError || rethrow()
return String[]
@@ -135,27 +161,59 @@ function complete_installed_packages_and_compat(options, partial; hint::Bool)
end
end
-function complete_fixed_packages(options, partial; hint::Bool)
- env = try EnvCache()
+function complete_fixed_packages(options, partial; hint::Bool, arguments = [])
+ env = try
+ EnvCache()
catch err
err isa PkgError || rethrow()
return String[]
end
- return unique!([entry.name for (uuid, entry) in env.manifest.deps if Operations.isfixed(entry)])
+ packages = unique!([entry.name for (uuid, entry) in env.manifest.deps if Operations.isfixed(entry)])
+
+ # Filter out already-specified packages
+ specified_names = extract_specified_names(arguments)
+ return filter(pkg -> !(pkg in specified_names), packages)
end
-function complete_add_dev(options, partial, i1, i2; hint::Bool)
+function complete_add_dev(options, partial, i1, i2; hint::Bool, arguments = [])
comps, idx, _ = complete_local_dir(partial, i1, i2)
if occursin(Base.Filesystem.path_separator_re, partial)
return comps, idx, !isempty(comps)
end
- comps = vcat(comps, sort(complete_remote_package(partial; hint)))
- if !isempty(partial)
+ returned_early = complete_remote_package!(comps, partial; hint)
+ # returning early means that no further search should be done here
+ if !returned_early
append!(comps, filter!(startswith(partial), [info.name for info in values(Types.stdlib_infos())]))
end
+
+ # Filter out already-specified packages
+ specified_names = extract_specified_names(arguments)
+ filter!(pkg -> !(pkg in specified_names), comps)
+
return comps, idx, !isempty(comps)
end
+# TODO: Move
+import Pkg: Operations, Types, Apps
+function complete_installed_apps(options, partial; hint, arguments = [])
+ manifest = try
+ Types.read_manifest(joinpath(Apps.app_env_folder(), "AppManifest.toml"))
+ catch err
+ err isa PkgError || rethrow()
+ return String[]
+ end
+ apps = String[]
+ for (uuid, entry) in manifest.deps
+ append!(apps, keys(entry.apps))
+ push!(apps, entry.name)
+ end
+ apps = unique!(apps)
+
+ # Filter out already-specified packages
+ specified_names = extract_specified_names(arguments)
+ return filter(app -> !(app in specified_names), apps)
+end
+
########################
# COMPLETION INTERFACE #
########################
@@ -179,13 +237,23 @@ function complete_command(statement::Statement, final::Bool, on_sub::Bool)
end
complete_opt(opt_specs) =
- unique(sort(map(wrap_option,
- map(x -> getproperty(x, :name),
- collect(values(opt_specs))))))
+ unique(
+ sort(
+ map(
+ wrap_option,
+ map(
+ x -> getproperty(x, :name),
+ collect(values(opt_specs))
+ )
+ )
+ )
+)
-function complete_argument(spec::CommandSpec, options::Vector{String},
- partial::AbstractString, offset::Int,
- index::Int; hint::Bool)
+function complete_argument(
+ spec::CommandSpec, options::Vector{String}, arguments::Vector,
+ partial::AbstractString, offset::Int,
+ index::Int; hint::Bool
+ )
if spec.completions isa Symbol
# if completions is a symbol, it is a function in REPLExt that needs to be forwarded
# to REPLMode (couldn't be linked there because REPLExt is not a dependency of REPLMode)
@@ -195,11 +263,16 @@ function complete_argument(spec::CommandSpec, options::Vector{String},
@error "REPLMode indicates a completion function called :$(spec.completions) that cannot be found in REPLExt"
rethrow()
end
- spec.completions = function(opts, partial, offset, index; hint::Bool)
- applicable(completions, opts, partial, offset, index) ?
- completions(opts, partial, offset, index; hint) :
- completions(opts, partial; hint)
+ spec.completions = function (opts, partial, offset, index; hint::Bool, arguments = [])
+ # Wrapper that normalizes completion function calls.
+ if applicable(completions, opts, partial, offset, index)
+ # Function takes 4 positional args: (opts, partial, offset, index; hint, arguments)
+ return completions(opts, partial, offset, index; hint, arguments)
+ else
+ # Function takes 2 positional args: (opts, partial; hint, arguments)
+ return completions(opts, partial; hint, arguments)
end
+ end
end
spec.completions === nothing && return String[]
# finish parsing opts
@@ -210,18 +283,19 @@ function complete_argument(spec::CommandSpec, options::Vector{String},
e isa PkgError && return String[]
rethrow()
end
- return spec.completions(opts, partial, offset, index; hint)
+ return spec.completions(opts, partial, offset, index; hint, arguments)
end
function _completions(input, final, offset, index; hint::Bool)
statement, word_count, partial = nothing, nothing, nothing
try
- words = tokenize(input)[end]
+ words = tokenize(input; rm_leading_bracket = false)[end]
word_count = length(words)
statement, partial = core_parse(words)
if final
partial = "" # last token is finalized -> no partial
end
+ partial = something(partial, "")
catch
return String[], 0:-1, false
end
@@ -236,11 +310,11 @@ function _completions(input, final, offset, index; hint::Bool)
command_is_focused() && return String[], 0:-1, false
if final # complete arg by default
- x = complete_argument(statement.spec, statement.options, partial, offset, index; hint)
+ x = complete_argument(statement.spec, statement.options, statement.arguments, partial, offset, index; hint)
else # complete arg or opt depending on last token
x = is_opt(partial) ?
complete_opt(statement.spec.option_specs) :
- complete_argument(statement.spec, statement.options, partial, offset, index; hint)
+ complete_argument(statement.spec, statement.options, statement.arguments, partial, offset, index; hint)
end
end
@@ -255,7 +329,7 @@ function _completions(input, final, offset, index; hint::Bool)
end
end
-function completions(full, index; hint::Bool=false)::Tuple{Vector{String},UnitRange{Int},Bool}
+function completions(full, index; hint::Bool = false)::Tuple{Vector{String}, UnitRange{Int}, Bool}
pre = full[1:index]
isempty(pre) && return default_commands(), 0:-1, false # empty input -> complete commands
offset_adjust = 0
@@ -264,8 +338,8 @@ function completions(full, index; hint::Bool=false)::Tuple{Vector{String},UnitRa
pre = string(pre[1], " ", pre[2:end])
offset_adjust = -1
end
- last = split(pre, ' ', keepempty=true)[end]
- offset = isempty(last) ? index+1+offset_adjust : last.offset+1+offset_adjust
- final = isempty(last) # is the cursor still attached to the final token?
+ last = split(pre, ' ', keepempty = true)[end]
+ offset = isempty(last) ? index + 1 + offset_adjust : last.offset + 1 + offset_adjust
+ final = isempty(last) # is the cursor still attached to the final token?
return _completions(pre, final, offset, index; hint)
end
diff --git a/ext/REPLExt/precompile.jl b/ext/REPLExt/precompile.jl
index 2deb9b84f0..796c746eba 100644
--- a/ext/REPLExt/precompile.jl
+++ b/ext/REPLExt/precompile.jl
@@ -14,29 +14,35 @@ let
original_load_path = copy(LOAD_PATH)
__init__()
Pkg.UPDATED_REGISTRY_THIS_SESSION[] = true
- Pkg.DEFAULT_IO[] = Pkg.unstableio(devnull)
- withenv("JULIA_PKG_SERVER" => nothing, "JULIA_PKG_UNPACK_REGISTRY" => nothing) do
- tmp = Pkg._run_precompilation_script_setup()
- cd(tmp) do
- try_prompt_pkg_add(Symbol[:notapackage])
- promptf()
- term = FakeTerminal()
- repl = REPL.LineEditREPL(term, true)
- REPL.run_repl(repl)
- repl_init(repl)
+ Base.ScopedValues.@with Pkg.DEFAULT_IO => Pkg.unstableio(devnull) begin
+ withenv("JULIA_PKG_SERVER" => nothing, "JULIA_PKG_UNPACK_REGISTRY" => nothing) do
+ tmp = Pkg._run_precompilation_script_setup()
+ cd(tmp) do
+ try_prompt_pkg_add(Symbol[:notapackage])
+ promptf()
+ term = FakeTerminal()
+ repl = REPL.LineEditREPL(term, true)
+ REPL.run_repl(repl)
+ repl_init(repl)
+ end
end
- end
- copy!(DEPOT_PATH, original_depot_path)
- copy!(LOAD_PATH, original_load_path)
+ copy!(DEPOT_PATH, original_depot_path)
+ copy!(LOAD_PATH, original_load_path)
- Base.precompile(Tuple{typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState})
- Base.precompile(Tuple{typeof(REPL.REPLCompletions.completion_text), REPL.REPLCompletions.PackageCompletion})
- Base.precompile(Tuple{typeof(REPLExt.on_done), REPL.LineEdit.MIState, Base.GenericIOBuffer{Memory{UInt8}}, Bool, REPL.LineEditREPL})
- Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:hint,), Tuple{Bool}}, typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState})
+ Base.precompile(Tuple{typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState})
+ Base.precompile(Tuple{typeof(REPL.REPLCompletions.completion_text), REPL.REPLCompletions.PackageCompletion})
+ Base.precompile(Tuple{typeof(REPLExt.on_done), REPL.LineEdit.MIState, Base.GenericIOBuffer{Memory{UInt8}}, Bool, REPL.LineEditREPL})
+ return Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:hint,), Tuple{Bool}}, typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState})
+ end
end
if Base.generating_output()
- pkgreplmode_precompile()
+ ccall(:jl_tag_newly_inferred_enable, Cvoid, ())
+ try
+ pkgreplmode_precompile()
+ finally
+ ccall(:jl_tag_newly_inferred_disable, Cvoid, ())
+ end
end
end # let
diff --git a/src/API.jl b/src/API.jl
index 5e5723a8b7..8b1f5a1af9 100644
--- a/src/API.jl
+++ b/src/API.jl
@@ -12,9 +12,9 @@ import FileWatching
import Base: StaleCacheKey
-import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle
+import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle, .._autoprecompilation_enabled_scoped, ..manifest_rel_path
import ..Operations, ..GitTools, ..Pkg, ..Registry
-import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH
+import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH, ..atomic_toml_write, ..safe_realpath
using ..Types, ..TOML
using ..Types: VersionTypes
using Base.BinaryPlatforms
@@ -27,17 +27,17 @@ include("generate.jl")
Base.@kwdef struct PackageInfo
name::String
- version::Union{Nothing,VersionNumber}
- tree_hash::Union{Nothing,String}
+ version::Union{Nothing, VersionNumber}
+ tree_hash::Union{Nothing, String}
is_direct_dep::Bool
is_pinned::Bool
is_tracking_path::Bool
is_tracking_repo::Bool
is_tracking_registry::Bool
- git_revision::Union{Nothing,String}
- git_source::Union{Nothing,String}
+ git_revision::Union{Nothing, String}
+ git_source::Union{Nothing, String}
source::String
- dependencies::Dict{String,UUID}
+ dependencies::Dict{String, UUID}
end
function Base.:(==)(a::PackageInfo, b::PackageInfo)
@@ -53,29 +53,36 @@ end
function package_info(env::EnvCache, pkg::PackageSpec)::PackageInfo
entry = manifest_info(env.manifest, pkg.uuid)
if entry === nothing
- pkgerror("expected package $(err_rep(pkg)) to exist in the manifest",
- " (use `resolve` to populate the manifest)")
+ pkgerror(
+ "expected package $(err_rep(pkg)) to exist in the manifest",
+ " (use `resolve` to populate the manifest)"
+ )
end
- package_info(env, pkg, entry)
+ return package_info(env, pkg, entry)
end
function package_info(env::EnvCache, pkg::PackageSpec, entry::PackageEntry)::PackageInfo
git_source = pkg.repo.source === nothing ? nothing :
isurl(pkg.repo.source::String) ? pkg.repo.source::String :
- Operations.project_rel_path(env, pkg.repo.source::String)
+ safe_realpath(manifest_rel_path(env, pkg.repo.source::String))
+ _source_path = Operations.source_path(env.manifest_file, pkg)
+ if _source_path === nothing
+ @debug "Manifest file $(env.manifest_file) contents:\n$(read(env.manifest_file, String))"
+ pkgerror("could not find source path for package $(err_rep(pkg)) based on $(env.manifest_file)")
+ end
info = PackageInfo(
- name = pkg.name,
- version = pkg.version != VersionSpec() ? pkg.version : nothing,
- tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA?
- is_direct_dep = pkg.uuid in values(env.project.deps),
- is_pinned = pkg.pinned,
- is_tracking_path = pkg.path !== nothing,
- is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing,
+ name = pkg.name,
+ version = pkg.version != VersionSpec() ? pkg.version : nothing,
+ tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA?
+ is_direct_dep = pkg.uuid in values(env.project.deps),
+ is_pinned = pkg.pinned,
+ is_tracking_path = pkg.path !== nothing,
+ is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing,
is_tracking_registry = Operations.is_tracking_registry(pkg),
- git_revision = pkg.repo.rev,
- git_source = git_source,
- source = Operations.project_rel_path(env, Operations.source_path(env.manifest_file, pkg)),
- dependencies = copy(entry.deps), #TODO is copy needed?
+ git_revision = pkg.repo.rev,
+ git_source = git_source,
+ source = _source_path,
+ dependencies = copy(entry.deps), #TODO is copy needed?
)
return info
end
@@ -90,17 +97,17 @@ function dependencies(fn::Function, uuid::UUID)
if dep === nothing
pkgerror("dependency with UUID `$uuid` does not exist")
end
- fn(dep)
+ return fn(dep)
end
Base.@kwdef struct ProjectInfo
- name::Union{Nothing,String}
- uuid::Union{Nothing,UUID}
- version::Union{Nothing,VersionNumber}
+ name::Union{Nothing, String}
+ uuid::Union{Nothing, UUID}
+ version::Union{Nothing, VersionNumber}
ispackage::Bool
- dependencies::Dict{String,UUID}
- sources::Dict{String,Dict{String,String}}
+ dependencies::Dict{String, UUID}
+ sources::Dict{String, Dict{String, String}}
path::String
end
@@ -108,26 +115,28 @@ project() = project(EnvCache())
function project(env::EnvCache)::ProjectInfo
pkg = env.pkg
return ProjectInfo(
- name = pkg === nothing ? nothing : pkg.name,
- uuid = pkg === nothing ? nothing : pkg.uuid,
- version = pkg === nothing ? nothing : pkg.version::VersionNumber,
- ispackage = pkg !== nothing,
+ name = pkg === nothing ? nothing : pkg.name,
+ uuid = pkg === nothing ? nothing : pkg.uuid,
+ version = pkg === nothing ? nothing : pkg.version::VersionNumber,
+ ispackage = pkg !== nothing,
dependencies = env.project.deps,
- sources = env.project.sources,
- path = env.project_file
+ sources = env.project.sources,
+ path = env.project_file
)
end
-function check_package_name(x::AbstractString, mode::Union{Nothing,String,Symbol}=nothing)
+function check_package_name(x::AbstractString, mode::Union{Nothing, String, Symbol} = nothing)
if !Base.isidentifier(x)
message = sprint() do iostr
print(iostr, "`$x` is not a valid package name")
if endswith(lowercase(x), ".jl")
- print(iostr, ". Perhaps you meant `$(chop(x; tail=3))`")
+ print(iostr, ". Perhaps you meant `$(chop(x; tail = 3))`")
end
- if mode !== nothing && any(occursin.(['\\','/'], x)) # maybe a url or a path
- print(iostr, "\nThe argument appears to be a URL or path, perhaps you meant ",
- "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`.")
+ if mode !== nothing && any(occursin.(['\\', '/'], x)) # maybe a url or a path
+ print(
+ iostr, "\nThe argument appears to be a URL or path, perhaps you meant ",
+ "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`."
+ )
end
end
pkgerror(message)
@@ -137,15 +146,19 @@ end
check_package_name(::Nothing, ::Any) = nothing
function require_not_empty(pkgs, f::Symbol)
- isempty(pkgs) && pkgerror("$f requires at least one package")
+ return isempty(pkgs) && pkgerror("$f requires at least one package")
+end
+
+function check_readonly(ctx::Context)
+ return ctx.env.project.readonly && pkgerror("Cannot modify a readonly environment. The project at $(ctx.env.project_file) is marked as readonly.")
end
# Provide some convenience calls
for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :precompile)
@eval begin
$f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...)
- $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...)
- function $f(pkgs::Vector{PackageSpec}; io::IO=$(f === :status ? :stdout_f : :stderr_f)(), kwargs...)
+ $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...)
+ function $f(pkgs::Vector{PackageSpec}; io::IO = $(f === :status ? :stdout_f : :stderr_f)(), kwargs...)
$(f != :precompile) && Registry.download_default_registries(io)
ctx = Context()
# Save initial environment for undo/redo functionality
@@ -153,7 +166,7 @@ for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :
add_snapshot_to_undo(ctx.env)
saved_initial_snapshot[] = true
end
- kwargs = merge((;kwargs...), (:io => io,))
+ kwargs = merge((; kwargs...), (:io => io,))
pkgs = deepcopy(pkgs) # don't mutate input
foreach(handle_package_input!, pkgs)
ret = $f(ctx, pkgs; kwargs...)
@@ -162,55 +175,102 @@ for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :
return ret
end
$f(ctx::Context; kwargs...) = $f(ctx, PackageSpec[]; kwargs...)
- function $f(; name::Union{Nothing,AbstractString}=nothing, uuid::Union{Nothing,String,UUID}=nothing,
- version::Union{VersionNumber, String, VersionSpec, Nothing}=nothing,
- url=nothing, rev=nothing, path=nothing, mode=PKGMODE_PROJECT, subdir=nothing, kwargs...)
+ function $f(;
+ name::Union{Nothing, AbstractString} = nothing, uuid::Union{Nothing, String, UUID} = nothing,
+ version::Union{VersionNumber, String, VersionSpec, Nothing} = nothing,
+ url = nothing, rev = nothing, path = nothing, mode = PKGMODE_PROJECT, subdir = nothing, kwargs...
+ )
pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir)
if $f === status || $f === rm || $f === up
- kwargs = merge((;kwargs...), (:mode => mode,))
+ kwargs = merge((; kwargs...), (:mode => mode,))
end
# Handle $f() case
- if all(isnothing, [name,uuid,version,url,rev,path,subdir])
+ return if all(isnothing, [name, uuid, version, url, rev, path, subdir])
$f(PackageSpec[]; kwargs...)
else
$f(pkg; kwargs...)
end
end
function $f(pkgs::Vector{<:NamedTuple}; kwargs...)
- $f([PackageSpec(;pkg...) for pkg in pkgs]; kwargs...)
+ return $f([PackageSpec(; pkg...) for pkg in pkgs]; kwargs...)
end
end
end
-function update_source_if_set(project, pkg)
+function update_source_if_set(env, pkg)
+ project = env.project
source = get(project.sources, pkg.name, nothing)
- source === nothing && return
- # This should probably not modify the dicts directly...
- if pkg.repo.source !== nothing
- source["url"] = pkg.repo.source
- end
- if pkg.repo.rev !== nothing
- source["rev"] = pkg.repo.rev
- end
- if pkg.path !== nothing
- source["path"] = pkg.path
- end
- path, repo = get_path_repo(project, pkg.name)
- if path !== nothing
- pkg.path = path
+ if source !== nothing
+ if pkg.repo == GitRepo()
+ delete!(project.sources, pkg.name)
+ else
+ # This should probably not modify the dicts directly...
+ if pkg.repo.source !== nothing
+ source["url"] = pkg.repo.source
+ delete!(source, "path")
+ end
+ if pkg.repo.rev !== nothing
+ source["rev"] = pkg.repo.rev
+ delete!(source, "path")
+ end
+ if pkg.repo.subdir !== nothing
+ source["subdir"] = pkg.repo.subdir
+ end
+ if pkg.path !== nothing
+ source["path"] = pkg.path
+ delete!(source, "url")
+ delete!(source, "rev")
+ end
+ end
+ if pkg.subdir !== nothing
+ source["subdir"] = pkg.subdir
+ end
+ path, repo = get_path_repo(project, env.project_file, env.manifest_file, pkg.name)
+ if path !== nothing
+ pkg.path = path
+ end
+ if repo.source !== nothing
+ pkg.repo.source = repo.source
+ end
+ if repo.rev !== nothing
+ pkg.repo.rev = repo.rev
+ end
+ if repo.subdir !== nothing
+ pkg.repo.subdir = repo.subdir
+ end
end
- if repo.source !== nothing
- pkg.repo.source = repo.source
+
+ # Packages in manifest should have their paths set to the path in the manifest
+ for (path, wproj) in env.workspace
+ if wproj.uuid == pkg.uuid
+ pkg.path = Types.relative_project_path(env.manifest_file, dirname(path))
+ break
+ end
end
- if repo.rev !== nothing
- pkg.repo.rev = repo.rev
+ return
+end
+
+# Normalize relative paths from user input (pwd-relative) to internal representation (manifest-relative)
+# This ensures all relative paths in Pkg are consistently relative to the manifest file
+function normalize_package_paths!(ctx::Context, pkgs::Vector{PackageSpec})
+ for pkg in pkgs
+ if pkg.repo.source !== nothing && !isurl(pkg.repo.source) && !isabspath(pkg.repo.source)
+ # User provided a relative path (relative to pwd), convert to manifest-relative
+ absolute_path = abspath(pkg.repo.source)
+ pkg.repo.source = Types.relative_project_path(ctx.env.manifest_file, absolute_path)
+ end
end
+ return
end
-function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
- preserve::PreserveLevel=Operations.default_preserve(), platform::AbstractPlatform=HostPlatform(), kwargs...)
+function develop(
+ ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool = true,
+ preserve::PreserveLevel = Operations.default_preserve(), platform::AbstractPlatform = HostPlatform(), kwargs...
+ )
require_not_empty(pkgs, :develop)
Context!(ctx; kwargs...)
+ Operations.ensure_manifest_registries!(ctx)
+ check_readonly(ctx)
for pkg in pkgs
check_package_name(pkg.name, "develop")
@@ -224,8 +284,10 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
pkgerror("rev argument not supported by `develop`; consider using `add` instead")
end
if pkg.version != VersionSpec()
- pkgerror("version specification invalid when calling `develop`:",
- " `$(pkg.version)` specified for package $(err_rep(pkg))")
+ pkgerror(
+ "version specification invalid when calling `develop`:",
+ " `$(pkg.version)` specified for package $(err_rep(pkg))"
+ )
end
# not strictly necessary to check these fields early, but it is more efficient
if pkg.name !== nothing && (length(findall(x -> x.name == pkg.name, pkgs)) > 1)
@@ -236,8 +298,11 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
end
end
+ normalize_package_paths!(ctx, pkgs)
+
new_git = handle_repos_develop!(ctx, pkgs, shared)
+ Operations.update_registries(ctx; force = false, update_cooldown = Day(1))
for pkg in pkgs
if Types.collides_with_project(ctx.env, pkg)
@@ -246,17 +311,21 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
if length(findall(x -> x.uuid == pkg.uuid, pkgs)) > 1
pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))")
end
- update_source_if_set(ctx.env.project, pkg)
+ update_source_if_set(ctx.env, pkg)
end
- Operations.develop(ctx, pkgs, new_git; preserve=preserve, platform=platform)
+ Operations.develop(ctx, pkgs, new_git; preserve = preserve, platform = platform)
return
end
-function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Operations.default_preserve(),
- platform::AbstractPlatform=HostPlatform(), target::Symbol=:deps, allow_autoprecomp::Bool=true, kwargs...)
+function add(
+ ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel = Operations.default_preserve(),
+ platform::AbstractPlatform = HostPlatform(), target::Symbol = :deps, allow_autoprecomp::Bool = true, kwargs...
+ )
require_not_empty(pkgs, :add)
Context!(ctx; kwargs...)
+ Operations.ensure_manifest_registries!(ctx)
+ check_readonly(ctx)
for pkg in pkgs
check_package_name(pkg.name, "add")
@@ -268,8 +337,10 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op
end
if pkg.repo.source !== nothing || pkg.repo.rev !== nothing
if pkg.version != VersionSpec()
- pkgerror("version specification invalid when tracking a repository:",
- " `$(pkg.version)` specified for package $(err_rep(pkg))")
+ pkgerror(
+ "version specification invalid when tracking a repository:",
+ " `$(pkg.version)` specified for package $(err_rep(pkg))"
+ )
end
end
# not strictly necessary to check these fields early, but it is more efficient
@@ -281,17 +352,19 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op
end
end
+ normalize_package_paths!(ctx, pkgs)
+
repo_pkgs = PackageSpec[pkg for pkg in pkgs if (pkg.repo.source !== nothing || pkg.repo.rev !== nothing)]
new_git = handle_repos_add!(ctx, repo_pkgs)
# repo + unpinned -> name, uuid, repo.rev, repo.source, tree_hash
# repo + pinned -> name, uuid, tree_hash
- Operations.update_registries(ctx; force=false, update_cooldown=Day(1))
+ Operations.update_registries(ctx; force = false, update_cooldown = Day(1))
project_deps_resolve!(ctx.env, pkgs)
registry_resolve!(ctx.registries, pkgs)
stdlib_resolve!(pkgs)
- ensure_resolved(ctx, ctx.env.manifest, pkgs, registry=true)
+ ensure_resolved(ctx, ctx.env.manifest, pkgs, registry = true)
for pkg in pkgs
if Types.collides_with_project(ctx.env, pkg)
@@ -300,15 +373,17 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op
if length(findall(x -> x.uuid == pkg.uuid, pkgs)) > 1
pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))")
end
- update_source_if_set(ctx.env.project, pkg)
+ update_source_if_set(ctx.env, pkg)
end
Operations.add(ctx, pkgs, new_git; allow_autoprecomp, preserve, platform, target)
return
end
-function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_pkgs::Bool=false, kwargs...)
+function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode = PKGMODE_PROJECT, all_pkgs::Bool = false, kwargs...)
Context!(ctx; kwargs...)
+ Operations.ensure_manifest_registries!(ctx)
+ check_readonly(ctx)
if all_pkgs
!isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
append_all_pkgs!(pkgs, ctx, mode)
@@ -320,9 +395,11 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_p
if pkg.name === nothing && pkg.uuid === nothing
pkgerror("name or UUID specification required when calling `rm`")
end
- if !(pkg.version == VersionSpec() && pkg.pinned == false &&
- pkg.tree_hash === nothing && pkg.repo.source === nothing &&
- pkg.repo.rev === nothing && pkg.path === nothing)
+ if !(
+ pkg.version == VersionSpec() && pkg.pinned == false &&
+ pkg.tree_hash === nothing && pkg.repo.source === nothing &&
+ pkg.repo.rev === nothing && pkg.path === nothing
+ )
pkgerror("packages may only be specified by name or UUID when calling `rm`")
end
end
@@ -337,40 +414,58 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_p
end
-function append_all_pkgs!(pkgs, ctx, mode)
+function append_all_pkgs!(pkgs, ctx, mode; workspace::Bool = false)
if mode == PKGMODE_PROJECT || mode == PKGMODE_COMBINED
for (name::String, uuid::UUID) in ctx.env.project.deps
- path, repo = get_path_repo(ctx.env.project, name)
- push!(pkgs, PackageSpec(name=name, uuid=uuid, path=path, repo=repo))
+ path, repo = get_path_repo(ctx.env.project, ctx.env.project_file, ctx.env.manifest_file, name)
+ push!(pkgs, PackageSpec(name = name, uuid = uuid, path = path, repo = repo))
+ end
+ if workspace
+ for (project_file, project) in ctx.env.workspace
+ for (name::String, uuid::UUID) in project.deps
+ path, repo = get_path_repo(project, project_file, ctx.env.manifest_file, name)
+ existing = findfirst(p -> p.uuid == uuid, pkgs)
+ if existing !== nothing
+ Operations.merge_pkg_source!(pkgs[existing], path, repo)
+ continue
+ end
+ push!(pkgs, PackageSpec(name = name, uuid = uuid, path = path, repo = repo))
+ end
+ end
end
end
if mode == PKGMODE_MANIFEST || mode == PKGMODE_COMBINED
for (uuid, entry) in ctx.env.manifest
- path, repo = get_path_repo(ctx.env.project, entry.name)
- push!(pkgs, PackageSpec(name=entry.name, uuid=uuid, path=path, repo=repo))
+ path, repo = get_path_repo(ctx.env.project, ctx.env.project_file, ctx.env.manifest_file, entry.name)
+ push!(pkgs, PackageSpec(name = entry.name, uuid = uuid, path = path, repo = repo))
end
end
return
end
-function up(ctx::Context, pkgs::Vector{PackageSpec};
- level::UpgradeLevel=UPLEVEL_MAJOR, mode::PackageMode=PKGMODE_PROJECT,
- preserve::Union{Nothing,PreserveLevel}= isempty(pkgs) ? nothing : PRESERVE_ALL,
- update_registry::Bool=true,
- skip_writing_project::Bool=false,
- kwargs...)
+function up(
+ ctx::Context, pkgs::Vector{PackageSpec};
+ level::UpgradeLevel = UPLEVEL_MAJOR, mode::PackageMode = PKGMODE_PROJECT,
+ preserve::Union{Nothing, PreserveLevel} = isempty(pkgs) ? nothing : PRESERVE_ALL,
+ update_registry::Bool = true,
+ skip_writing_project::Bool = false,
+ workspace::Bool = false,
+ kwargs...
+ )
Context!(ctx; kwargs...)
+ Operations.ensure_manifest_registries!(ctx)
+ check_readonly(ctx)
if Operations.is_fully_pinned(ctx)
printpkgstyle(ctx.io, :Update, "All dependencies are pinned - nothing to update.", color = Base.info_color())
return
end
if update_registry
Registry.download_default_registries(ctx.io)
- Operations.update_registries(ctx; force=true)
+ Operations.update_registries(ctx; force = true)
end
Operations.prune_manifest(ctx.env)
if isempty(pkgs)
- append_all_pkgs!(pkgs, ctx, mode)
+ append_all_pkgs!(pkgs, ctx, mode; workspace)
else
mode == PKGMODE_PROJECT && project_deps_resolve!(ctx.env, pkgs)
mode == PKGMODE_MANIFEST && manifest_resolve!(ctx.env.manifest, pkgs)
@@ -378,22 +473,26 @@ function up(ctx::Context, pkgs::Vector{PackageSpec};
manifest_resolve!(ctx.env.manifest, pkgs)
ensure_resolved(ctx, ctx.env.manifest, pkgs)
end
-
+ for pkg in pkgs
+ update_source_if_set(ctx.env, pkg)
+ end
Operations.up(ctx, pkgs, level; skip_writing_project, preserve)
return
end
-resolve(; io::IO=stderr_f(), kwargs...) = resolve(Context(;io); kwargs...)
-function resolve(ctx::Context; skip_writing_project::Bool=false, kwargs...)
- up(ctx; level=UPLEVEL_FIXED, mode=PKGMODE_MANIFEST, update_registry=false, skip_writing_project, kwargs...)
+resolve(; io::IO = stderr_f(), kwargs...) = resolve(Context(; io); kwargs...)
+function resolve(ctx::Context; skip_writing_project::Bool = false, kwargs...)
+ up(ctx; level = UPLEVEL_FIXED, mode = PKGMODE_MANIFEST, update_registry = false, skip_writing_project, kwargs...)
return nothing
end
-function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...)
+function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool = false, workspace::Bool = false, kwargs...)
Context!(ctx; kwargs...)
+ Operations.ensure_manifest_registries!(ctx)
+ check_readonly(ctx)
if all_pkgs
!isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
- append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST)
+ append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST; workspace)
else
require_not_empty(pkgs, :pin)
end
@@ -403,12 +502,16 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar
pkgerror("name or UUID specification required when calling `pin`")
end
if pkg.repo.source !== nothing
- pkgerror("repository specification invalid when calling `pin`:",
- " `$(pkg.repo.source)` specified for package $(err_rep(pkg))")
+ pkgerror(
+ "repository specification invalid when calling `pin`:",
+ " `$(pkg.repo.source)` specified for package $(err_rep(pkg))"
+ )
end
if pkg.repo.rev !== nothing
- pkgerror("git revision specification invalid when calling `pin`:",
- " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))")
+ pkgerror(
+ "git revision specification invalid when calling `pin`:",
+ " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))"
+ )
end
version = pkg.version
if version isa VersionSpec
@@ -416,6 +519,7 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar
pkgerror("pinning a package requires a single version, not a versionrange")
end
end
+ update_source_if_set(ctx.env, pkg)
end
project_deps_resolve!(ctx.env, pkgs)
@@ -424,11 +528,13 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar
return
end
-function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...)
+function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool = false, workspace::Bool = false, kwargs...)
Context!(ctx; kwargs...)
+ Operations.ensure_manifest_registries!(ctx)
+ check_readonly(ctx)
if all_pkgs
!isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
- append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST)
+ append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST; workspace)
else
require_not_empty(pkgs, :free)
end
@@ -437,9 +543,11 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwa
if pkg.name === nothing && pkg.uuid === nothing
pkgerror("name or UUID specification required when calling `free`")
end
- if !(pkg.version == VersionSpec() && pkg.pinned == false &&
- pkg.tree_hash === nothing && pkg.repo.source === nothing &&
- pkg.repo.rev === nothing && pkg.path === nothing)
+ if !(
+ pkg.version == VersionSpec() && pkg.pinned == false &&
+ pkg.tree_hash === nothing && pkg.repo.source === nothing &&
+ pkg.repo.rev === nothing && pkg.path === nothing
+ )
pkgerror("packages may only be specified by name or UUID when calling `free`")
end
end
@@ -451,17 +559,20 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwa
return
end
-function test(ctx::Context, pkgs::Vector{PackageSpec};
- coverage=false, test_fn=nothing,
- julia_args::Union{Cmd, AbstractVector{<:AbstractString}}=``,
- test_args::Union{Cmd, AbstractVector{<:AbstractString}}=``,
- force_latest_compatible_version::Bool=false,
- allow_earlier_backwards_compatible_versions::Bool=true,
- allow_reresolve::Bool=true,
- kwargs...)
+function test(
+ ctx::Context, pkgs::Vector{PackageSpec};
+ coverage = false, test_fn = nothing,
+ julia_args::Union{Cmd, AbstractVector{<:AbstractString}} = ``,
+ test_args::Union{Cmd, AbstractVector{<:AbstractString}} = ``,
+ force_latest_compatible_version::Bool = false,
+ allow_earlier_backwards_compatible_versions::Bool = true,
+ allow_reresolve::Bool = true,
+ kwargs...
+ )
julia_args = Cmd(julia_args)
test_args = Cmd(test_args)
Context!(ctx; kwargs...)
+ Operations.ensure_manifest_registries!(ctx)
if isempty(pkgs)
ctx.env.pkg === nothing && pkgerror("The Project.toml of the package being tested must have a name and a UUID entry") #TODO Allow this?
@@ -496,18 +607,16 @@ function is_manifest_current(path::AbstractString)
return Operations.is_manifest_current(env)
end
-const UsageDict = Dict{String,DateTime}
-const UsageByDepotDict = Dict{String,UsageDict}
+const UsageDict = Dict{String, DateTime}
+const UsageByDepotDict = Dict{String, UsageDict}
"""
- gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, kwargs...)
+ gc(ctx::Context=Context(); verbose=false, force=false, kwargs...)
Garbage-collect package and artifact installations by sweeping over all known
`Manifest.toml` and `Artifacts.toml` files, noting those that have been deleted, and then
-finding artifacts and packages that are thereafter not used by any other projects,
-marking them as "orphaned". This method will only remove orphaned objects (package
-versions, artifacts, and scratch spaces) that have been continually un-used for a period
-of `collect_delay`; which defaults to seven days.
+finding artifacts and packages that are thereafter not used by any other projects.
+Unused packages, artifacts, repos, and scratch spaces are immediately deleted.
Garbage collection is only applied to the "user depot", e.g. the first entry in the
depot path. If you want to run `gc` on all depots set `force=true` (this might require
@@ -515,8 +624,11 @@ admin privileges depending on the setup).
Use verbose mode (`verbose=true`) for detailed output.
"""
-function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, force=false, kwargs...)
+function gc(ctx::Context = Context(); collect_delay::Union{Period, Nothing} = nothing, verbose = false, force = false, kwargs...)
Context!(ctx; kwargs...)
+ if collect_delay !== nothing
+ @warn "The `collect_delay` parameter is no longer used. Packages are now deleted immediately when they become unreachable."
+ end
env = ctx.env
# Only look at user-depot unless force=true
@@ -549,6 +661,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
for (filename, infos) in parse_toml(usage_filepath)
f.(Ref(filename), infos)
end
+ return
end
# Extract usage data from this depot, (taking only the latest state for each
@@ -556,7 +669,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# into the overall list across depots to create a single, coherent view across
# all depots.
usage = UsageDict()
- let usage=usage
+ let usage = usage
reduce_usage!(joinpath(logdir(depot), "manifest_usage.toml")) do filename, info
# For Manifest usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
@@ -565,7 +678,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
manifest_usage_by_depot[depot] = usage
usage = UsageDict()
- let usage=usage
+ let usage = usage
reduce_usage!(joinpath(logdir(depot), "artifact_usage.toml")) do filename, info
# For Artifact usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
@@ -576,7 +689,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# track last-used
usage = UsageDict()
parents = Dict{String, Set{String}}()
- let usage=usage
+ let usage = usage
reduce_usage!(joinpath(logdir(depot), "scratch_usage.toml")) do filename, info
# For Artifact usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
@@ -617,21 +730,20 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# Write out the TOML file for this depot
usage_path = joinpath(logdir(depot), fname)
if !(isempty(usage)::Bool) || isfile(usage_path)
- let usage=usage
- open(usage_path, "w") do io
- TOML.print(io, usage, sorted=true)
- end
+ let usage = usage
+ atomic_toml_write(usage_path, usage, sorted = true)
end
end
end
+ return
end
# Write condensed Manifest usage
- let all_manifest_tomls=all_manifest_tomls
+ let all_manifest_tomls = all_manifest_tomls
write_condensed_toml(manifest_usage_by_depot, "manifest_usage.toml") do depot, usage
# Keep only manifest usage markers that are still existent
- let usage=usage
- filter!(((k,v),) -> k in all_manifest_tomls, usage)
+ let usage = usage
+ filter!(((k, v),) -> k in all_manifest_tomls, usage)
# Expand it back into a dict-of-dicts
return Dict(k => [Dict("time" => v)] for (k, v) in usage)
@@ -640,23 +752,23 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
# Write condensed Artifact usage
- let all_artifact_tomls=all_artifact_tomls
+ let all_artifact_tomls = all_artifact_tomls
write_condensed_toml(artifact_usage_by_depot, "artifact_usage.toml") do depot, usage
let usage = usage
- filter!(((k,v),) -> k in all_artifact_tomls, usage)
+ filter!(((k, v),) -> k in all_artifact_tomls, usage)
return Dict(k => [Dict("time" => v)] for (k, v) in usage)
end
end
end
# Write condensed scratch space usage
- let all_scratch_parents=all_scratch_parents, all_scratch_dirs=all_scratch_dirs
+ let all_scratch_parents = all_scratch_parents, all_scratch_dirs = all_scratch_dirs
write_condensed_toml(scratch_usage_by_depot, "scratch_usage.toml") do depot, usage
# Keep only scratch directories that still exist
- filter!(((k,v),) -> k in all_scratch_dirs, usage)
+ filter!(((k, v),) -> k in all_scratch_dirs, usage)
# Expand it back into a dict-of-dicts
- expanded_usage = Dict{String,Vector{Dict}}()
+ expanded_usage = Dict{String, Vector{Dict}}()
for (k, v) in usage
# Drop scratch spaces whose parents are all non-existent
parents = scratch_parents_by_depot[depot][k]
@@ -665,10 +777,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
continue
end
- expanded_usage[k] = [Dict(
- "time" => v,
- "parent_projects" => collect(parents),
- )]
+ expanded_usage[k] = [
+ Dict(
+ "time" => v,
+ "parent_projects" => collect(parents),
+ ),
+ ]
end
return expanded_usage
end
@@ -699,7 +813,15 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
# Collect the locations of every repo referred to in this manifest
- return [Types.add_repo_cache_path(e.repo.source) for (u, e) in manifest if e.repo.source !== nothing]
+ return [
+ Types.add_repo_cache_path(
+ isurl(e.repo.source) ? e.repo.source :
+ safe_realpath(
+ isabspath(e.repo.source) ? e.repo.source :
+ normpath(joinpath(dirname(path), e.repo.source))
+ )
+ ) for (u, e) in manifest if e.repo.source !== nothing
+ ]
end
function process_artifacts_toml(path, pkgs_to_delete)
@@ -756,7 +878,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
# Mark packages/artifacts as active or not by calling the appropriate user function
- function mark(process_func::Function, index_files, ctx::Context; do_print=true, verbose=false, file_str=nothing)
+ function mark(process_func::Function, index_files, ctx::Context; do_print = true, verbose = false, file_str = nothing)
marked_paths = String[]
active_index_files = Set{String}()
for index_file in index_files
@@ -783,92 +905,36 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
return Set(marked_paths)
end
- gc_time = now()
- function merge_orphanages!(new_orphanage, paths, deletion_list, old_orphanage = UsageDict())
- for path in paths
- free_time = something(
- get(old_orphanage, path, nothing),
- gc_time,
- )
-
- # No matter what, store the free time in the new orphanage. This allows
- # something terrible to go wrong while trying to delete the artifact/
- # package and it will still try to be deleted next time. The only time
- # something is removed from an orphanage is when it didn't exist before
- # we even started the `gc` run.
- new_orphanage[path] = free_time
-
- # If this path was orphaned long enough ago, add it to the deletion list.
- # Otherwise, we continue to propagate its orphaning date but don't delete
- # it. It will get cleaned up at some future `gc`, or it will be used
- # again during a future `gc` in which case it will not persist within the
- # orphanage list.
- if gc_time - free_time >= collect_delay
- push!(deletion_list, path)
- end
- end
- end
-
# Scan manifests, parse them, read in all UUIDs listed and mark those as active
# printpkgstyle(ctx.io, :Active, "manifests:")
- packages_to_keep = mark(process_manifest_pkgs, all_manifest_tomls, ctx,
- verbose=verbose, file_str="manifest files")
-
- # Do an initial scan of our depots to get a preliminary `packages_to_delete`.
- packages_to_delete = String[]
- for depot in gc_depots
- depot_orphaned_packages = String[]
- packagedir = abspath(depot, "packages")
- if isdir(packagedir)
- for name in readdir(packagedir)
- !isdir(joinpath(packagedir, name)) && continue
-
- for slug in readdir(joinpath(packagedir, name))
- pkg_dir = joinpath(packagedir, name, slug)
- !isdir(pkg_dir) && continue
-
- if !(pkg_dir in packages_to_keep)
- push!(depot_orphaned_packages, pkg_dir)
- end
- end
- end
- end
- merge_orphanages!(UsageDict(), depot_orphaned_packages, packages_to_delete)
- end
+ packages_to_keep = mark(
+ process_manifest_pkgs, all_manifest_tomls, ctx,
+ verbose = verbose, file_str = "manifest files"
+ )
- # Next, do the same for artifacts. Note that we MUST do this after calculating
- # `packages_to_delete`, as `process_artifacts_toml()` uses it internally to discount
- # `Artifacts.toml` files that will be deleted by the future culling operation.
+ # Next, do the same for artifacts.
# printpkgstyle(ctx.io, :Active, "artifacts:")
- artifacts_to_keep = let packages_to_delete=packages_to_delete
- mark(x -> process_artifacts_toml(x, packages_to_delete),
- all_artifact_tomls, ctx; verbose=verbose, file_str="artifact files")
- end
- repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print=false)
+ artifacts_to_keep = mark(
+ x -> process_artifacts_toml(x, String[]),
+ all_artifact_tomls, ctx; verbose = verbose, file_str = "artifact files"
+ )
+ repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print = false)
# printpkgstyle(ctx.io, :Active, "scratchspaces:")
- spaces_to_keep = let packages_to_delete=packages_to_delete
- mark(x -> process_scratchspace(x, packages_to_delete),
- all_scratch_dirs, ctx; verbose=verbose, file_str="scratchspaces")
- end
+ spaces_to_keep = mark(
+ x -> process_scratchspace(x, String[]),
+ all_scratch_dirs, ctx; verbose = verbose, file_str = "scratchspaces"
+ )
- # Collect all orphaned paths (packages, artifacts and repos that are not reachable). These
- # are implicitly defined in that we walk all packages/artifacts installed, then if
- # they were not marked in the above steps, we reap them.
+ # Collect all unreachable paths (packages, artifacts and repos that are not reachable)
+ # and immediately delete them.
packages_to_delete = String[]
artifacts_to_delete = String[]
repos_to_delete = String[]
spaces_to_delete = String[]
for depot in gc_depots
- # We track orphaned objects on a per-depot basis, writing out our `orphaned.toml`
- # tracking file immediately, only pushing onto the overall `*_to_delete` lists if
- # the package has been orphaned for at least a period of `collect_delay`
- depot_orphaned_packages = String[]
- depot_orphaned_artifacts = String[]
- depot_orphaned_repos = String[]
- depot_orphaned_scratchspaces = String[]
packagedir = abspath(depot, "packages")
if isdir(packagedir)
@@ -880,7 +946,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
!isdir(pkg_dir) && continue
if !(pkg_dir in packages_to_keep)
- push!(depot_orphaned_packages, pkg_dir)
+ push!(packages_to_delete, pkg_dir)
end
end
end
@@ -892,7 +958,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
repo_dir = joinpath(reposdir, repo)
!isdir(repo_dir) && continue
if !(repo_dir in repos_to_keep)
- push!(depot_orphaned_repos, repo_dir)
+ push!(repos_to_delete, repo_dir)
end
end
end
@@ -904,7 +970,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
!isdir(artifact_path) && continue
if !(artifact_path in artifacts_to_keep)
- push!(depot_orphaned_artifacts, artifact_path)
+ push!(artifacts_to_delete, artifact_path)
end
end
end
@@ -918,13 +984,13 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
space_dir_or_file = joinpath(uuid_dir, space)
if isdir(space_dir_or_file)
if !(space_dir_or_file in spaces_to_keep)
- push!(depot_orphaned_scratchspaces, space_dir_or_file)
+ push!(spaces_to_delete, space_dir_or_file)
end
elseif uuid == Operations.PkgUUID && isfile(space_dir_or_file)
# special cleanup for the precompile cache files that Pkg saves
- if any(prefix->startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_"))
- if mtime(space_dir_or_file) < (time() - (24*60*60))
- push!(depot_orphaned_scratchspaces, space_dir_or_file)
+ if any(prefix -> startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_"))
+ if mtime(space_dir_or_file) < (time() - (24 * 60 * 60))
+ push!(spaces_to_delete, space_dir_or_file)
end
end
end
@@ -932,27 +998,6 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
end
- # Read in this depot's `orphaned.toml` file:
- orphanage_file = joinpath(logdir(depot), "orphaned.toml")
- new_orphanage = UsageDict()
- old_orphanage = try
- TOML.parse(String(read(orphanage_file)))
- catch
- UsageDict()
- end
-
- # Update the package and artifact lists of things to delete, and
- # create the `new_orphanage` list for this depot.
- merge_orphanages!(new_orphanage, depot_orphaned_packages, packages_to_delete, old_orphanage)
- merge_orphanages!(new_orphanage, depot_orphaned_artifacts, artifacts_to_delete, old_orphanage)
- merge_orphanages!(new_orphanage, depot_orphaned_repos, repos_to_delete, old_orphanage)
- merge_orphanages!(new_orphanage, depot_orphaned_scratchspaces, spaces_to_delete, old_orphanage)
-
- # Write out the `new_orphanage` for this depot
- mkpath(dirname(orphanage_file))
- open(orphanage_file, "w") do io
- TOML.print(io, new_orphanage, sorted=true)
- end
end
function recursive_dir_size(path)
@@ -964,12 +1009,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
try
size += lstat(path).size
catch ex
- @error("Failed to calculate size of $path", exception=ex)
+ @error("Failed to calculate size of $path", exception = ex)
end
end
end
catch ex
- @error("Failed to calculate size of $path", exception=ex)
+ @error("Failed to calculate size of $path", exception = ex)
end
return size
end
@@ -980,7 +1025,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
try
lstat(path).size
catch ex
- @error("Failed to calculate size of $path", exception=ex)
+ @error("Failed to calculate size of $path", exception = ex)
0
end
else
@@ -988,14 +1033,16 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
try
Base.Filesystem.prepare_for_deletion(path)
- Base.rm(path; recursive=true, force=true)
+ Base.rm(path; recursive = true, force = true)
catch e
- @warn("Failed to delete $path", exception=e)
+ @warn("Failed to delete $path", exception = e)
return 0
end
if verbose
- printpkgstyle(ctx.io, :Deleted, pathrepr(path) * " (" *
- Base.format_bytes(path_size) * ")")
+ printpkgstyle(
+ ctx.io, :Deleted, pathrepr(path) * " (" *
+ Base.format_bytes(path_size) * ")"
+ )
end
return path_size
end
@@ -1045,18 +1092,33 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
end
- # Delete any files that could not be rm-ed and were specially moved to the delayed delete directory.
- # Do this silently because it's out of scope for Pkg.gc() but it's helpful to use this opportunity to do it
- if isdefined(Base.Filesystem, :delayed_delete_dir)
- if isdir(Base.Filesystem.delayed_delete_dir())
- for p in readdir(Base.Filesystem.delayed_delete_dir(), join=true)
+ # Delete anything that could not be rm-ed and was specially recorded in the delayed delete reference folder.
+ # Do this silently because it's out of scope for Pkg.gc() but it's helpful to use this opportunity to do it.
+ if isdefined(Base.Filesystem, :delayed_delete_ref)
+ delayed_delete_ref_path = Base.Filesystem.delayed_delete_ref()
+ if isdir(delayed_delete_ref_path)
+ delayed_delete_dirs = Set{String}()
+ for f in readdir(delayed_delete_ref_path; join = true)
try
+ p = readline(f)
+ push!(delayed_delete_dirs, dirname(p))
Base.Filesystem.prepare_for_deletion(p)
- Base.rm(p; recursive=true, force=true, allow_delayed_delete=false)
+ Base.rm(p; recursive = true, force = true, allow_delayed_delete = false)
+ Base.rm(f)
catch e
- @debug "Failed to delete $p" exception=e
+ @debug "Failed to delete $p" exception = e
end
end
+ for dir in delayed_delete_dirs
+ if basename(dir) == "julia_delayed_deletes" && isempty(readdir(dir))
+ Base.Filesystem.prepare_for_deletion(dir)
+ Base.rm(dir; recursive = true)
+ end
+ end
+ if isempty(readdir(delayed_delete_ref_path))
+ Base.Filesystem.prepare_for_deletion(delayed_delete_ref_path)
+ Base.rm(delayed_delete_ref_path; recursive = true)
+ end
end
end
@@ -1072,7 +1134,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
s = ndel == 1 ? "" : "s"
bytes_saved_string = Base.format_bytes(freed)
- printpkgstyle(ctx.io, :Deleted, "$(ndel) $(name)$(s) ($bytes_saved_string)")
+ return printpkgstyle(ctx.io, :Deleted, "$(ndel) $(name)$(s) ($bytes_saved_string)")
end
print_deleted(ndel_pkg, package_space_freed, "package installation")
print_deleted(ndel_repo, repo_space_freed, "repo")
@@ -1083,11 +1145,40 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
printpkgstyle(ctx.io, :Deleted, "no artifacts, repos, packages or scratchspaces")
end
+ # Run git gc on registries if git is available
+ if Sys.which("git") !== nothing
+ for depot in gc_depots
+ reg_dir = joinpath(depot, "registries")
+ isdir(reg_dir) || continue
+
+ for reg_name in readdir(reg_dir)
+ reg_path = joinpath(reg_dir, reg_name)
+ isdir(reg_path) || continue
+ git_dir = joinpath(reg_path, ".git")
+ isdir(git_dir) || continue
+
+ try
+ if verbose
+ printpkgstyle(ctx.io, :GC, "running git gc on registry $(reg_name)")
+ end
+ # Run git gc quietly, don't error if it fails
+ run(`git -C $reg_path gc --quiet`)
+ catch e
+ # Silently ignore errors from git gc
+ if verbose
+ @warn "git gc failed for registry $(reg_name)" exception = e
+ end
+ end
+ end
+ end
+ end
+
return
end
-function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose=false, kwargs...)
+function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose = false, allow_reresolve::Bool = true, kwargs...)
Context!(ctx; kwargs...)
+ Operations.ensure_manifest_registries!(ctx)
if isempty(pkgs)
if ctx.env.pkg !== nothing
@@ -1101,7 +1192,7 @@ function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose=false, kwargs...
project_resolve!(ctx.env, pkgs)
manifest_resolve!(ctx.env.manifest, pkgs)
ensure_resolved(ctx, ctx.env.manifest, pkgs)
- Operations.build(ctx, Set{UUID}(pkg.uuid for pkg in pkgs), verbose)
+ return Operations.build(ctx, Set{UUID}(pkg.uuid for pkg in pkgs), verbose; allow_reresolve)
end
function get_or_make_pkgspec(pkgspecs::Vector{PackageSpec}, ctx::Context, uuid)
@@ -1123,13 +1214,37 @@ function get_or_make_pkgspec(pkgspecs::Vector{PackageSpec}, ctx::Context, uuid)
end
end
-function precompile(ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool=false,
- strict::Bool=false, warn_loaded = true, already_instantiated = false, timing::Bool = false,
- _from_loading::Bool=false, configs::Union{Base.Precompilation.Config,Vector{Base.Precompilation.Config}}=(``=>Base.CacheFlags()),
- workspace::Bool=false, kwargs...)
+function precompile(
+ ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool = false,
+ strict::Bool = false, warn_loaded = true, already_instantiated = false, timing::Bool = false,
+ _from_loading::Bool = false, configs::Union{Base.Precompilation.Config, Vector{Base.Precompilation.Config}} = (`` => Base.CacheFlags()),
+ workspace::Bool = false, monitor::Bool = false, stop::Bool = false, cancel::Bool = false, kwargs...
+ )
+ # Handle background precompilation control options via Base
+ if monitor
+ Base.Precompilation.monitor_background_precompile(ctx.io)
+ return
+ end
+ if stop
+ if Base.Precompilation.stop_background_precompile(graceful = true)
+ printpkgstyle(ctx.io, :Info, "Stopping background precompilation...")
+ else
+ printpkgstyle(ctx.io, :Info, "No background precompilation is running")
+ end
+ return
+ end
+ if cancel
+ if Base.Precompilation.stop_background_precompile(graceful = false)
+ printpkgstyle(ctx.io, :Info, "Canceling background precompilation...")
+ else
+ printpkgstyle(ctx.io, :Info, "No background precompilation is running")
+ end
+ return
+ end
+
Context!(ctx; kwargs...)
if !already_instantiated
- instantiate(ctx; allow_autoprecomp=false, kwargs...)
+ instantiate(ctx; allow_autoprecomp = false, kwargs...)
@debug "precompile: instantiated"
end
@@ -1139,17 +1254,28 @@ function precompile(ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool
return
end
- io = ctx.io
- if io isa IOContext{IO}
- # precompile does quite a bit of output and using the IOContext{IO} can cause
- # some slowdowns, the important part here is to not specialize the whole
- # precompile function on the io
- io = io.io
+ return activate(dirname(ctx.env.project_file)) do
+ io = if ctx.io isa IOContext{IO} && !isa(ctx.io.io, Base.PipeEndpoint)
+ # precompile does quite a bit of output and using the IOContext{IO} can cause
+ # some slowdowns, the important part here is to not specialize the whole
+ # precompile function on the io.
+ # But don't unwrap the IOContext if it is a PipeEndpoint, as that would
+ # cause the output to lose color.
+ ctx.io.io
+ else
+ ctx.io
+ end
+ pkgs_name = String[pkg.name for pkg in pkgs]
+ # Allow user to press 'd' to detach when running interactively
+ detachable = isinteractive()
+ return Base.Precompilation.precompilepkgs(pkgs_name; internal_call, strict, warn_loaded, timing, _from_loading, configs, manifest = workspace, io, detachable)
end
+end
- activate(dirname(ctx.env.project_file)) do
- pkgs_name = String[pkg.name for pkg in pkgs]
- return Base.Precompilation.precompilepkgs(pkgs_name; internal_call, strict, warn_loaded, timing, _from_loading, configs, manifest=workspace, io)
+function precompile(f, args...; kwargs...)
+ return Base.ScopedValues.@with _autoprecompilation_enabled_scoped => false begin
+ f()
+ Pkg.precompile(args...; kwargs...)
end
end
@@ -1163,18 +1289,21 @@ function tree_hash(repo::LibGit2.GitRepo, tree_hash::String)
end
instantiate(; kwargs...) = instantiate(Context(); kwargs...)
-function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
- update_registry::Bool=true, verbose::Bool=false,
- platform::AbstractPlatform=HostPlatform(), allow_build::Bool=true, allow_autoprecomp::Bool=true,
- workspace::Bool=false, julia_version_strict::Bool=false, kwargs...)
+function instantiate(
+ ctx::Context; manifest::Union{Bool, Nothing} = nothing,
+ update_registry::Bool = true, verbose::Bool = false,
+ platform::AbstractPlatform = HostPlatform(), allow_build::Bool = true, allow_autoprecomp::Bool = true,
+ workspace::Bool = false, julia_version_strict::Bool = false, kwargs...
+ )
Context!(ctx; kwargs...)
if Registry.download_default_registries(ctx.io)
copy!(ctx.registries, Registry.reachable_registries())
end
+ Operations.ensure_manifest_registries!(ctx)
if !isfile(ctx.env.project_file) && isfile(ctx.env.manifest_file)
_manifest = Pkg.Types.read_manifest(ctx.env.manifest_file)
Types.check_manifest_julia_version_compat(_manifest, ctx.env.manifest_file; julia_version_strict)
- deps = Dict{String,String}()
+ deps = Dict{String, String}()
for (uuid, pkg) in _manifest
if pkg.name in keys(deps)
# TODO, query what package to put in Project when in interactive mode?
@@ -1183,7 +1312,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
deps[pkg.name] = string(uuid)
end
Types.write_project(Dict("deps" => deps), ctx.env.project_file)
- return instantiate(Context(); manifest=manifest, update_registry=update_registry, allow_autoprecomp=allow_autoprecomp, verbose=verbose, platform=platform, kwargs...)
+ return instantiate(Context(); manifest = manifest, update_registry = update_registry, allow_autoprecomp = allow_autoprecomp, verbose = verbose, platform = platform, kwargs...)
end
if (!isfile(ctx.env.manifest_file) && manifest === nothing) || manifest == false
# given no manifest exists, only allow invoking a registry update if there are project deps
@@ -1198,17 +1327,24 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
Types.check_manifest_julia_version_compat(ctx.env.manifest, ctx.env.manifest_file; julia_version_strict)
if Operations.is_manifest_current(ctx.env) === false
+ resolve_cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()"
+ update_cmd = Pkg.in_repl_mode() ? "pkg> update" : "Pkg.update()"
@warn """The project dependencies or compat requirements have changed since the manifest was last resolved.
- It is recommended to `Pkg.resolve()` or consider `Pkg.update()` if necessary."""
+ It is recommended to `$resolve_cmd` or consider `$update_cmd` if necessary."""
end
Operations.prune_manifest(ctx.env)
for (name, uuid) in ctx.env.project.deps
get(ctx.env.manifest, uuid, nothing) === nothing || continue
- pkgerror("`$name` is a direct dependency, but does not appear in the manifest.",
- " If you intend `$name` to be a direct dependency, run `Pkg.resolve()` to populate the manifest.",
- " Otherwise, remove `$name` with `Pkg.rm(\"$name\")`.",
- " Finally, run `Pkg.instantiate()` again.")
+ resolve_cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()"
+ rm_cmd = Pkg.in_repl_mode() ? "pkg> rm $name" : "Pkg.rm(\"$name\")"
+ instantiate_cmd = Pkg.in_repl_mode() ? "pkg> instantiate" : "Pkg.instantiate()"
+ pkgerror(
+ "`$name` is a direct dependency, but does not appear in the manifest.",
+ " If you intend `$name` to be a direct dependency, run `$resolve_cmd` to populate the manifest.",
+ " Otherwise, remove `$name` with `$rm_cmd`.",
+ " Finally, run `$instantiate_cmd` again."
+ )
end
# check if all source code and artifacts are downloaded to exit early
if Operations.is_instantiated(ctx.env, workspace; platform)
@@ -1228,7 +1364,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
if !(e isa PkgError) || update_registry == false
rethrow(e)
end
- Operations.update_registries(ctx; force=false)
+ Operations.update_registries(ctx; force = false)
Operations.check_registered(ctx.registries, pkgs)
end
new_git = UUID[]
@@ -1241,18 +1377,18 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
## Download repo at tree hash
# determine canonical form of repo source
if !isurl(repo_source)
- repo_source = normpath(joinpath(dirname(ctx.env.project_file), repo_source))
+ repo_source = manifest_rel_path(ctx.env, repo_source)
end
if !isurl(repo_source) && !isdir(repo_source)
pkgerror("Did not find path `$(repo_source)` for $(err_rep(pkg))")
end
repo_path = Types.add_repo_cache_path(repo_source)
- let repo_source=repo_source
- LibGit2.with(GitTools.ensure_clone(ctx.io, repo_path, repo_source; isbare=true)) do repo
+ let repo_source = repo_source
+ LibGit2.with(GitTools.ensure_clone(ctx.io, repo_path, repo_source; isbare = true, depth = 1)) do repo
# We only update the clone if the tree hash can't be found
tree_hash_object = tree_hash(repo, string(pkg.tree_hash))
if tree_hash_object === nothing
- GitTools.fetch(ctx.io, repo, repo_source; refspecs=Types.refspecs)
+ GitTools.fetch(ctx.io, repo, repo_source; refspecs = Types.refspecs, depth = LibGit2.Consts.FETCH_DEPTH_UNSHALLOW)
tree_hash_object = tree_hash(repo, string(pkg.tree_hash))
end
if tree_hash_object === nothing
@@ -1266,39 +1402,40 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
end
# Install all packages
- new_apply = Operations.download_source(ctx)
+ new_apply = Operations.download_source(ctx, pkgs)
# Install all artifacts
- Operations.download_artifacts(ctx; platform, verbose)
+ Operations.download_artifacts(ctx, pkgs; platform, verbose)
# Run build scripts
- allow_build && Operations.build_versions(ctx, union(new_apply, new_git); verbose=verbose)
+ allow_build && Operations.build_versions(ctx, union(new_apply, new_git); verbose = verbose)
- allow_autoprecomp && Pkg._auto_precompile(ctx, already_instantiated = true)
+ return allow_autoprecomp && Pkg._auto_precompile(ctx, already_instantiated = true)
end
-@deprecate status(mode::PackageMode) status(mode=mode)
+@deprecate status(mode::PackageMode) status(mode = mode)
-function status(ctx::Context, pkgs::Vector{PackageSpec}; diff::Bool=false, mode=PKGMODE_PROJECT, workspace::Bool=false, outdated::Bool=false, compat::Bool=false, extensions::Bool=false, io::IO=stdout_f())
+function status(ctx::Context, pkgs::Vector{PackageSpec}; diff::Bool = false, mode = PKGMODE_PROJECT, workspace::Bool = false, outdated::Bool = false, deprecated::Bool = false, compat::Bool = false, extensions::Bool = false, io::IO = stdout_f())
if compat
diff && pkgerror("Compat status has no `diff` mode")
outdated && pkgerror("Compat status has no `outdated` mode")
+ deprecated && pkgerror("Compat status has no `deprecated` mode")
extensions && pkgerror("Compat status has no `extensions` mode")
Operations.print_compat(ctx, pkgs; io)
else
- Operations.status(ctx.env, ctx.registries, pkgs; mode, git_diff=diff, io, outdated, extensions, workspace)
+ Operations.status(ctx.env, ctx.registries, pkgs; mode, git_diff = diff, io, outdated, deprecated, extensions, workspace)
end
return nothing
end
-function activate(;temp=false, shared=false, prev=false, io::IO=stderr_f())
+function activate(; temp = false, shared = false, prev = false, io::IO = stderr_f())
shared && pkgerror("Must give a name for a shared environment")
- temp && return activate(mktempdir(); io=io)
+ temp && return activate(mktempdir(); io = io)
if prev
if isempty(PREV_ENV_PATH[])
pkgerror("No previously active environment found")
else
- return activate(PREV_ENV_PATH[]; io=io)
+ return activate(PREV_ENV_PATH[]; io = io)
end
end
if !isnothing(Base.active_project())
@@ -1320,14 +1457,14 @@ function _activate_dep(dep_name::AbstractString)
return
end
uuid = get(ctx.env.project.deps, dep_name, nothing)
- if uuid !== nothing
+ return if uuid !== nothing
entry = manifest_info(ctx.env.manifest, uuid)
if entry.path !== nothing
return joinpath(dirname(ctx.env.manifest_file), entry.path::String)
end
end
end
-function activate(path::AbstractString; shared::Bool=false, temp::Bool=false, io::IO=stderr_f())
+function activate(path::AbstractString; shared::Bool = false, temp::Bool = false, io::IO = stderr_f())
temp && pkgerror("Can not give `path` argument when creating a temporary environment")
if !shared
# `pkg> activate path`/`Pkg.activate(path)` does the following
@@ -1374,23 +1511,39 @@ end
function activate(f::Function, new_project::AbstractString)
old = Base.ACTIVE_PROJECT[]
Base.ACTIVE_PROJECT[] = new_project
- try
+ return try
f()
finally
Base.ACTIVE_PROJECT[] = old
end
end
-function compat(ctx::Context, pkg::String, compat_str::Union{Nothing,String}; io = nothing, kwargs...)
+function _compat(ctx::Context, pkg::String, compat_str::Union{Nothing, String}; current::Bool = false, io = nothing, kwargs...)
+ if current
+ if compat_str !== nothing
+ pkgerror("`current` is true, but `compat_str` is not nothing. This is not allowed.")
+ end
+ return set_current_compat(ctx, pkg; io = io)
+ end
io = something(io, ctx.io)
pkg = pkg == "Julia" ? "julia" : pkg
isnothing(compat_str) || (compat_str = string(strip(compat_str, '"')))
+ existing_compat = Operations.get_compat_str(ctx.env.project, pkg)
+ # Double check before deleting a compat entry (see issue #3567)
+ if isinteractive() && (isnothing(compat_str) || isempty(compat_str))
+ if !isnothing(existing_compat)
+ ans = Base.prompt(stdin, ctx.io, "No compat string was given. Delete existing compat entry `$pkg = $(repr(existing_compat))`? [y]/n", default = "y")
+ if lowercase(ans) !== "y"
+ return
+ end
+ end
+ end
if haskey(ctx.env.project.deps, pkg) || pkg == "julia"
success = Operations.set_compat(ctx.env.project, pkg, isnothing(compat_str) ? nothing : isempty(compat_str) ? nothing : compat_str)
success === false && pkgerror("invalid compat version specifier \"$(compat_str)\"")
write_env(ctx.env)
if isnothing(compat_str) || isempty(compat_str)
- printpkgstyle(io, :Compat, "entry removed for $(pkg)")
+ printpkgstyle(io, :Compat, "entry removed:\n $pkg = $(repr(existing_compat))")
else
printpkgstyle(io, :Compat, "entry set:\n $(pkg) = $(repr(compat_str))")
end
@@ -1410,15 +1563,98 @@ function compat(ctx::Context, pkg::String, compat_str::Union{Nothing,String}; io
pkgerror("No package named $pkg in current Project")
end
end
-compat(pkg::String; kwargs...) = compat(pkg, nothing; kwargs...)
-compat(pkg::String, compat_str::Union{Nothing,String}; kwargs...) = compat(Context(), pkg, compat_str; kwargs...)
-compat(;kwargs...) = compat(Context(); kwargs...)
+function compat(ctx::Context = Context(); current::Bool = false, kwargs...)
+ if current
+ return set_current_compat(ctx; kwargs...)
+ end
+ return _compat(ctx; kwargs...)
+end
+compat(pkg::String, compat_str::Union{Nothing, String} = nothing; kwargs...) = _compat(Context(), pkg, compat_str; kwargs...)
+
+
+function set_current_compat(ctx::Context, target_pkg::Union{Nothing, String} = nothing; io = nothing)
+ io = something(io, ctx.io)
+ updated_deps = String[]
+
+ deps_to_process = if target_pkg !== nothing
+ # Process only the specified package
+ if haskey(ctx.env.project.deps, target_pkg)
+ [(target_pkg, ctx.env.project.deps[target_pkg])]
+ else
+ pkgerror("Package $(target_pkg) not found in project dependencies")
+ end
+ else
+ # Process all packages (existing behavior)
+ collect(ctx.env.project.deps)
+ end
+
+ # Process regular package dependencies
+ for (dep, uuid) in deps_to_process
+ compat_str = Operations.get_compat_str(ctx.env.project, dep)
+ if target_pkg !== nothing || isnothing(compat_str)
+ entry = get(ctx.env.manifest, uuid, nothing)
+ entry === nothing && continue
+ v = entry.version
+ v === nothing && continue
+ pkgversion = string(Base.thispatch(v))
+ Operations.set_compat(ctx.env.project, dep, pkgversion) ||
+ pkgerror("invalid compat version specifier \"$(pkgversion)\"")
+ push!(updated_deps, dep)
+ end
+ end
+
+ # Also handle Julia compat entry when processing all packages (not when targeting a specific package)
+ if target_pkg === nothing
+ julia_compat_str = Operations.get_compat_str(ctx.env.project, "julia")
+ if isnothing(julia_compat_str)
+ # Set julia compat to current running version
+ julia_version = string(Base.thispatch(VERSION))
+ Operations.set_compat(ctx.env.project, "julia", julia_version) ||
+ pkgerror("invalid compat version specifier \"$(julia_version)\"")
+ push!(updated_deps, "julia")
+ end
+ end
+
+ # Update messaging
+ if isempty(updated_deps)
+ if target_pkg !== nothing
+ printpkgstyle(io, :Info, "$(target_pkg) already has a compat entry or is not in manifest. No changes made.", color = Base.info_color())
+ else
+ printpkgstyle(io, :Info, "no missing compat entries found. No changes made.", color = Base.info_color())
+ end
+ elseif length(updated_deps) == 1
+ printpkgstyle(io, :Info, "new entry set for $(only(updated_deps)) based on its current version", color = Base.info_color())
+ else
+ printpkgstyle(io, :Info, "new entries set for $(join(updated_deps, ", ", " and ")) based on their current versions", color = Base.info_color())
+ end
+
+ write_env(ctx.env)
+ return Operations.print_compat(ctx; io)
+end
+set_current_compat(; kwargs...) = set_current_compat(Context(); kwargs...)
#######
# why #
#######
-function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=false, kwargs...)
+function why_find_paths!(final_paths, incoming, project_deps, current, path)
+ push!(path, current)
+ current in project_deps && push!(final_paths, path) # record once we've traversed to a project dep
+ haskey(incoming, current) || return # but only return if we've reached a leaf that nothing depends on
+ for p in incoming[current]
+ if p in path
+ # detected dependency cycle and none of the dependencies in the cycle
+ # are in the project could happen when manually modifying
+ # the project and running this function before a
+ # resolve
+ continue
+ end
+ why_find_paths!(final_paths, incoming, project_deps, p, copy(path))
+ end
+ return
+end
+
+function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool = false, kwargs...)
require_not_empty(pkgs, :why)
manifest_resolve!(ctx.env.manifest, pkgs)
@@ -1442,36 +1678,21 @@ function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=fa
end
end
- function find_paths!(final_paths, current, path = UUID[])
- push!(path, current)
- current in project_deps && push!(final_paths, path) # record once we've traversed to a project dep
- haskey(incoming, current) || return # but only return if we've reached a leaf that nothing depends on
- for p in incoming[current]
- if p in path
- # detected dependency cycle and none of the dependencies in the cycle
- # are in the project could happen when manually modifying
- # the project and running this function function before a
- # resolve
- continue
- end
- find_paths!(final_paths, p, copy(path))
- end
- end
-
first = true
for pkg in pkgs
!first && println(io)
first = false
final_paths = Set{Vector{UUID}}()
- find_paths!(final_paths, pkg.uuid)
+ why_find_paths!(final_paths, incoming, project_deps, pkg.uuid, UUID[])
foreach(reverse!, final_paths)
final_paths_names = map(x -> [ctx.env.manifest[uuid].name for uuid in x], collect(final_paths))
sort!(final_paths_names, by = x -> (x, length(x)))
- delimiter = sprint((io, args) -> printstyled(io, args...; color=:light_green), "→", context=io)
+ delimiter = sprint((io, args) -> printstyled(io, args...; color = :light_green), "→", context = io)
for path in final_paths_names
println(io, " ", join(path, " $delimiter "))
end
end
+ return
end
@@ -1493,7 +1714,7 @@ const undo_entries = Dict{String, UndoState}()
const max_undo_limit = 50
const saved_initial_snapshot = Ref(false)
-function add_snapshot_to_undo(env=nothing)
+function add_snapshot_to_undo(env = nothing)
# only attempt to take a snapshot if there is
# an active project to be found
if env === nothing
@@ -1511,14 +1732,14 @@ function add_snapshot_to_undo(env=nothing)
return
end
snapshot = UndoSnapshot(now(), env.project, env.manifest)
- deleteat!(state.entries, 1:(state.idx-1))
+ deleteat!(state.entries, 1:(state.idx - 1))
pushfirst!(state.entries, snapshot)
state.idx = 1
- resize!(state.entries, min(length(state.entries), max_undo_limit))
+ return resize!(state.entries, min(length(state.entries), max_undo_limit))
end
-undo(ctx = Context()) = redo_undo(ctx, :undo, 1)
+undo(ctx = Context()) = redo_undo(ctx, :undo, 1)
redo(ctx = Context()) = redo_undo(ctx, :redo, -1)
function redo_undo(ctx, mode::Symbol, direction::Int)
@assert direction == 1 || direction == -1
@@ -1529,16 +1750,16 @@ function redo_undo(ctx, mode::Symbol, direction::Int)
state.idx += direction
snapshot = state.entries[state.idx]
ctx.env.manifest, ctx.env.project = snapshot.manifest, snapshot.project
- write_env(ctx.env; update_undo=false)
- Operations.show_update(ctx.env, ctx.registries; io=ctx.io)
+ write_env(ctx.env; update_undo = false)
+ return Operations.show_update(ctx.env, ctx.registries; io = ctx.io)
end
function setprotocol!(;
- domain::AbstractString="github.com",
- protocol::Union{Nothing, AbstractString}=nothing
-)
- GitTools.setprotocol!(domain=domain, protocol=protocol)
+ domain::AbstractString = "github.com",
+ protocol::Union{Nothing, AbstractString} = nothing
+ )
+ GitTools.setprotocol!(domain = domain, protocol = protocol)
return nothing
end
@@ -1546,10 +1767,15 @@ end
function handle_package_input!(pkg::PackageSpec)
if pkg.path !== nothing && pkg.url !== nothing
- pkgerror("`path` and `url` are conflicting specifications")
+ pkgerror("Conflicting `path` and `url` in PackageSpec")
+ end
+ if pkg.repo.source !== nothing || pkg.repo.rev !== nothing || pkg.repo.subdir !== nothing
+ pkgerror("`repo` is a private field of PackageSpec and should not be set directly")
end
- pkg.repo = Types.GitRepo(rev = pkg.rev, source = pkg.url !== nothing ? pkg.url : pkg.path,
- subdir = pkg.subdir)
+ pkg.repo = Types.GitRepo(
+ rev = pkg.rev, source = pkg.url !== nothing ? pkg.url : pkg.path,
+ subdir = pkg.subdir
+ )
pkg.path = nothing
pkg.tree_hash = nothing
if pkg.version === nothing
@@ -1558,28 +1784,7 @@ function handle_package_input!(pkg::PackageSpec)
if !(pkg.version isa VersionNumber)
pkg.version = VersionSpec(pkg.version)
end
- pkg.uuid = pkg.uuid isa String ? UUID(pkg.uuid) : pkg.uuid
-end
-
-function upgrade_manifest(man_path::String)
- dir = mktempdir()
- cp(man_path, joinpath(dir, "Manifest.toml"))
- Pkg.activate(dir) do
- Pkg.upgrade_manifest()
- end
- mv(joinpath(dir, "Manifest.toml"), man_path, force = true)
-end
-function upgrade_manifest(ctx::Context = Context())
- before_format = ctx.env.manifest.manifest_format
- if before_format == v"2.0"
- pkgerror("Format of manifest file at `$(ctx.env.manifest_file)` already up to date: manifest_format == $(before_format)")
- elseif before_format != v"1.0"
- pkgerror("Format of manifest file at `$(ctx.env.manifest_file)` version is unrecognized: manifest_format == $(before_format)")
- end
- ctx.env.manifest.manifest_format = v"2.0"
- Types.write_manifest(ctx.env)
- printpkgstyle(ctx.io, :Updated, "Format of manifest file at `$(ctx.env.manifest_file)` updated from v$(before_format.major).$(before_format.minor) to v2.0")
- return nothing
+ return pkg.uuid = pkg.uuid isa String ? UUID(pkg.uuid) : pkg.uuid
end
"""
@@ -1595,4 +1800,30 @@ function auto_gc(on::Bool)
return pstate
end
+"""
+ readonly()
+
+Return whether the current environment is readonly.
+"""
+function readonly(ctx::Context = Context())
+ return ctx.env.project.readonly
+end
+
+"""
+ readonly(on::Bool)
+
+Enable or disable readonly mode for the current environment.
+Return the previous state.
+"""
+function readonly(on::Bool, ctx::Context = Context())
+ previous_state = ctx.env.project.readonly
+ ctx.env.project.readonly = on
+ Types.write_env(ctx.env; skip_readonly_check = true)
+
+ mode_str = on ? "enabled" : "disabled"
+ printpkgstyle(ctx.io, :Updated, "Readonly mode $mode_str for project at $(ctx.env.project_file)")
+
+ return previous_state
+end
+
end # module
diff --git a/src/Apps/Apps.jl b/src/Apps/Apps.jl
new file mode 100644
index 0000000000..8521f68958
--- /dev/null
+++ b/src/Apps/Apps.jl
@@ -0,0 +1,635 @@
+module Apps
+
+using Pkg
+using Pkg: atomic_toml_write
+using Pkg.Versions
+using Pkg.Types: AppInfo, PackageSpec, Context, EnvCache, PackageEntry, Manifest, handle_repo_add!, handle_repo_develop!, write_manifest, write_project,
+ pkgerror, projectfile_path, manifestfile_path
+using Pkg.Operations: print_single, source_path, update_package_add
+using Pkg.API: handle_package_input!
+using TOML, UUIDs
+using Dates
+import Pkg.Registry
+
+public add, rm, status, update, develop
+
+app_env_folder() = joinpath(first(DEPOT_PATH), "environments", "apps")
+app_manifest_file() = joinpath(app_env_folder(), "AppManifest.toml")
+julia_bin_path() = joinpath(first(DEPOT_PATH), "bin")
+
+app_context() = Context(env = EnvCache(joinpath(app_env_folder(), "Project.toml")))
+
+function validate_app_name(name::AbstractString)
+ if isempty(name)
+ error("App name cannot be empty")
+ end
+ if !occursin(r"^[a-zA-Z][a-zA-Z0-9_-]*$", name)
+ error("App name must start with a letter and contain only letters, numbers, underscores, and hyphens")
+ end
+ return if occursin(r"\.\.", name) || occursin(r"[/\\]", name)
+ error("App name cannot contain path traversal sequences or path separators")
+ end
+end
+
+function validate_package_name(name::AbstractString)
+ if isempty(name)
+ error("Package name cannot be empty")
+ end
+ return if !occursin(r"^[a-zA-Z][a-zA-Z0-9_]*$", name)
+ error("Package name must start with a letter and contain only letters, numbers, and underscores")
+ end
+end
+
+function validate_submodule_name(name::Union{AbstractString, Nothing})
+ return if name !== nothing
+ if isempty(name)
+ error("Submodule name cannot be empty")
+ end
+ if !occursin(r"^[a-zA-Z][a-zA-Z0-9_]*$", name)
+ error("Submodule name must start with a letter and contain only letters, numbers, and underscores")
+ end
+ end
+end
+
+
+function rm_shim(name; kwargs...)
+ validate_app_name(name)
+ return Base.rm(joinpath(julia_bin_path(), name * (Sys.iswindows() ? ".bat" : "")); kwargs...)
+end
+
+function get_project(sourcepath)
+ project_file = projectfile_path(sourcepath)
+
+ isfile(project_file) || error("Project file not found: $project_file")
+
+ project = Pkg.Types.read_project(project_file)
+ isempty(project.apps) && error("No apps found in Project.toml for package $(project.name) at version $(project.version)")
+ return project
+end
+
+
+function overwrite_file_if_different(file, content)
+ # Windows batch files require CRLF line endings for reliable label parsing
+ if endswith(file, ".bat")
+ content = replace(content, "\r\n" => "\n") # normalize to LF first
+ content = replace(content, "\n" => "\r\n") # then convert to CRLF
+ end
+ return if !isfile(file) || read(file, String) != content
+ mkpath(dirname(file))
+ write(file, content)
+ end
+end
+
+function check_apps_in_path(apps)
+ for app_name in keys(apps)
+ which_name = app_name * (Sys.iswindows() ? ".bat" : "")
+ which_result = Sys.which(which_name)
+ if which_result === nothing
+ @warn """
+ App '$app_name' was installed but is not available in PATH.
+ Consider adding '$(julia_bin_path())' to your PATH environment variable.
+ """ maxlog = 1
+ break # Only show warning once per installation
+ else
+ # Check for collisions
+ expected_path = joinpath(julia_bin_path(), app_name * (Sys.iswindows() ? ".bat" : ""))
+ if which_result != expected_path
+ @warn """
+ App '$app_name' collision detected:
+ Expected: $expected_path
+ Found: $which_result
+ Another application with the same name exists in PATH.
+ """
+ end
+ end
+ end
+ return
+end
+
+function get_max_version_register(pkg::PackageSpec, regs)
+ max_v = nothing
+ tree_hash = nothing
+ for reg in regs
+ if get(reg, pkg.uuid, nothing) !== nothing
+ reg_pkg = get(reg, pkg.uuid, nothing)
+ reg_pkg === nothing && continue
+ pkg_info = Registry.registry_info(reg, reg_pkg)
+ for (version, info) in pkg_info.version_info
+ info.yanked && continue
+ if pkg.version isa VersionNumber
+ pkg.version == version || continue
+ else
+ version in pkg.version || continue
+ end
+ if max_v === nothing || version > max_v
+ max_v = version
+ tree_hash = info.git_tree_sha1
+ end
+ end
+ end
+ end
+ if max_v === nothing
+ error("Suitable package version for $(pkg.name) not found in any registries.")
+ end
+ return (max_v, tree_hash)
+end
+
+
+##################
+# Main Functions #
+##################
+
+function _resolve(manifest::Manifest, pkgname = nothing)
+ for (uuid, pkg) in manifest.deps
+ if pkgname !== nothing && pkg.name !== pkgname
+ continue
+ end
+
+ # TODO: Add support for existing manifest
+
+ projectfile = joinpath(app_env_folder(), pkg.name, "Project.toml")
+
+ sourcepath = source_path(app_manifest_file(), pkg)
+ original_project_file = projectfile_path(sourcepath)
+
+ mkpath(dirname(projectfile))
+
+ if isfile(original_project_file)
+ cp(original_project_file, projectfile; force = true)
+ chmod(projectfile, 0o644) # Make the copied project file writable
+
+ # Add entryfile stanza pointing to the package entry file
+ # TODO: What if project file has its own entryfile?
+ project_data = TOML.parsefile(projectfile)
+ project_data["entryfile"] = joinpath(sourcepath, "src", "$(pkg.name).jl")
+ atomic_toml_write(projectfile, project_data)
+ else
+ error("could not find project file for package $pkg")
+ end
+
+ # Create a manifest with the manifest entry
+ Pkg.activate(joinpath(app_env_folder(), pkg.name)) do
+ ctx = Context()
+ ctx.env.manifest.deps[uuid] = pkg
+ Pkg.resolve(ctx)
+ end
+
+ # TODO: Julia path
+ generate_shims_for_apps(pkg.name, pkg.apps, dirname(projectfile), joinpath(Sys.BINDIR, "julia"))
+ end
+ return write_manifest(manifest, app_manifest_file())
+end
+
+
+function add(pkg::Vector{PackageSpec})
+ for p in pkg
+ add(p)
+ end
+ return
+end
+
+
+function add(pkg::PackageSpec)
+ handle_package_input!(pkg)
+
+ ctx = app_context()
+
+ Pkg.Operations.update_registries(ctx; force = false, update_cooldown = Day(1))
+
+ manifest = ctx.env.manifest
+ new = false
+
+ # Download package
+ if pkg.repo.source !== nothing || pkg.repo.rev !== nothing
+ entry = Pkg.API.manifest_info(ctx.env.manifest, pkg.uuid)
+ pkg = update_package_add(ctx, pkg, entry, false)
+ new = handle_repo_add!(ctx, pkg)
+ else
+ pkgs = [pkg]
+ Pkg.Operations.registry_resolve!(ctx.registries, pkgs)
+ Pkg.Operations.ensure_resolved(ctx, manifest, pkgs, registry = true)
+
+ pkg.version, pkg.tree_hash = get_max_version_register(pkg, ctx.registries)
+
+ new = Pkg.Operations.download_source(ctx, pkgs)
+ end
+
+ # Run Pkg.build()?
+
+ Base.rm(joinpath(app_env_folder(), pkg.name); force = true, recursive = true)
+ sourcepath = source_path(ctx.env.manifest_file, pkg)
+ project = get_project(sourcepath)
+ # TODO: Wrong if package itself has a sourcepath?
+ # PackageEntry requires version::Union{VersionNumber, Nothing}, but project.version can be VersionSpec
+ entry = PackageEntry(; apps = project.apps, name = pkg.name, version = project.version isa VersionNumber ? project.version : nothing, tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid = pkg.uuid)
+ manifest.deps[pkg.uuid] = entry
+
+ _resolve(manifest, pkg.name)
+ if new === true || (new isa Set{UUID} && pkg.uuid in new)
+ Pkg.Operations.build_versions(ctx, Set([pkg.uuid]); verbose = true)
+ end
+ precompile(pkg.name)
+
+ @info "For package: $(pkg.name) installed apps $(join(keys(project.apps), ","))"
+ return check_apps_in_path(project.apps)
+end
+
+function develop(pkg::Vector{PackageSpec})
+ for p in pkg
+ develop(p)
+ end
+ return
+end
+
+function develop(pkg::PackageSpec)
+ if pkg.path !== nothing
+ pkg.path = abspath(pkg.path)
+ end
+ handle_package_input!(pkg)
+ ctx = app_context()
+ handle_repo_develop!(ctx, pkg, #=shared =# true)
+ Base.rm(joinpath(app_env_folder(), pkg.name); force = true, recursive = true)
+ sourcepath = abspath(source_path(ctx.env.manifest_file, pkg))
+ project = get_project(sourcepath)
+
+ # Seems like the `.repo.source` field is not cleared.
+ # At least repo-url is still in the manifest after doing a dev with a path
+ # Figure out why for normal dev this is not needed.
+ # XXX: Why needed?
+ if pkg.path !== nothing
+ pkg.repo.source = nothing
+ end
+
+ # PackageEntry requires version::Union{VersionNumber, Nothing}, but project.version can be VersionSpec
+ entry = PackageEntry(; apps = project.apps, name = pkg.name, version = project.version isa VersionNumber ? project.version : nothing, tree_hash = pkg.tree_hash, path = sourcepath, repo = pkg.repo, uuid = pkg.uuid)
+ manifest = ctx.env.manifest
+ manifest.deps[pkg.uuid] = entry
+
+ # For dev, we don't create an app environment - just point shims directly to the dev'd project
+ write_manifest(manifest, app_manifest_file())
+ generate_shims_for_apps(pkg.name, project.apps, sourcepath, joinpath(Sys.BINDIR, "julia"))
+
+ @info "For package: $(pkg.name) installed apps: $(join(keys(project.apps), ","))"
+ return check_apps_in_path(project.apps)
+end
+
+
+update(pkgs_or_apps::String) = update([pkgs_or_apps])
+function update(pkgs_or_apps::Vector)
+ for pkg_or_app in pkgs_or_apps
+ if pkg_or_app isa String
+ pkg_or_app = PackageSpec(pkg_or_app)
+ end
+ update(pkg_or_app)
+ end
+ return
+end
+
+# XXX: Is updating an app ever different from rm-ing and adding it from scratch?
+function update(pkg::Union{PackageSpec, Nothing} = nothing)
+ ctx = app_context()
+ manifest = ctx.env.manifest
+ deps = Pkg.Operations.load_manifest_deps(manifest)
+ for dep in deps
+ info = manifest.deps[dep.uuid]
+ if pkg === nothing || info.name !== pkg.name
+ continue
+ end
+ Pkg.activate(joinpath(app_env_folder(), info.name)) do
+ # precompile only after updating all apps?
+ Pkg.update()
+ end
+ sourcepath = abspath(source_path(ctx.env.manifest_file, info))
+ project = get_project(sourcepath)
+ # Get the tree hash from the project file
+ manifest_file = manifestfile_path(joinpath(app_env_folder(), info.name))
+ manifest_app = Pkg.Types.read_manifest(manifest_file)
+ manifest_entry = manifest_app.deps[info.uuid]
+
+ entry = PackageEntry(;
+ apps = project.apps, name = manifest_entry.name, version = manifest_entry.version, tree_hash = manifest_entry.tree_hash,
+ path = manifest_entry.path, repo = manifest_entry.repo, uuid = manifest_entry.uuid
+ )
+
+ manifest.deps[dep.uuid] = entry
+ Pkg.Types.write_manifest(manifest, app_manifest_file())
+ end
+ return
+end
+
+function status(pkgs_or_apps::Vector)
+ return if isempty(pkgs_or_apps)
+ status()
+ else
+ for pkg_or_app in pkgs_or_apps
+ if pkg_or_app isa String
+ pkg_or_app = PackageSpec(pkg_or_app)
+ end
+ status(pkg_or_app)
+ end
+ end
+end
+
+function status(pkg_or_app::Union{PackageSpec, Nothing} = nothing)
+ # TODO: Sort.
+ pkg_or_app = pkg_or_app === nothing ? nothing : pkg_or_app.name
+ manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml"))
+ deps = Pkg.Operations.load_manifest_deps(manifest)
+
+ is_pkg = pkg_or_app !== nothing && any(dep -> dep.name == pkg_or_app, values(manifest.deps))
+
+ for dep in deps
+ info = manifest.deps[dep.uuid]
+ if is_pkg && dep.name !== pkg_or_app
+ continue
+ end
+ if !is_pkg && pkg_or_app !== nothing
+ if !(pkg_or_app in keys(info.apps))
+ continue
+ end
+ end
+
+ printstyled("[", string(dep.uuid)[1:8], "] "; color = :light_black)
+ print_single(stdout, dep)
+ println()
+ for (appname, appinfo) in info.apps
+ if !is_pkg && pkg_or_app !== nothing && appname !== pkg_or_app
+ continue
+ end
+ julia_cmd = contractuser(appinfo.julia_command)
+ printstyled(" $(appname)", color = :green)
+ printstyled(" $(julia_cmd) \n", color = :gray)
+ end
+ end
+ return
+end
+
+function precompile(pkg::Union{Nothing, String} = nothing)
+ manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml"))
+ deps = Pkg.Operations.load_manifest_deps(manifest)
+ for dep in deps
+ # TODO: Parallel app compilation..?
+ info = manifest.deps[dep.uuid]
+ if pkg !== nothing && info.name !== pkg
+ continue
+ end
+ Pkg.activate(joinpath(app_env_folder(), info.name)) do
+ Pkg.instantiate()
+ Pkg.precompile()
+ end
+ end
+ return
+end
+
+
+function require_not_empty(pkgs, f::Symbol)
+ return if pkgs === nothing || isempty(pkgs)
+ pkgerror("app $f requires at least one package")
+ end
+end
+
+rm(pkgs_or_apps::String) = rm([pkgs_or_apps])
+function rm(pkgs_or_apps::Vector)
+ for pkg_or_app in pkgs_or_apps
+ if pkg_or_app isa String
+ pkg_or_app = PackageSpec(pkg_or_app)
+ end
+ rm(pkg_or_app)
+ end
+ return
+end
+
+function rm(pkg_or_app::Union{PackageSpec, Nothing} = nothing)
+ pkg_or_app = pkg_or_app === nothing ? nothing : pkg_or_app.name
+
+ require_not_empty(pkg_or_app, :rm)
+
+ manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml"))
+ dep_idx = findfirst(dep -> dep.name == pkg_or_app, manifest.deps)
+ if dep_idx !== nothing
+ dep = manifest.deps[dep_idx]
+ @info "Deleting all apps for package $(dep.name)"
+ delete!(manifest.deps, dep.uuid)
+ for (appname, appinfo) in dep.apps
+ @info "Deleted $(appname)"
+ rm_shim(appname; force = true)
+ end
+ if dep.path === nothing
+ Base.rm(joinpath(app_env_folder(), dep.name); recursive = true)
+ end
+ else
+ for (uuid, pkg) in collect(manifest.deps)
+ app_idx = findfirst(app -> app.name == pkg_or_app, pkg.apps)
+ if app_idx !== nothing
+ app = pkg.apps[app_idx]
+ @info "Deleted app $(app.name)"
+ delete!(pkg.apps, app.name)
+ rm_shim(app.name; force = true)
+ end
+ if isempty(pkg.apps)
+ delete!(manifest.deps, uuid)
+ Base.rm(joinpath(app_env_folder(), pkg.name); recursive = true)
+ end
+ end
+ end
+ # XXX: What happens if something fails above and we do not write out the updated manifest?
+ Pkg.Types.write_manifest(manifest, app_manifest_file())
+ return
+end
+
+for f in (:develop, :add)
+ @eval begin
+ $f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...)
+ $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...)
+ function $f(;
+ name::Union{Nothing, AbstractString} = nothing, uuid::Union{Nothing, String, UUID} = nothing,
+ version::Union{VersionNumber, String, VersionSpec, Nothing} = nothing,
+ url = nothing, rev = nothing, path = nothing, subdir = nothing, kwargs...
+ )
+ pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir)
+ return if all(isnothing, [name, uuid, version, url, rev, path, subdir])
+ $f(PackageSpec[]; kwargs...)
+ else
+ $f(pkg; kwargs...)
+ end
+ end
+ function $f(pkgs::Vector{<:NamedTuple}; kwargs...)
+ return $f([PackageSpec(; pkg...) for pkg in pkgs]; kwargs...)
+ end
+ end
+end
+
+
+#########
+# Shims #
+#########
+
+const SHIM_COMMENT = Sys.iswindows() ? "REM " : "#"
+const SHIM_VERSION = 1.1
+const SHIM_HEADER = """$SHIM_COMMENT This file is generated by the Julia package manager.
+$SHIM_COMMENT Shim version: $SHIM_VERSION"""
+
+function generate_shims_for_apps(pkgname, apps, env, julia)
+ for (_, app) in apps
+ generate_shim(pkgname, app, env, julia)
+ end
+ return
+end
+
+function generate_shim(pkgname, app::AppInfo, env, julia)
+ validate_package_name(pkgname)
+ validate_app_name(app.name)
+ validate_submodule_name(app.submodule)
+
+ module_spec = app.submodule === nothing ? pkgname : "$(pkgname).$(app.submodule)"
+
+ filename = app.name * (Sys.iswindows() ? ".bat" : "")
+ julia_bin_filename = joinpath(julia_bin_path(), filename)
+ mkpath(dirname(julia_bin_filename))
+ content = if Sys.iswindows()
+ julia_escaped = "\"$(Base.shell_escape_wincmd(julia))\""
+ module_spec_escaped = "\"$(Base.shell_escape_wincmd(module_spec))\""
+ windows_shim(julia_escaped, module_spec_escaped, env, app.julia_flags)
+ else
+ julia_escaped = Base.shell_escape(julia)
+ module_spec_escaped = Base.shell_escape(module_spec)
+ shell_shim(julia_escaped, module_spec_escaped, env, app.julia_flags)
+ end
+ overwrite_file_if_different(julia_bin_filename, content)
+ return if Sys.isunix()
+ chmod(julia_bin_filename, 0o755)
+ end
+end
+
+
+function shell_shim(julia_escaped::String, module_spec_escaped::String, env, julia_flags::Vector{String})
+ julia_flags_escaped = join(Base.shell_escape.(julia_flags), " ")
+ julia_flags_part = isempty(julia_flags) ? "" : " $julia_flags_escaped"
+
+ load_path_escaped = Base.shell_escape(env)
+ depot_path_escaped = Base.shell_escape(join(DEPOT_PATH, ':'))
+
+ return """
+ #!/bin/sh
+ set -eu
+
+ $SHIM_HEADER
+
+ # Pin Julia paths for the child process
+ export JULIA_LOAD_PATH=$load_path_escaped
+ export JULIA_DEPOT_PATH=$depot_path_escaped
+
+ # Allow overriding Julia executable via environment variable
+ if [ -n "\${JULIA_APPS_JULIA_CMD:-}" ]; then
+ julia_cmd="\$JULIA_APPS_JULIA_CMD"
+ else
+ julia_cmd=$julia_escaped
+ fi
+
+ # If a `--` appears, args before it go to Julia, after it to the app.
+ # If no `--` appears, all original args go to the app (no Julia args).
+ found_separator=false
+ for a in "\$@"; do
+ [ "\$a" = "--" ] && { found_separator=true; break; }
+ done
+
+ if [ "\$found_separator" = "true" ]; then
+ # Build julia_args until `--`, then leave the rest in "\$@"
+ julia_args=""
+ while [ "\$#" -gt 0 ]; do
+ case "\$1" in
+ --) shift; break ;;
+ *) julia_args="\$julia_args\${julia_args:+ }\$1"; shift ;;
+ esac
+ done
+ # Here: "\$@" are the app args after the separator
+ exec "\$julia_cmd" --startup-file=no$julia_flags_part \$julia_args -m $module_spec_escaped "\$@"
+ else
+ # No separator: all original args go straight to the app
+ exec "\$julia_cmd" --startup-file=no$julia_flags_part -m $module_spec_escaped "\$@"
+ fi
+ """
+end
+
+function windows_shim(
+ julia_escaped::String,
+ module_spec_escaped::String,
+ env,
+ julia_flags::Vector{String},
+ )
+ flags_escaped = join(Base.shell_escape_wincmd.(julia_flags), " ")
+ flags_part = isempty(julia_flags) ? "" : " $flags_escaped"
+
+ depot_path = join(DEPOT_PATH, ';')
+
+ return """
+ @echo off
+ setlocal EnableExtensions DisableDelayedExpansion
+
+ $SHIM_HEADER
+
+ rem --- Environment (no delayed expansion here to keep '!' literal) ---
+ set "JULIA_LOAD_PATH=$env"
+ set "JULIA_DEPOT_PATH=$depot_path"
+
+ rem --- Allow overriding Julia executable via environment variable ---
+ if defined JULIA_APPS_JULIA_CMD (
+ set "julia_cmd=%JULIA_APPS_JULIA_CMD%"
+ ) else (
+ set "julia_cmd=$julia_escaped"
+ )
+
+ rem --- Now enable delayed expansion for string building below ---
+ setlocal EnableDelayedExpansion
+
+ rem Parse arguments, splitting on first -- into julia_args / app_args
+ set "found_sep="
+ set "julia_args="
+ set "app_args="
+
+ :__next
+ if "%~1"=="" goto __done
+
+ if not defined found_sep if "%~1"=="--" (
+ set "found_sep=1"
+ shift
+ goto __next
+ )
+
+ if not defined found_sep (
+ if defined julia_args (
+ set "julia_args=!julia_args! %1"
+ ) else (
+ set "julia_args=%1"
+ )
+ shift
+ goto __next
+ )
+
+ if defined found_sep (
+ if defined app_args (
+ set "app_args=!app_args! %1"
+ ) else (
+ set "app_args=%1"
+ )
+ shift
+ goto __next
+ )
+
+ :__done
+ rem If no --, pass all original args to the app; otherwise use split vars
+ if defined found_sep (
+ "%julia_cmd%" ^
+ --startup-file=no$flags_part !julia_args! ^
+ -m $module_spec_escaped ^
+ !app_args!
+ ) else (
+ "%julia_cmd%" ^
+ --startup-file=no$flags_part ^
+ -m $module_spec_escaped ^
+ %*
+ )
+ """
+end
+
+end
diff --git a/src/Artifacts.jl b/src/Artifacts.jl
index 957d14aab9..11ac99c129 100644
--- a/src/Artifacts.jl
+++ b/src/Artifacts.jl
@@ -1,23 +1,24 @@
-module Artifacts
+module PkgArtifacts
using Artifacts, Base.BinaryPlatforms, SHA
using ..MiniProgressBars, ..PlatformEngines
using Tar: can_symlink
+using FileWatching: FileWatching
import ..set_readonly, ..GitTools, ..TOML, ..pkg_server, ..can_fancyprint,
- ..stderr_f, ..printpkgstyle
+ ..stderr_f, ..printpkgstyle, ..mv_temp_dir_retries, ..atomic_toml_write, ..create_cachedir_tag
import Base: get, SHA1
import Artifacts: artifact_names, ARTIFACTS_DIR_OVERRIDE, ARTIFACT_OVERRIDES, artifact_paths,
- artifacts_dirs, pack_platform!, unpack_platform, load_artifacts_toml,
- query_override, with_artifacts_directory, load_overrides
+ artifacts_dirs, pack_platform!, unpack_platform, load_artifacts_toml,
+ query_override, with_artifacts_directory, load_overrides
import ..Types: write_env_usage, parse_toml
-
-export create_artifact, artifact_exists, artifact_path, remove_artifact, verify_artifact,
- artifact_meta, artifact_hash, bind_artifact!, unbind_artifact!, download_artifact,
- find_artifacts_toml, ensure_artifact_installed, @artifact_str, archive_artifact,
- select_downloadable_artifacts
+const Artifacts = PkgArtifacts # This is to preserve compatibility for folks who depend on the internals of this module
+export Artifacts, create_artifact, artifact_exists, artifact_path, remove_artifact, verify_artifact,
+ artifact_meta, artifact_hash, bind_artifact!, unbind_artifact!, download_artifact,
+ find_artifacts_toml, ensure_artifact_installed, @artifact_str, archive_artifact,
+ select_downloadable_artifacts, ArtifactDownloadInfo
"""
create_artifact(f::Function)
@@ -30,6 +31,7 @@ function create_artifact(f::Function)
# Ensure the `artifacts` directory exists in our default depot
artifacts_dir = first(artifacts_dirs())
mkpath(artifacts_dir)
+ create_cachedir_tag(artifacts_dir)
# Temporary directory where we'll do our creation business
temp_dir = mktempdir(artifacts_dir)
@@ -48,56 +50,14 @@ function create_artifact(f::Function)
# system directory by accidentally creating something with the same content-hash
# as something that was foolishly overridden. This should be virtually impossible
# unless the user has been very unwise, but let's be cautious.
- new_path = artifact_path(artifact_hash; honor_overrides=false)
- _mv_temp_artifact_dir(temp_dir, new_path)
+ new_path = artifact_path(artifact_hash; honor_overrides = false)
+ mv_temp_dir_retries(temp_dir, new_path)
# Give the people what they want
return artifact_hash
finally
# Always attempt to cleanup
- rm(temp_dir; recursive=true, force=true)
- end
-end
-
-"""
- _mv_temp_artifact_dir(temp_dir::String, new_path::String)::Nothing
-Either rename the directory at `temp_dir` to `new_path` and set it to read-only
-or if `new_path` artifact already exists try to do nothing.
-"""
-function _mv_temp_artifact_dir(temp_dir::String, new_path::String)::Nothing
- # Sometimes a rename can fail because the temp_dir is locked by
- # anti-virus software scanning the new files.
- # In this case we want to sleep and try again.
- # I am using the list of error codes to retry from:
- # https://github.com/isaacs/node-graceful-fs/blob/234379906b7d2f4c9cfeb412d2516f42b0fb4953/polyfills.js#L87
- # Retry for up to about 60 seconds by retrying 20 times with exponential backoff.
- retry = 0
- max_num_retries = 20 # maybe this should be configurable?
- sleep_amount = 0.01 # seconds
- max_sleep_amount = 5.0 # seconds
- while true
- isdir(new_path) && return
- # This next step is like
- # `mv(temp_dir, new_path)`.
- # However, `mv` defaults to `cp` if `rename` returns an error.
- # `cp` is not atomic, so avoid the potential of calling it.
- err = ccall(:jl_fs_rename, Int32, (Cstring, Cstring), temp_dir, new_path)
- if err ≥ 0
- # rename worked
- chmod(new_path, filemode(dirname(new_path)))
- set_readonly(new_path)
- return
- else
- # Ignore rename error if `new_path` exists.
- isdir(new_path) && return
- if retry < max_num_retries && err ∈ (Base.UV_EACCES, Base.UV_EPERM, Base.UV_EBUSY)
- sleep(sleep_amount)
- sleep_amount = min(sleep_amount*2.0, max_sleep_amount)
- retry += 1
- else
- Base.uv_error("rename of $(repr(temp_dir)) to $(repr(new_path))", err)
- end
- end
+ rm(temp_dir; recursive = true, force = true)
end
end
@@ -123,9 +83,10 @@ function remove_artifact(hash::SHA1)
possible_paths = artifacts_dirs(bytes2hex(hash.bytes))
for path in possible_paths
if isdir(path)
- rm(path; recursive=true, force=true)
+ rm(path; recursive = true, force = true)
end
end
+ return
end
"""
@@ -135,7 +96,7 @@ Verifies that the given artifact (identified by its SHA1 git tree hash) is insta
disk, and retains its integrity. If the given artifact is overridden, skips the
verification unless `honor_overrides` is set to `true`.
"""
-function verify_artifact(hash::SHA1; honor_overrides::Bool=false)
+function verify_artifact(hash::SHA1; honor_overrides::Bool = false)
# Silently skip overridden artifacts unless we really ask for it
if !honor_overrides
if query_override(hash) !== nothing
@@ -159,7 +120,7 @@ Archive an artifact into a tarball stored at `tarball_path`, returns the SHA256
resultant tarball as a hexadecimal string. Throws an error if the artifact does not
exist. If the artifact is overridden, throws an error unless `honor_overrides` is set.
"""
-function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool=false)
+function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool = false)
if !honor_overrides
if query_override(hash) !== nothing
error("Will not archive an overridden artifact unless `honor_overrides` is set!")
@@ -179,12 +140,62 @@ function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Boo
end
end
+"""
+ ArtifactDownloadInfo
+
+Auxiliary information about an artifact to be used with `bind_artifact!()` to give
+a download location for that artifact, as well as the hash and size of that artifact.
+"""
+struct ArtifactDownloadInfo
+ # URL the artifact is available at as a gzip-compressed tarball
+ url::String
+
+ # SHA256 hash of the tarball
+ hash::Vector{UInt8}
+
+ # Size in bytes of the tarball. `size <= 0` means unknown.
+ size::Int64
+
+ function ArtifactDownloadInfo(url, hash::AbstractVector, size = 0)
+ valid_hash_len = SHA.digestlen(SHA256_CTX)
+ hash_len = length(hash)
+ if hash_len != valid_hash_len
+ throw(ArgumentError("Invalid hash length '$(hash_len)', must be $(valid_hash_len)"))
+ end
+ return new(
+ String(url),
+ Vector{UInt8}(hash),
+ Int64(size),
+ )
+ end
+end
+
+# Convenience constructor for string hashes
+ArtifactDownloadInfo(url, hash::AbstractString, args...) = ArtifactDownloadInfo(url, hex2bytes(hash), args...)
+
+# Convenience constructor for legacy Tuple representation
+ArtifactDownloadInfo(args::Tuple) = ArtifactDownloadInfo(args...)
+
+ArtifactDownloadInfo(adi::ArtifactDownloadInfo) = adi
+
+# Make the dict that will be embedded in the TOML
+function make_dict(adi::ArtifactDownloadInfo)
+ ret = Dict{String, Any}(
+ "url" => adi.url,
+ "sha256" => bytes2hex(adi.hash),
+ )
+ if adi.size > 0
+ ret["size"] = adi.size
+ end
+ return ret
+end
+
"""
bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
- platform::Union{AbstractPlatform,Nothing} = nothing,
- download_info::Union{Vector{Tuple},Nothing} = nothing,
- lazy::Bool = false,
- force::Bool = false)
+ platform::Union{AbstractPlatform,Nothing} = nothing,
+ download_info::Union{Vector{Tuple},Nothing} = nothing,
+ lazy::Bool = false,
+ force::Bool = false)
Writes a mapping of `name` -> `hash` within the given `(Julia)Artifacts.toml` file. If
`platform` is not `nothing`, this artifact is marked as platform-specific, and will be
@@ -198,11 +209,13 @@ is set to `true`, even if download information is available, this artifact will
downloaded until it is accessed via the `artifact"name"` syntax, or
`ensure_artifact_installed()` is called upon it.
"""
-function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
- platform::Union{AbstractPlatform,Nothing} = nothing,
- download_info::Union{Vector{<:Tuple},Nothing} = nothing,
- lazy::Bool = false,
- force::Bool = false)
+function bind_artifact!(
+ artifacts_toml::String, name::String, hash::SHA1;
+ platform::Union{AbstractPlatform, Nothing} = nothing,
+ download_info::Union{Vector{<:Tuple}, Vector{<:ArtifactDownloadInfo}, Nothing} = nothing,
+ lazy::Bool = false,
+ force::Bool = false
+ )
# First, check to see if this artifact is already bound:
if isfile(artifacts_toml)
artifact_dict = parse_toml(artifacts_toml)
@@ -211,7 +224,7 @@ function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
meta = artifact_dict[name]
if !isa(meta, Vector)
error("Mapping for '$name' within $(artifacts_toml) already exists!")
- elseif any(isequal(platform), unpack_platform(x, name, artifacts_toml) for x in meta)
+ elseif any(p -> platforms_match(platform, p), unpack_platform(x, name, artifacts_toml) for x in meta)
error("Mapping for '$name'/$(triplet(platform)) within $(artifacts_toml) already exists!")
end
end
@@ -220,7 +233,7 @@ function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
end
# Otherwise, the new piece of data we're going to write out is this dict:
- meta = Dict{String,Any}(
+ meta = Dict{String, Any}(
"git-tree-sha1" => bytes2hex(hash.bytes),
)
@@ -229,15 +242,11 @@ function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
meta["lazy"] = true
end
- # Integrate download info, if it is given. We represent the download info as a
- # vector of dicts, each with its own `url` and `sha256`, since different tarballs can
- # expand to the same tree hash.
+ # Integrate download info, if it is given. Note that there can be multiple
+ # download locations, each with its own tarball with its own hash, but which
+ # expands to the same content/treehash.
if download_info !== nothing
- meta["download"] = [
- Dict("url" => dl[1],
- "sha256" => dl[2],
- ) for dl in download_info
- ]
+ meta["download"] = make_dict.(ArtifactDownloadInfo.(download_info))
end
if platform === nothing
@@ -262,11 +271,7 @@ function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
# Spit it out onto disk
let artifact_dict = artifact_dict
parent_dir = dirname(artifacts_toml)
- temp_artifacts_toml = isempty(parent_dir) ? tempname(pwd()) : tempname(parent_dir)
- open(temp_artifacts_toml, "w") do io
- TOML.print(io, artifact_dict, sorted=true)
- end
- mv(temp_artifacts_toml, artifacts_toml; force=true)
+ atomic_toml_write(artifacts_toml, artifact_dict, sorted = true)
end
# Mark that we have used this Artifact.toml
@@ -281,8 +286,10 @@ end
Unbind the given `name` from an `(Julia)Artifacts.toml` file.
Silently fails if no such binding exists within the file.
"""
-function unbind_artifact!(artifacts_toml::String, name::String;
- platform::Union{AbstractPlatform,Nothing} = nothing)
+function unbind_artifact!(
+ artifacts_toml::String, name::String;
+ platform::Union{AbstractPlatform, Nothing} = nothing
+ )
artifact_dict = parse_toml(artifacts_toml)
if !haskey(artifact_dict, name)
return
@@ -297,15 +304,13 @@ function unbind_artifact!(artifacts_toml::String, name::String;
)
end
- open(artifacts_toml, "w") do io
- TOML.print(io, artifact_dict, sorted=true)
- end
+ atomic_toml_write(artifacts_toml, artifact_dict, sorted = true)
return
end
"""
download_artifact(tree_hash::SHA1, tarball_url::String, tarball_hash::String;
- verbose::Bool = false, io::IO=stderr)
+ verbose::Bool = false, io::IO=stderr)
Download/install an artifact into the artifact store. Returns `true` on success,
returns an error object on failure.
@@ -315,138 +320,163 @@ returns an error object on failure.
failure occurs
"""
function download_artifact(
- tree_hash::SHA1,
- tarball_url::String,
- tarball_hash::Union{String, Nothing} = nothing;
- verbose::Bool = false,
- quiet_download::Bool = false,
- io::IO=stderr_f(),
- progress::Union{Function, Nothing} = nothing,
-)
- if artifact_exists(tree_hash)
- return true
+ tree_hash::SHA1,
+ tarball_url::String,
+ tarball_hash::Union{String, Nothing} = nothing;
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO = stderr_f(),
+ progress::Union{Function, Nothing} = nothing,
+ )
+ _artifact_paths = artifact_paths(tree_hash)
+ pidfile = _artifact_paths[1] * ".pid"
+ mkpath(dirname(pidfile))
+ t_wait_msg = Timer(2) do t
+ if progress === nothing
+ @info "downloading $tarball_url ($(tree_hash)) in another process"
+ else
+ progress(0, 0; status = "downloading in another process")
+ end
end
+ ret = FileWatching.mkpidlock(pidfile, stale_age = 20) do
+ close(t_wait_msg)
+ if artifact_exists(tree_hash)
+ return true
+ end
- # Ensure the `artifacts` directory exists in our default depot
- artifacts_dir = first(artifacts_dirs())
- mkpath(artifacts_dir)
- # expected artifact path
- dst = joinpath(artifacts_dir, bytes2hex(tree_hash.bytes))
+ # Ensure the `artifacts` directory exists in our default depot
+ artifacts_dir = first(artifacts_dirs())
+ mkpath(artifacts_dir)
+ create_cachedir_tag(artifacts_dir)
+ # expected artifact path
+ dst = joinpath(artifacts_dir, bytes2hex(tree_hash.bytes))
- # We download by using a temporary directory. We do this because the download may
- # be corrupted or even malicious; we don't want to clobber someone else's artifact
- # by trusting the tree hash that has been given to us; we will instead download it
- # to a temporary directory, calculate the true tree hash, then move it to the proper
- # location only after knowing what it is, and if something goes wrong in the process,
- # everything should be cleaned up.
+ # We download by using a temporary directory. We do this because the download may
+ # be corrupted or even malicious; we don't want to clobber someone else's artifact
+ # by trusting the tree hash that has been given to us; we will instead download it
+ # to a temporary directory, calculate the true tree hash, then move it to the proper
+ # location only after knowing what it is, and if something goes wrong in the process,
+ # everything should be cleaned up.
- # Temporary directory where we'll do our creation business
- temp_dir = mktempdir(artifacts_dir)
+ # Temporary directory where we'll do our creation business
+ temp_dir = mktempdir(artifacts_dir)
- try
- download_verify_unpack(tarball_url, tarball_hash, temp_dir;
- ignore_existence=true, verbose, quiet_download, io, progress)
- isnothing(progress) || progress(10000, 10000; status="verifying")
- calc_hash = SHA1(GitTools.tree_hash(temp_dir))
-
- # Did we get what we expected? If not, freak out.
- if calc_hash.bytes != tree_hash.bytes
- msg = """
- Tree Hash Mismatch!
- Expected git-tree-sha1: $(bytes2hex(tree_hash.bytes))
- Calculated git-tree-sha1: $(bytes2hex(calc_hash.bytes))
- """
- # Since tree hash calculation is rather fragile and file system dependent,
- # we allow setting JULIA_PKG_IGNORE_HASHES=1 to ignore the error and move
- # the artifact to the expected location and return true
- ignore_hash_env_set = get(ENV, "JULIA_PKG_IGNORE_HASHES", "") != ""
- if ignore_hash_env_set
- ignore_hash = Base.get_bool_env("JULIA_PKG_IGNORE_HASHES", false)
- ignore_hash === nothing && @error(
- "Invalid ENV[\"JULIA_PKG_IGNORE_HASHES\"] value",
- ENV["JULIA_PKG_IGNORE_HASHES"],
- )
- ignore_hash = something(ignore_hash, false)
- else
- # default: false except Windows users who can't symlink
- ignore_hash = Sys.iswindows() &&
- !mktempdir(can_symlink, artifacts_dir)
- end
- if ignore_hash
- desc = ignore_hash_env_set ?
- "Environment variable \$JULIA_PKG_IGNORE_HASHES is true" :
- "System is Windows and user cannot create symlinks"
- msg *= "\n$desc: \
+ try
+ download_verify_unpack(
+ tarball_url, tarball_hash, temp_dir;
+ ignore_existence = true, verbose, quiet_download, io, progress
+ )
+ isnothing(progress) || progress(10000, 10000; status = "verifying")
+ calc_hash = SHA1(GitTools.tree_hash(temp_dir))
+
+ # Did we get what we expected? If not, freak out.
+ if calc_hash.bytes != tree_hash.bytes
+ msg = """
+ Tree Hash Mismatch!
+ Expected git-tree-sha1: $(bytes2hex(tree_hash.bytes))
+ Calculated git-tree-sha1: $(bytes2hex(calc_hash.bytes))
+ """
+ # Since tree hash calculation is rather fragile and file system dependent,
+ # we allow setting JULIA_PKG_IGNORE_HASHES=1 to ignore the error and move
+ # the artifact to the expected location and return true
+ ignore_hash_env_set = get(ENV, "JULIA_PKG_IGNORE_HASHES", "") != ""
+ if ignore_hash_env_set
+ ignore_hash = Base.get_bool_env("JULIA_PKG_IGNORE_HASHES", false)
+ ignore_hash === nothing && @error(
+ "Invalid ENV[\"JULIA_PKG_IGNORE_HASHES\"] value",
+ ENV["JULIA_PKG_IGNORE_HASHES"],
+ )
+ ignore_hash = something(ignore_hash, false)
+ else
+ # default: false except Windows users who can't symlink
+ ignore_hash = Sys.iswindows() &&
+ !mktempdir(can_symlink, artifacts_dir)
+ end
+ if ignore_hash
+ desc = ignore_hash_env_set ?
+ "Environment variable \$JULIA_PKG_IGNORE_HASHES is true" :
+ "System is Windows and user cannot create symlinks"
+ msg *= "\n$desc: \
ignoring hash mismatch and moving \
artifact to the expected location"
- @error(msg)
- else
- error(msg)
+ @error(msg)
+ else
+ error(msg)
+ end
+ end
+ # Move it to the location we expected
+ isnothing(progress) || progress(10000, 10000; status = "moving to artifact store")
+ mv_temp_dir_retries(temp_dir, dst)
+ catch err
+ @debug "download_artifact error" tree_hash tarball_url tarball_hash err
+ if isa(err, InterruptException)
+ rethrow(err)
+ end
+ # If something went wrong during download, return the error
+ return err
+ finally
+ # Always attempt to cleanup
+ try
+ rm(temp_dir; recursive = true, force = true)
+ catch e
+ e isa InterruptException && rethrow()
+ @warn("Failed to clean up temporary directory $(repr(temp_dir))", exception = e)
end
end
- # Move it to the location we expected
- isnothing(progress) || progress(10000, 10000; status="moving to artifact store")
- _mv_temp_artifact_dir(temp_dir, dst)
- catch err
- @debug "download_artifact error" tree_hash tarball_url tarball_hash err
- if isa(err, InterruptException)
- rethrow(err)
- end
- # If something went wrong during download, return the error
- return err
- finally
- # Always attempt to cleanup
- try
- rm(temp_dir; recursive=true, force=true)
- catch e
- e isa InterruptException && rethrow()
- @warn("Failed to clean up temporary directory $(repr(temp_dir))", exception=e)
- end
+ return true
end
- return true
+
+ return ret
end
"""
ensure_artifact_installed(name::String, artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- pkg_uuid::Union{Base.UUID,Nothing}=nothing,
- verbose::Bool = false,
- quiet_download::Bool = false,
- io::IO=stderr)
+ platform::AbstractPlatform = HostPlatform(),
+ pkg_uuid::Union{Base.UUID,Nothing}=nothing,
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO=stderr)
Ensures an artifact is installed, downloading it via the download information stored in
`artifacts_toml` if necessary. Throws an error if unable to install.
"""
-function ensure_artifact_installed(name::String, artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- pkg_uuid::Union{Base.UUID,Nothing}=nothing,
- verbose::Bool = false,
- quiet_download::Bool = false,
- progress::Union{Function,Nothing} = nothing,
- io::IO=stderr_f())
- meta = artifact_meta(name, artifacts_toml; pkg_uuid=pkg_uuid, platform=platform)
+function ensure_artifact_installed(
+ name::String, artifacts_toml::String;
+ platform::AbstractPlatform = HostPlatform(),
+ pkg_uuid::Union{Base.UUID, Nothing} = nothing,
+ pkg_server_eligible::Bool = true,
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ progress::Union{Function, Nothing} = nothing,
+ io::IO = stderr_f()
+ )
+ meta = artifact_meta(name, artifacts_toml; pkg_uuid = pkg_uuid, platform = platform)
if meta === nothing
error("Cannot locate artifact '$(name)' in '$(artifacts_toml)'")
end
- return ensure_artifact_installed(name, meta, artifacts_toml;
- platform, verbose, quiet_download, progress, io)
+ return ensure_artifact_installed(
+ name, meta, artifacts_toml;
+ pkg_server_eligible, platform, verbose, quiet_download, progress, io
+ )
end
-function ensure_artifact_installed(name::String, meta::Dict, artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- verbose::Bool = false,
- quiet_download::Bool = false,
- progress::Union{Function,Nothing} = nothing,
- io::IO=stderr_f())
-
+function ensure_artifact_installed(
+ name::String, meta::Dict, artifacts_toml::String;
+ pkg_server_eligible::Bool = true,
+ platform::AbstractPlatform = HostPlatform(),
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ progress::Union{Function, Nothing} = nothing,
+ io::IO = stderr_f()
+ )
hash = SHA1(meta["git-tree-sha1"])
if !artifact_exists(hash)
if isnothing(progress) || verbose == true
- return try_artifact_download_sources(name, hash, meta, artifacts_toml; platform, verbose, quiet_download, io)
+ return try_artifact_download_sources(name, hash, meta, artifacts_toml; pkg_server_eligible, platform, verbose, quiet_download, io)
else
# if a custom progress handler is given it is taken to mean the caller wants to handle the download scheduling
- return () -> try_artifact_download_sources(name, hash, meta, artifacts_toml; platform, quiet_download=true, io, progress)
+ return () -> try_artifact_download_sources(name, hash, meta, artifacts_toml; pkg_server_eligible, platform, quiet_download = true, io, progress)
end
else
return artifact_path(hash)
@@ -454,17 +484,18 @@ function ensure_artifact_installed(name::String, meta::Dict, artifacts_toml::Str
end
function try_artifact_download_sources(
- name::String, hash::SHA1, meta::Dict, artifacts_toml::String;
- platform::AbstractPlatform=HostPlatform(),
- verbose::Bool=false,
- quiet_download::Bool=false,
- io::IO=stderr_f(),
- progress::Union{Function,Nothing}=nothing)
+ name::String, hash::SHA1, meta::Dict, artifacts_toml::String;
+ pkg_server_eligible::Bool = true,
+ platform::AbstractPlatform = HostPlatform(),
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO = stderr_f(),
+ progress::Union{Function, Nothing} = nothing
+ )
errors = Any[]
- # first try downloading from Pkg server
- # TODO: only do this if Pkg server knows about this package
- if (server = pkg_server()) !== nothing
+ # first try downloading from Pkg server if the Pkg server knows about this package
+ if pkg_server_eligible && (server = pkg_server()) !== nothing
url = "$server/artifact/$hash"
download_success = let url = url
@debug "Downloading artifact from Pkg server" name artifacts_toml platform url
@@ -543,12 +574,12 @@ end
"""
ensure_all_artifacts_installed(artifacts_toml::String;
- platform = HostPlatform(),
- pkg_uuid = nothing,
- include_lazy = false,
- verbose = false,
- quiet_download = false,
- io::IO=stderr)
+ platform = HostPlatform(),
+ pkg_uuid = nothing,
+ include_lazy = false,
+ verbose = false,
+ quiet_download = false,
+ io::IO=stderr)
Installs all non-lazy artifacts from a given `(Julia)Artifacts.toml` file. `package_uuid` must
be provided to properly support overrides from `Overrides.toml` entries in depots.
@@ -566,49 +597,56 @@ This function is deprecated and should be replaced with the following snippet:
This function is deprecated in Julia 1.6 and will be removed in a future version.
Use `select_downloadable_artifacts()` and `ensure_artifact_installed()` instead.
"""
-function ensure_all_artifacts_installed(artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- pkg_uuid::Union{Nothing,Base.UUID} = nothing,
- include_lazy::Bool = false,
- verbose::Bool = false,
- quiet_download::Bool = false,
- io::IO=stderr_f())
+function ensure_all_artifacts_installed(
+ artifacts_toml::String;
+ platform::AbstractPlatform = HostPlatform(),
+ pkg_uuid::Union{Nothing, Base.UUID} = nothing,
+ include_lazy::Bool = false,
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO = stderr_f()
+ )
# This function should not be called anymore; use `select_downloadable_artifacts()` directly.
Base.depwarn("`ensure_all_artifacts_installed()` is deprecated; iterate over `select_downloadable_artifacts()` output with `ensure_artifact_installed()`.", :ensure_all_artifacts_installed)
# Collect all artifacts we're supposed to install
artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy, pkg_uuid)
for name in keys(artifacts)
# Otherwise, let's try and install it!
- ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=platform,
- verbose=verbose, quiet_download=quiet_download, io=io)
+ ensure_artifact_installed(
+ name, artifacts[name], artifacts_toml; platform = platform,
+ verbose = verbose, quiet_download = quiet_download, io = io
+ )
end
+ return
end
"""
extract_all_hashes(artifacts_toml::String;
- platform = HostPlatform(),
- pkg_uuid = nothing,
- include_lazy = false)
+ platform = HostPlatform(),
+ pkg_uuid = nothing,
+ include_lazy = false)
Extract all hashes from a given `(Julia)Artifacts.toml` file. `package_uuid` must
be provided to properly support overrides from `Overrides.toml` entries in depots.
If `include_lazy` is set to `true`, then lazy packages will be installed as well.
"""
-function extract_all_hashes(artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- pkg_uuid::Union{Nothing,Base.UUID} = nothing,
- include_lazy::Bool = false)
+function extract_all_hashes(
+ artifacts_toml::String;
+ platform::AbstractPlatform = HostPlatform(),
+ pkg_uuid::Union{Nothing, Base.UUID} = nothing,
+ include_lazy::Bool = false
+ )
hashes = Base.SHA1[]
if !isfile(artifacts_toml)
return hashes
end
- artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid=pkg_uuid)
+ artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid = pkg_uuid)
for name in keys(artifact_dict)
# Get the metadata about this name for the requested platform
- meta = artifact_meta(name, artifact_dict, artifacts_toml; platform=platform)
+ meta = artifact_meta(name, artifact_dict, artifacts_toml; platform = platform)
# If there are no instances of this name for the desired platform, skip it
meta === nothing && continue
@@ -644,4 +682,4 @@ ensure_all_artifacts_installed(artifacts_toml::AbstractString; kwargs...) =
extract_all_hashes(artifacts_toml::AbstractString; kwargs...) =
extract_all_hashes(string(artifacts_toml)::String; kwargs...)
-end # module Artifacts
+end # module PkgArtifacts
diff --git a/src/BinaryPlatformsCompat.jl b/src/BinaryPlatformsCompat.jl
new file mode 100644
index 0000000000..93403e05bd
--- /dev/null
+++ b/src/BinaryPlatformsCompat.jl
@@ -0,0 +1,155 @@
+module BinaryPlatformsCompat
+
+ export platform_key_abi, platform_dlext, valid_dl_path, arch, libc,
+ libgfortran_version, libstdcxx_version, cxxstring_abi, parse_dl_name_version,
+ detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi,
+ call_abi, wordsize, triplet, select_platform, platforms_match,
+ CompilerABI, Platform, UnknownPlatform, Linux, MacOS, Windows, FreeBSD
+
+ using Base.BinaryPlatforms: parse_dl_name_version,
+ detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi,
+ os, call_abi, select_platform, platforms_match,
+ AbstractPlatform, Platform, HostPlatform
+
+ import Base.BinaryPlatforms: libgfortran_version, libstdcxx_version, platform_name,
+ wordsize, platform_dlext, tags, arch, libc, call_abi,
+ cxxstring_abi
+
+ struct UnknownPlatform <: AbstractPlatform
+ UnknownPlatform(args...; kwargs...) = new()
+ end
+ tags(::UnknownPlatform) = Dict{String, String}("os" => "unknown")
+
+
+ struct CompilerABI
+ libgfortran_version::Union{Nothing, VersionNumber}
+ libstdcxx_version::Union{Nothing, VersionNumber}
+ cxxstring_abi::Union{Nothing, Symbol}
+
+ function CompilerABI(;
+ libgfortran_version::Union{Nothing, VersionNumber} = nothing,
+ libstdcxx_version::Union{Nothing, VersionNumber} = nothing,
+ cxxstring_abi::Union{Nothing, Symbol} = nothing
+ )
+ return new(libgfortran_version, libstdcxx_version, cxxstring_abi)
+ end
+ end
+
+ # Easy replacement constructor
+ function CompilerABI(
+ cabi::CompilerABI; libgfortran_version = nothing,
+ libstdcxx_version = nothing,
+ cxxstring_abi = nothing
+ )
+ return CompilerABI(;
+ libgfortran_version = something(libgfortran_version, Some(cabi.libgfortran_version)),
+ libstdcxx_version = something(libstdcxx_version, Some(cabi.libstdcxx_version)),
+ cxxstring_abi = something(cxxstring_abi, Some(cabi.cxxstring_abi)),
+ )
+ end
+
+ libgfortran_version(cabi::CompilerABI) = cabi.libgfortran_version
+ libstdcxx_version(cabi::CompilerABI) = cabi.libstdcxx_version
+ cxxstring_abi(cabi::CompilerABI) = cabi.cxxstring_abi
+
+ for T in (:Linux, :Windows, :MacOS, :FreeBSD)
+ @eval begin
+ struct $(T) <: AbstractPlatform
+ p::Platform
+ function $(T)(arch::Symbol; compiler_abi = nothing, kwargs...)
+ if compiler_abi !== nothing
+ kwargs = (;
+ kwargs...,
+ :libgfortran_version => libgfortran_version(compiler_abi),
+ :libstdcxx_version => libstdcxx_version(compiler_abi),
+ :cxxstring_abi => cxxstring_abi(compiler_abi),
+ )
+ end
+ return new(Platform(string(arch), $(string(T)); kwargs..., validate_strict = true))
+ end
+ end
+ end
+ end
+
+ const PlatformUnion = Union{Linux, MacOS, Windows, FreeBSD}
+
+ # First, methods we need to coerce to Symbol for backwards-compatibility
+ for f in (:arch, :libc, :call_abi, :cxxstring_abi)
+ @eval begin
+ function $(f)(p::PlatformUnion)
+ str = $(f)(p.p)
+ if str === nothing
+ return nothing
+ end
+ return Symbol(str)
+ end
+ end
+ end
+
+ # Next, things we don't need to coerce
+ for f in (:libgfortran_version, :libstdcxx_version, :platform_name, :wordsize, :platform_dlext, :tags, :triplet)
+ @eval begin
+ $(f)(p::PlatformUnion) = $(f)(p.p)
+ end
+ end
+
+ # Finally, add equality testing between these wrapper types and other AbstractPlatforms
+ @eval begin
+ Base.:(==)(a::PlatformUnion, b::AbstractPlatform) = b == a.p
+ end
+
+ # Add one-off functions
+ MacOS(; kwargs...) = MacOS(:x86_64; kwargs...)
+ FreeBSD(; kwargs...) = FreeBSD(:x86_64; kwargs...)
+
+ function triplet(p::AbstractPlatform)
+ # We are going to sub off to `Base.BinaryPlatforms.triplet()` here,
+ # with the important exception that we override `os_version` to better
+ # mimic the old behavior of `triplet()`
+ if Sys.isfreebsd(p)
+ p = deepcopy(p)
+ p["os_version"] = "11.1.0"
+ elseif Sys.isapple(p)
+ p = deepcopy(p)
+ p["os_version"] = "14.0.0"
+ end
+ return Base.BinaryPlatforms.triplet(p)
+ end
+
+ """
+ platform_key_abi(machine::AbstractString)
+
+ Returns the platform key for the current platform, or any other though the
+ the use of the `machine` parameter.
+
+ This method is deprecated, import `Base.BinaryPlatforms` and use either `HostPlatform()`
+ to get the current host platform, or `parse(Base.BinaryPlatforms.Platform, triplet)`
+ to parse the triplet for some other platform instead.
+ """
+ platform_key_abi() = HostPlatform()
+ platform_key_abi(triplet::AbstractString) = parse(Platform, triplet)
+
+ """
+ valid_dl_path(path::AbstractString, platform::Platform)
+
+ Return `true` if the given `path` ends in a valid dynamic library filename.
+ E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns
+ `false` for a path like `"libbar.so.f.a"`.
+
+ This method is deprecated and will be removed in Julia 2.0.
+ """
+ function valid_dl_path(path::AbstractString, platform::AbstractPlatform)
+ try
+ parse_dl_name_version(path, string(os(platform))::String)
+ return true
+ catch e
+ if isa(e, ArgumentError)
+ return false
+ end
+ rethrow(e)
+ end
+ end
+
+end # module BinaryPlatformsCompat
+
+const BinaryPlatforms = BinaryPlatformsCompat
diff --git a/src/BinaryPlatforms_compat.jl b/src/BinaryPlatforms_compat.jl
deleted file mode 100644
index 879dcc0c83..0000000000
--- a/src/BinaryPlatforms_compat.jl
+++ /dev/null
@@ -1,148 +0,0 @@
-module BinaryPlatforms
-
-export platform_key_abi, platform_dlext, valid_dl_path, arch, libc,
- libgfortran_version, libstdcxx_version, cxxstring_abi, parse_dl_name_version,
- detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi,
- call_abi, wordsize, triplet, select_platform, platforms_match,
- CompilerABI, Platform, UnknownPlatform, Linux, MacOS, Windows, FreeBSD
-
-using Base.BinaryPlatforms: parse_dl_name_version,
- detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi,
- os, call_abi, select_platform, platforms_match,
- AbstractPlatform, Platform, HostPlatform
-
-import Base.BinaryPlatforms: libgfortran_version, libstdcxx_version, platform_name,
- wordsize, platform_dlext, tags, arch, libc, call_abi,
- cxxstring_abi
-
-struct UnknownPlatform <: AbstractPlatform
- UnknownPlatform(args...; kwargs...) = new()
-end
-tags(::UnknownPlatform) = Dict{String,String}("os"=>"unknown")
-
-
-struct CompilerABI
- libgfortran_version::Union{Nothing,VersionNumber}
- libstdcxx_version::Union{Nothing,VersionNumber}
- cxxstring_abi::Union{Nothing,Symbol}
-
- function CompilerABI(;libgfortran_version::Union{Nothing, VersionNumber} = nothing,
- libstdcxx_version::Union{Nothing, VersionNumber} = nothing,
- cxxstring_abi::Union{Nothing, Symbol} = nothing)
- return new(libgfortran_version, libstdcxx_version, cxxstring_abi)
- end
-end
-
-# Easy replacement constructor
-function CompilerABI(cabi::CompilerABI; libgfortran_version=nothing,
- libstdcxx_version=nothing,
- cxxstring_abi=nothing)
- return CompilerABI(;
- libgfortran_version=something(libgfortran_version, Some(cabi.libgfortran_version)),
- libstdcxx_version=something(libstdcxx_version, Some(cabi.libstdcxx_version)),
- cxxstring_abi=something(cxxstring_abi, Some(cabi.cxxstring_abi)),
- )
-end
-
-libgfortran_version(cabi::CompilerABI) = cabi.libgfortran_version
-libstdcxx_version(cabi::CompilerABI) = cabi.libstdcxx_version
-cxxstring_abi(cabi::CompilerABI) = cabi.cxxstring_abi
-
-for T in (:Linux, :Windows, :MacOS, :FreeBSD)
- @eval begin
- struct $(T) <: AbstractPlatform
- p::Platform
- function $(T)(arch::Symbol; compiler_abi=nothing, kwargs...)
- if compiler_abi !== nothing
- kwargs = (; kwargs...,
- :libgfortran_version => libgfortran_version(compiler_abi),
- :libstdcxx_version => libstdcxx_version(compiler_abi),
- :cxxstring_abi => cxxstring_abi(compiler_abi)
- )
- end
- return new(Platform(string(arch), $(string(T)); kwargs..., validate_strict=true))
- end
- end
- end
-end
-
-const PlatformUnion = Union{Linux,MacOS,Windows,FreeBSD}
-
-# First, methods we need to coerce to Symbol for backwards-compatibility
-for f in (:arch, :libc, :call_abi, :cxxstring_abi)
- @eval begin
- function $(f)(p::PlatformUnion)
- str = $(f)(p.p)
- if str === nothing
- return nothing
- end
- return Symbol(str)
- end
- end
-end
-
-# Next, things we don't need to coerce
-for f in (:libgfortran_version, :libstdcxx_version, :platform_name, :wordsize, :platform_dlext, :tags, :triplet)
- @eval begin
- $(f)(p::PlatformUnion) = $(f)(p.p)
- end
-end
-
-# Finally, add equality testing between these wrapper types and other AbstractPlatforms
-@eval begin
- Base.:(==)(a::PlatformUnion, b::AbstractPlatform) = b == a.p
-end
-
-# Add one-off functions
-MacOS(; kwargs...) = MacOS(:x86_64; kwargs...)
-FreeBSD(; kwargs...) = FreeBSD(:x86_64; kwargs...)
-
-function triplet(p::AbstractPlatform)
- # We are going to sub off to `Base.BinaryPlatforms.triplet()` here,
- # with the important exception that we override `os_version` to better
- # mimic the old behavior of `triplet()`
- if Sys.isfreebsd(p)
- p = deepcopy(p)
- p["os_version"] = "11.1.0"
- elseif Sys.isapple(p)
- p = deepcopy(p)
- p["os_version"] = "14.0.0"
- end
- return Base.BinaryPlatforms.triplet(p)
-end
-
-"""
- platform_key_abi(machine::AbstractString)
-
-Returns the platform key for the current platform, or any other though the
-the use of the `machine` parameter.
-
-This method is deprecated, import `Base.BinaryPlatforms` and use either `HostPlatform()`
-to get the current host platform, or `parse(Base.BinaryPlatforms.Platform, triplet)`
-to parse the triplet for some other platform instead.
-"""
-platform_key_abi() = HostPlatform()
-platform_key_abi(triplet::AbstractString) = parse(Platform, triplet)
-
-"""
- valid_dl_path(path::AbstractString, platform::Platform)
-
-Return `true` if the given `path` ends in a valid dynamic library filename.
-E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns
-`false` for a path like `"libbar.so.f.a"`.
-
-This method is deprecated and will be removed in Julia 2.0.
-"""
-function valid_dl_path(path::AbstractString, platform::AbstractPlatform)
- try
- parse_dl_name_version(path, string(os(platform))::String)
- return true
- catch e
- if isa(e, ArgumentError)
- return false
- end
- rethrow(e)
- end
-end
-
-end # module BinaryPlatforms
diff --git a/src/GitTools.jl b/src/GitTools.jl
index 02fae614ea..70e129350e 100644
--- a/src/GitTools.jl
+++ b/src/GitTools.jl
@@ -13,6 +13,45 @@ using Printf
use_cli_git() = Base.get_bool_env("JULIA_PKG_USE_CLI_GIT", false)
const RESOLVING_DELTAS_HEADER = "Resolving Deltas:"
+# Check if LibGit2 supports shallow clones (requires LibGit2 >= 1.7.0)
+# We check both the LibGit2 version and the existence of `isshallow` to ensure
+# the shallow clone functionality is available
+function supports_shallow_clone()
+ # This seems buggy on Windows? Get some weird CI errors with it.
+ if Sys.iswindows()
+ return false
+ end
+ has_version = @static if isdefined(LibGit2, :VERSION)
+ LibGit2.VERSION >= v"1.7.0"
+ else
+ false
+ end
+ has_isshallow = isdefined(LibGit2, :isshallow)
+ return has_version && has_isshallow
+end
+
+# Check if a URL is a local path or file:// URL
+# Shallow clones are only supported for network protocols (HTTP, HTTPS, Git, SSH)
+function is_local_repo(url::AbstractString)
+ # Check if it's a local filesystem path
+ ispath(url) && return true
+ # Check if it uses file:// protocol
+ startswith(url, "file://") && return true
+ return false
+end
+
+# Check if a repository is a shallow clone
+function isshallow(repo::LibGit2.GitRepo)
+ if supports_shallow_clone() && isdefined(LibGit2, :isshallow)
+ return LibGit2.isshallow(repo)
+ else
+ # Fallback: check for .git/shallow file
+ repo_path = LibGit2.path(repo)
+ shallow_file = joinpath(repo_path, "shallow")
+ return isfile(shallow_file)
+ end
+end
+
function transfer_progress(progress::Ptr{LibGit2.TransferProgress}, p::Any)
progress = unsafe_load(progress)
@assert haskey(p, :transfer_progress)
@@ -41,13 +80,13 @@ const GIT_USERS = Dict{String, Union{Nothing, String}}()
@deprecate setprotocol!(proto::Union{Nothing, AbstractString}) setprotocol!(protocol = proto) false
function setprotocol!(;
- domain::AbstractString="github.com",
- protocol::Union{Nothing, AbstractString}=nothing,
- user::Union{Nothing, AbstractString}=(protocol == "ssh" ? "git" : nothing)
-)
+ domain::AbstractString = "github.com",
+ protocol::Union{Nothing, AbstractString} = nothing,
+ user::Union{Nothing, AbstractString} = (protocol == "ssh" ? "git" : nothing)
+ )
domain = lowercase(domain)
GIT_PROTOCOLS[domain] = protocol
- GIT_USERS[domain] = user
+ return GIT_USERS[domain] = user
end
function normalize_url(url::AbstractString)
@@ -61,7 +100,7 @@ function normalize_url(url::AbstractString)
proto = get(GIT_PROTOCOLS, lowercase(host), nothing)
- if proto === nothing
+ return if proto === nothing
url
else
user = get(GIT_USERS, lowercase(host), nothing)
@@ -80,60 +119,75 @@ function ensure_clone(io::IO, target_path, url; kwargs...)
end
function checkout_tree_to_path(repo::LibGit2.GitRepo, tree::LibGit2.GitObject, path::String)
- GC.@preserve path begin
+ return GC.@preserve path begin
opts = LibGit2.CheckoutOptions(
checkout_strategy = LibGit2.Consts.CHECKOUT_FORCE,
target_directory = Base.unsafe_convert(Cstring, path)
)
- LibGit2.checkout_tree(repo, tree, options=opts)
+ LibGit2.checkout_tree(repo, tree, options = opts)
end
end
-function clone(io::IO, url, source_path; header=nothing, credentials=nothing, kwargs...)
+function clone(io::IO, url, source_path; header = nothing, credentials = nothing, isbare = false, depth::Integer = 0, kwargs...)
url = String(url)::String
source_path = String(source_path)::String
@assert !isdir(source_path) || isempty(readdir(source_path))
url = normalize_url(url)
+
+ # Disable shallow clones for local repos (not supported) or if LibGit2 doesn't support it
+ if depth > 0 && (is_local_repo(url) || !supports_shallow_clone())
+ depth = 0
+ end
+
printpkgstyle(io, :Cloning, header === nothing ? "git-repo `$url`" : header)
- bar = MiniProgressBar(header = "Fetching:", color = Base.info_color())
+ bar = MiniProgressBar(header = "Cloning:", color = Base.info_color())
fancyprint = can_fancyprint(io)
- callbacks = if fancyprint
- LibGit2.Callbacks(
- :transfer_progress => (
- @cfunction(transfer_progress, Cint, (Ptr{LibGit2.TransferProgress}, Any)),
- bar,
- )
- )
- else
- LibGit2.Callbacks()
- end
fancyprint && start_progress(io, bar)
if credentials === nothing
credentials = LibGit2.CachedCredentials()
end
- try
+ return try
if use_cli_git()
- cmd = `git clone --quiet $url $source_path`
+ args = ["--quiet"]
+ depth > 0 && push!(args, "--depth=$depth")
+ isbare && push!(args, "--bare")
+ push!(args, url, source_path)
+ cmd = `git clone $args`
try
- run(pipeline(cmd; stdout=devnull))
+ run(pipeline(cmd; stdout = devnull))
catch err
Pkg.Types.pkgerror("The command $(cmd) failed, error: $err")
end
return LibGit2.GitRepo(source_path)
else
+ callbacks = if fancyprint
+ LibGit2.Callbacks(
+ :transfer_progress => (
+ @cfunction(transfer_progress, Cint, (Ptr{LibGit2.TransferProgress}, Any)),
+ bar,
+ )
+ )
+ else
+ LibGit2.Callbacks()
+ end
mkpath(source_path)
- return LibGit2.clone(url, source_path; callbacks=callbacks, credentials=credentials, kwargs...)
+ # Only pass depth if shallow clones are supported and depth > 0
+ if depth > 0
+ return LibGit2.clone(url, source_path; callbacks, credentials, isbare, depth, kwargs...)
+ else
+ return LibGit2.clone(url, source_path; callbacks, credentials, isbare, kwargs...)
+ end
end
catch err
- rm(source_path; force=true, recursive=true)
+ rm(source_path; force = true, recursive = true)
err isa LibGit2.GitError || err isa InterruptException || rethrow()
if err isa InterruptException
Pkg.Types.pkgerror("git clone of `$url` interrupted")
elseif (err.class == LibGit2.Error.Net && err.code == LibGit2.Error.EINVALIDSPEC) ||
- (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ENOTFOUND)
- Pkg.Types.pkgerror("git repository not found at `$(url)`")
+ (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ENOTFOUND)
+ Pkg.Types.pkgerror("git repository not found at `$(url)`: ($(err.msg))")
else
- Pkg.Types.pkgerror("failed to clone from $(url), error: $err")
+ Pkg.Types.pkgerror("failed to clone from $(url): ($(err.msg))")
end
finally
Base.shred!(credentials)
@@ -141,17 +195,26 @@ function clone(io::IO, url, source_path; header=nothing, credentials=nothing, kw
end
end
-function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl=nothing; header=nothing, credentials=nothing, refspecs=[""], kwargs...)
+function geturl(repo)
+ return LibGit2.with(LibGit2.get(LibGit2.GitRemote, repo, "origin")) do remote
+ LibGit2.url(remote)
+ end
+end
+
+function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl = nothing; header = nothing, credentials = nothing, refspecs::Vector{String} = [""], depth::Integer = 0, kwargs...)
if remoteurl === nothing
- remoteurl = LibGit2.with(LibGit2.get(LibGit2.GitRemote, repo, "origin")) do remote
- LibGit2.url(remote)
- end
+ remoteurl = geturl(repo)
+ end
+
+ # Disable shallow fetches for local repos (not supported) or if LibGit2 doesn't support it
+ if depth > 0 && (is_local_repo(remoteurl) || !supports_shallow_clone())
+ depth = 0
end
+
fancyprint = can_fancyprint(io)
remoteurl = normalize_url(remoteurl)
printpkgstyle(io, :Updating, header === nothing ? "git-repo `$remoteurl`" : header)
bar = MiniProgressBar(header = "Fetching:", color = Base.info_color())
- fancyprint = can_fancyprint(io)
callbacks = if fancyprint
LibGit2.Callbacks(
:transfer_progress => (
@@ -166,27 +229,33 @@ function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl=nothing; header=nothing,
if credentials === nothing
credentials = LibGit2.CachedCredentials()
end
- try
+ return try
if use_cli_git()
- let remoteurl=remoteurl
- cd(LibGit2.path(repo)) do
- cmd = `git fetch -q $remoteurl $(only(refspecs))`
- try
- run(pipeline(cmd; stdout=devnull))
- catch err
- Pkg.Types.pkgerror("The command $(cmd) failed, error: $err")
- end
+ let remoteurl = remoteurl
+ args = ["-C", LibGit2.path(repo), "fetch", "-q"]
+ depth > 0 && push!(args, "--depth=$depth")
+ push!(args, remoteurl, only(refspecs))
+ cmd = `git $args`
+ try
+ run(pipeline(cmd; stdout = devnull))
+ catch err
+ Pkg.Types.pkgerror("The command $(cmd) failed, error: $err")
end
end
else
- return LibGit2.fetch(repo; remoteurl=remoteurl, callbacks=callbacks, refspecs=refspecs, kwargs...)
+ # Only pass depth if shallow clones are supported and depth > 0
+ if depth > 0
+ return LibGit2.fetch(repo; remoteurl, callbacks, credentials, refspecs, depth, kwargs...)
+ else
+ return LibGit2.fetch(repo; remoteurl, callbacks, credentials, refspecs, kwargs...)
+ end
end
catch err
err isa LibGit2.GitError || rethrow()
if (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ERROR)
- Pkg.Types.pkgerror("Git repository not found at '$(remoteurl)'")
+ Pkg.Types.pkgerror("Git repository not found at '$(remoteurl)': ($(err.msg))")
else
- Pkg.Types.pkgerror("failed to fetch from $(remoteurl), error: $err")
+ Pkg.Types.pkgerror("failed to fetch from $(remoteurl): ($(err.msg))")
end
finally
Base.shred!(credentials)
@@ -196,8 +265,8 @@ end
# This code gratefully adapted from https://github.com/simonbyrne/GitX.jl
-@enum GitMode mode_dir=0o040000 mode_normal=0o100644 mode_executable=0o100755 mode_symlink=0o120000 mode_submodule=0o160000
-Base.string(mode::GitMode) = string(UInt32(mode); base=8)
+@enum GitMode mode_dir = 0o040000 mode_normal = 0o100644 mode_executable = 0o100755 mode_symlink = 0o120000 mode_submodule = 0o160000
+Base.string(mode::GitMode) = string(UInt32(mode); base = 8)
Base.print(io::IO, mode::GitMode) = print(io, string(mode))
function gitmode(path::AbstractString)
@@ -227,7 +296,7 @@ end
Calculate the git blob hash of a given path.
"""
-function blob_hash(::Type{HashType}, path::AbstractString) where HashType
+function blob_hash(::Type{HashType}, path::AbstractString) where {HashType}
ctx = HashType()
if islink(path)
datalen = length(readlink(path))
@@ -239,7 +308,7 @@ function blob_hash(::Type{HashType}, path::AbstractString) where HashType
SHA.update!(ctx, Vector{UInt8}("blob $(datalen)\0"))
# Next, read data in in chunks of 4KB
- buff = Vector{UInt8}(undef, 4*1024)
+ buff = Vector{UInt8}(undef, 4 * 1024)
try
if islink(path)
@@ -287,9 +356,9 @@ end
Calculate the git tree hash of a given path.
"""
-function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,Nothing} = nothing, indent::Int=0) where HashType
+function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO, Nothing} = nothing, indent::Int = 0) where {HashType}
entries = Tuple{String, Vector{UInt8}, GitMode}[]
- for f in sort(readdir(root; join=true); by = f -> gitmode(f) == mode_dir ? f*"/" : f)
+ for f in sort(readdir(root; join = true); by = f -> gitmode(f) == mode_dir ? f * "/" : f)
# Skip `.git` directories
if basename(f) == ".git"
continue
@@ -306,11 +375,11 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N
if debug_out !== nothing
child_stream = IOBuffer()
end
- hash = tree_hash(HashType, filepath; debug_out=child_stream, indent=indent+1)
+ hash = tree_hash(HashType, filepath; debug_out = child_stream, indent = indent + 1)
if debug_out !== nothing
indent_str = "| "^indent
println(debug_out, "$(indent_str)+ [D] $(basename(filepath)) - $(bytes2hex(hash))")
- print(debug_out, String(take!(child_stream)))
+ print(debug_out, String(take!(child_stream::IOBuffer)))
println(debug_out, indent_str)
end
else
@@ -326,7 +395,7 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N
content_size = 0
for (n, h, m) in entries
- content_size += ndigits(UInt32(m); base=8) + 1 + sizeof(n) + 1 + sizeof(h)
+ content_size += ndigits(UInt32(m); base = 8) + 1 + sizeof(n) + 1 + sizeof(h)
end
# Return the hash of these entries
@@ -338,17 +407,24 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N
end
return SHA.digest!(ctx)
end
-tree_hash(root::AbstractString; debug_out::Union{IO,Nothing} = nothing) = tree_hash(SHA.SHA1_CTX, root; debug_out)
+tree_hash(root::AbstractString; debug_out::Union{IO, Nothing} = nothing) = tree_hash(SHA.SHA1_CTX, root; debug_out)
function check_valid_HEAD(repo)
- try LibGit2.head(repo)
+ return try
+ LibGit2.head(repo)
catch err
- Pkg.Types.pkgerror("invalid git HEAD ($(err.msg))")
+ url = try
+ geturl(repo)
+ catch
+ "(unknown url)"
+ end
+ Pkg.Types.pkgerror("invalid git HEAD in $url ($(err.msg))")
end
end
-function git_file_stream(repo::LibGit2.GitRepo, spec::String; fakeit::Bool=false)::IO
- blob = try LibGit2.GitBlob(repo, spec)
+function git_file_stream(repo::LibGit2.GitRepo, spec::String; fakeit::Bool = false)::IO
+ blob = try
+ LibGit2.GitBlob(repo, spec)
catch err
err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
fakeit && return devnull
diff --git a/src/HistoricalStdlibs.jl b/src/HistoricalStdlibs.jl
index d5b4ad5049..6867d1e832 100644
--- a/src/HistoricalStdlibs.jl
+++ b/src/HistoricalStdlibs.jl
@@ -5,13 +5,13 @@ struct StdlibInfo
uuid::UUID
# This can be `nothing` if it's an unregistered stdlib
- version::Union{Nothing,VersionNumber}
+ version::Union{Nothing, VersionNumber}
deps::Vector{UUID}
weakdeps::Vector{UUID}
end
-const DictStdLibs = Dict{UUID,StdlibInfo}
+const DictStdLibs = Dict{UUID, StdlibInfo}
# Julia standard libraries with duplicate entries removed so as to store only the
# first release in a set of releases that all contain the same set of stdlibs.
diff --git a/src/MiniProgressBars.jl b/src/MiniProgressBars.jl
index c0a487d6b6..26c11da564 100644
--- a/src/MiniProgressBars.jl
+++ b/src/MiniProgressBars.jl
@@ -5,12 +5,12 @@ export MiniProgressBar, start_progress, end_progress, show_progress, print_progr
using Printf
# Until Base.format_bytes supports sigdigits
-function pkg_format_bytes(bytes; binary=true, sigdigits::Integer=3)
+function pkg_format_bytes(bytes; binary = true, sigdigits::Integer = 3)
units = binary ? Base._mem_units : Base._cnt_units
factor = binary ? 1024 : 1000
bytes, mb = Base.prettyprint_getunits(bytes, length(units), Int64(factor))
if mb == 1
- return string(Int(bytes), " ", Base._mem_units[mb], bytes==1 ? "" : "s")
+ return string(Int(bytes), " ", Base._mem_units[mb], bytes == 1 ? "" : "s")
else
return string(Base.Ryu.writefixed(Float64(bytes), sigdigits), binary ? " $(units[mb])" : "$(units[mb])B")
end
@@ -37,10 +37,10 @@ const PROGRESS_BAR_PERCENTAGE_GRANULARITY = Ref(0.1)
function start_progress(io::IO, _::MiniProgressBar)
ansi_disablecursor = "\e[?25l"
- print(io, ansi_disablecursor)
+ return print(io, ansi_disablecursor)
end
-function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagereturn=true)
+function show_progress(io::IO, p::MiniProgressBar; termwidth = nothing, carriagereturn = true)
if p.max == 0
perc = 0.0
prev_perc = 0.0
@@ -62,24 +62,24 @@ function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagere
p.has_shown = true
progress_text = if p.mode == :percentage
- @sprintf "%2.1f %%" perc
+ @sprintf "%5.1f %%" perc
elseif p.mode == :int
- string(p.current, "/", p.max)
+ string(p.current, "/", p.max)
elseif p.mode == :data
- lpad(string(pkg_format_bytes(p.current; sigdigits=1), "/", pkg_format_bytes(p.max; sigdigits=1)), 20)
+ lpad(string(pkg_format_bytes(p.current; sigdigits = 1), "/", pkg_format_bytes(p.max; sigdigits = 1)), 20)
else
error("Unknown mode $(p.mode)")
end
termwidth = @something termwidth displaysize(io)[2]
- max_progress_width = max(0, min(termwidth - textwidth(p.header) - textwidth(progress_text) - 10 , p.width))
+ max_progress_width = max(0, min(termwidth - textwidth(p.header) - textwidth(progress_text) - 10, p.width))
n_filled = floor(Int, max_progress_width * perc / 100)
partial_filled = (max_progress_width * perc / 100) - n_filled
n_left = max_progress_width - n_filled
headers = split(p.header)
- to_print = sprint(; context=io) do io
+ to_print = sprint(; context = io) do io
print(io, " "^p.indent)
if p.main
- printstyled(io, headers[1], " "; color=:green, bold=true)
+ printstyled(io, headers[1], " "; color = :green, bold = true)
length(headers) > 1 && printstyled(io, join(headers[2:end], ' '), " ")
else
print(io, p.header, " ")
@@ -88,49 +88,38 @@ function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagere
print(io, p.status)
else
hascolor = get(io, :color, false)::Bool
- printstyled(io, "━"^n_filled; color=p.color)
+ printstyled(io, "━"^n_filled; color = p.color)
if n_left > 0
if hascolor
if partial_filled > 0.5
- printstyled(io, "╸"; color=p.color) # More filled, use ╸
+ printstyled(io, "╸"; color = p.color) # More filled, use ╸
else
- printstyled(io, "╺"; color=:light_black) # Less filled, use ╺
+ printstyled(io, "╺"; color = :light_black) # Less filled, use ╺
end
end
c = hascolor ? "━" : " "
- printstyled(io, c^(n_left-1+!hascolor); color=:light_black)
+ printstyled(io, c^(n_left - 1 + !hascolor); color = :light_black)
end
- printstyled(io, " "; color=:light_black)
+ printstyled(io, " "; color = :light_black)
print(io, progress_text)
end
carriagereturn && print(io, "\r")
end
# Print everything in one call
- print(io, to_print)
+ return print(io, to_print)
end
function end_progress(io, p::MiniProgressBar)
ansi_enablecursor = "\e[?25h"
ansi_clearline = "\e[2K"
- print(io, ansi_enablecursor * ansi_clearline)
+ return print(io, ansi_enablecursor * ansi_clearline)
end
-# Useful when writing a progress bar in the bottom
-# makes the bottom progress bar not flicker
-# prog = MiniProgressBar(...)
-# prog.end = n
-# for progress in 1:n
-# print_progress_bottom(io)
-# println("stuff")
-# prog.current = progress
-# showprogress(io, prog)
-# end
-#
function print_progress_bottom(io::IO)
ansi_clearline = "\e[2K"
ansi_movecol1 = "\e[1G"
ansi_moveup(n::Int) = string("\e[", n, "A")
- print(io, "\e[S" * ansi_moveup(1) * ansi_clearline * ansi_movecol1)
+ return print(io, "\e[S" * ansi_moveup(1) * ansi_clearline * ansi_movecol1)
end
end
diff --git a/src/Operations.jl b/src/Operations.jl
index 6d6bc94558..10254a95ff 100644
--- a/src/Operations.jl
+++ b/src/Operations.jl
@@ -2,6 +2,8 @@
module Operations
+using Base: CacheFlags
+using FileWatching: FileWatching
using UUIDs
using Random: randstring
import LibGit2, Dates, TOML
@@ -9,19 +11,61 @@ import LibGit2, Dates, TOML
using ..Types, ..Resolve, ..PlatformEngines, ..GitTools, ..MiniProgressBars
import ..depots, ..depots1, ..devdir, ..set_readonly, ..Types.PackageEntry
import ..Artifacts: ensure_artifact_installed, artifact_names, extract_all_hashes,
- artifact_exists, select_downloadable_artifacts
+ artifact_exists, select_downloadable_artifacts, mv_temp_dir_retries
using Base.BinaryPlatforms
import ...Pkg
import ...Pkg: pkg_server, Registry, pathrepr, can_fancyprint, printpkgstyle, stderr_f, OFFLINE_MODE
import ...Pkg: UPDATED_REGISTRY_THIS_SESSION, RESPECT_SYSIMAGE_VERSIONS, should_autoprecompile
-import ...Pkg: usable_io
+import ...Pkg: usable_io, discover_repo, create_cachedir_tag, manifest_rel_path
#########
# Utils #
#########
+# Helper functions for yanked package checking
+function is_pkgversion_yanked(uuid::UUID, version::VersionNumber, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries())
+ for reg in registries
+ reg_pkg = get(reg, uuid, nothing)
+ if reg_pkg !== nothing
+ info = Registry.registry_info(reg, reg_pkg)
+ if haskey(info.version_info, version) && Registry.isyanked(info, version)
+ return true
+ end
+ end
+ end
+ return false
+end
+
+function is_pkgversion_yanked(pkg::PackageSpec, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries())
+ if pkg.uuid === nothing || pkg.version === nothing || !(pkg.version isa VersionNumber)
+ return false
+ end
+ return is_pkgversion_yanked(pkg.uuid, pkg.version, registries)
+end
+
+function is_pkgversion_yanked(entry::PackageEntry, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries())
+ if entry.version === nothing || !(entry.version isa VersionNumber)
+ return false
+ end
+ return is_pkgversion_yanked(entry.uuid, entry.version, registries)
+end
+
+function get_pkg_deprecation_info(pkg::Union{PackageSpec, PackageEntry}, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries())
+ pkg.uuid === nothing && return nothing
+ for reg in registries
+ reg_pkg = get(reg, pkg.uuid, nothing)
+ if reg_pkg !== nothing
+ info = Registry.registry_info(reg, reg_pkg)
+ if Registry.isdeprecated(info)
+ return info.deprecated
+ end
+ end
+ end
+ return nothing
+end
+
function default_preserve()
- if Base.get_bool_env("JULIA_PKG_PRESERVE_TIERED_INSTALLED", false)
+ return if Base.get_bool_env("JULIA_PKG_PRESERVE_TIERED_INSTALLED", false)
PRESERVE_TIERED_INSTALLED
else
PRESERVE_TIERED
@@ -42,14 +86,53 @@ end
# more accurate name is `should_be_tracking_registered_version`
# the only way to know for sure is to key into the registries
-tracking_registered_version(pkg::Union{PackageSpec, PackageEntry}, julia_version=VERSION) =
+tracking_registered_version(pkg::Union{PackageSpec, PackageEntry}, julia_version = VERSION) =
!is_stdlib(pkg.uuid, julia_version) && pkg.path === nothing && pkg.repo.source === nothing
+
+# Try to download all registries referenced in `ctx.env.manifest.registries`.
+# Warn if some fail, but don't error (packages may still work with the registries we have).
+function ensure_manifest_registries!(ctx::Context)
+ manifest_regs = ctx.env.manifest.registries
+ isempty(manifest_regs) && return
+
+ regs_by_uuid = Dict(reg.uuid => reg for reg in ctx.registries)
+ missing = ManifestRegistryEntry[]
+ for entry in values(manifest_regs)
+ reg = get(regs_by_uuid, entry.uuid, nothing)
+ if reg === nothing
+ push!(missing, entry)
+ end
+ end
+
+ isempty(missing) && return
+
+ # Try to install missing registries that have URLs
+ specs = Registry.RegistrySpec[]
+ for entry in missing
+ if entry.url !== nothing
+ push!(specs, Registry.RegistrySpec(uuid = entry.uuid, url = entry.url))
+ end
+ end
+
+ if !isempty(specs)
+ try
+ Registry.add(specs; io = ctx.io)
+ copy!(ctx.registries, Registry.reachable_registries())
+ catch e
+ # Warn but don't error - packages may still work with available registries
+ @warn "Failed to install some registries from manifest" exception = (e, catch_backtrace())
+ end
+ end
+
+ return
+end
+
function source_path(manifest_file::String, pkg::Union{PackageSpec, PackageEntry}, julia_version = VERSION)
- pkg.tree_hash !== nothing ? find_installed(pkg.name, pkg.uuid, pkg.tree_hash) :
- pkg.path !== nothing ? joinpath(dirname(manifest_file), pkg.path) :
- is_or_was_stdlib(pkg.uuid, julia_version) ? Types.stdlib_path(pkg.name) :
- nothing
+ return pkg.tree_hash !== nothing ? find_installed(pkg.name, pkg.uuid, pkg.tree_hash) :
+ pkg.path !== nothing ? normpath(joinpath(dirname(manifest_file), pkg.path)) :
+ is_or_was_stdlib(pkg.uuid, julia_version) ? Types.stdlib_path(pkg.name) :
+ nothing
end
#TODO rename
@@ -67,8 +150,24 @@ function load_version(version, fixed, preserve::PreserveLevel)
end
end
-function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_DIRECT)
+function merge_pkg_source!(pkg::PackageSpec, path::Union{Nothing, String}, repo::GitRepo)
+ if pkg.path === nothing && path !== nothing
+ pkg.path = path
+ elseif pkg.repo.source === nothing && repo.source !== nothing
+ pkg.repo.source = repo.source
+ end
+ if pkg.repo.rev === nothing && repo.rev !== nothing
+ pkg.repo.rev = repo.rev
+ end
+ return
+end
+merge_pkg_source!(target::PackageSpec, source::PackageSpec) =
+ merge_pkg_source!(target, source.path, source.repo)
+
+function load_direct_deps(
+ env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_DIRECT
+ )
pkgs_direct = load_project_deps(env.project, env.project_file, env.manifest, env.manifest_file, pkgs; preserve)
for (path, project) in env.workspace
@@ -82,18 +181,7 @@ function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]
pkg = pkgs_direct[idxs[1]]
idx_to_drop = Int[]
for i in Iterators.drop(idxs, 1)
- # Merge in sources from other projects
- # Manifest info like pinned, tree_hash and version should be the same
- # since that is all loaded from the same manifest
- if pkg.path === nothing && pkgs_direct[i].path !== nothing
- pkg.path = pkgs_direct[i].path
- end
- if pkg.repo.source === nothing && pkgs_direct[i].repo.source !== nothing
- pkg.repo.source = pkgs_direct[i].repo.source
- end
- if pkg.repo.rev === nothing && pkgs_direct[i].repo.rev !== nothing
- pkg.repo.rev = pkgs_direct[i].repo.rev
- end
+ merge_pkg_source!(pkg, pkgs_direct[i])
push!(idx_to_drop, i)
end
sort!(unique!(idx_to_drop))
@@ -103,70 +191,85 @@ function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]
return vcat(pkgs, pkgs_direct)
end
-function load_project_deps(project::Project, project_file::String, manifest::Manifest, manifest_file::String, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_DIRECT)
+function load_project_deps(
+ project::Project, project_file::String, manifest::Manifest, manifest_file::String, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_DIRECT
+ )
pkgs_direct = PackageSpec[]
if project.name !== nothing && project.uuid !== nothing && findfirst(pkg -> pkg.uuid == project.uuid, pkgs) === nothing
path = Types.relative_project_path(manifest_file, dirname(project_file))
- pkg = PackageSpec(;name=project.name, uuid=project.uuid, version=project.version, path)
+ pkg = PackageSpec(; name = project.name, uuid = project.uuid, version = project.version, path)
push!(pkgs_direct, pkg)
end
for (name::String, uuid::UUID) in project.deps
findfirst(pkg -> pkg.uuid == uuid, pkgs) === nothing || continue # do not duplicate packages
- path, repo = get_path_repo(project, name)
+ path, repo = get_path_repo(project, project_file, manifest_file, name)
entry = manifest_info(manifest, uuid)
- push!(pkgs_direct, entry === nothing ?
- PackageSpec(;uuid, name, path, repo) :
- PackageSpec(;
- uuid = uuid,
- name = name,
- path = path === nothing ? entry.path : path,
- repo = repo == GitRepo() ? entry.repo : repo,
- pinned = entry.pinned,
- tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
- version = load_version(entry.version, isfixed(entry), preserve),
- ))
+ push!(
+ pkgs_direct, entry === nothing ?
+ PackageSpec(; uuid, name, path, repo) :
+ PackageSpec(;
+ uuid = uuid,
+ name = name,
+ path = path === nothing ? entry.path : path,
+ repo = repo == GitRepo() ? entry.repo : repo,
+ pinned = entry.pinned,
+ tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
+ version = load_version(entry.version, isfixed(entry), preserve),
+ )
+ )
end
return pkgs_direct
end
-function load_manifest_deps(manifest::Manifest, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_ALL)
+function load_manifest_deps(
+ manifest::Manifest, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_ALL
+ )
pkgs = copy(pkgs)
for (uuid, entry) in manifest
findfirst(pkg -> pkg.uuid == uuid, pkgs) === nothing || continue # do not duplicate packages
- push!(pkgs, PackageSpec(
- uuid = uuid,
- name = entry.name,
- path = entry.path,
- pinned = entry.pinned,
- repo = entry.repo,
- tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
- version = load_version(entry.version, isfixed(entry), preserve),
- ))
+ push!(
+ pkgs, PackageSpec(
+ uuid = uuid,
+ name = entry.name,
+ path = entry.path,
+ pinned = entry.pinned,
+ repo = entry.repo,
+ tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
+ version = load_version(entry.version, isfixed(entry), preserve),
+ )
+ )
end
return pkgs
end
-function load_all_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_ALL)
- pkgs = load_manifest_deps(env.manifest, pkgs; preserve=preserve)
+function load_all_deps(
+ env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_ALL
+ )
+ pkgs = load_manifest_deps(env.manifest, pkgs; preserve = preserve)
# Sources takes presedence over the manifest...
for pkg in pkgs
- path, repo = get_path_repo(env.project, pkg.name)
+ path, repo = get_path_repo(env.project, env.project_file, env.manifest_file, pkg.name)
if path !== nothing
+ # Path from [sources] takes precedence - clear tree_hash and repo from manifest
+ pkg.tree_hash = nothing
+ pkg.repo = GitRepo() # Clear any repo info
pkg.path = path
end
if repo.source !== nothing
+ # Repo from [sources] takes precedence - clear path from manifest
+ pkg.path = nothing
pkg.repo.source = repo.source
end
if repo.rev !== nothing
pkg.repo.rev = repo.rev
end
end
- return load_direct_deps(env, pkgs; preserve=preserve)
+ return load_direct_deps(env, pkgs; preserve = preserve)
end
function load_all_deps_loadable(env::EnvCache)
@@ -178,7 +281,7 @@ function load_all_deps_loadable(env::EnvCache)
end
-function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPlatform())::Bool
+function is_instantiated(env::EnvCache, workspace::Bool = false; platform = HostPlatform())::Bool
# Load everything
if workspace
pkgs = Operations.load_all_deps(env)
@@ -191,7 +294,7 @@ function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPl
# so only add it if it isn't there
idx = findfirst(x -> x.uuid == env.pkg.uuid, pkgs)
if idx === nothing
- push!(pkgs, Types.PackageSpec(name=env.pkg.name, uuid=env.pkg.uuid, version=env.pkg.version, path=dirname(env.project_file)))
+ push!(pkgs, Types.PackageSpec(name = env.pkg.name, uuid = env.pkg.uuid, version = env.pkg.version, path = dirname(env.project_file)))
end
else
# Make sure artifacts for project exist even if it is not a package
@@ -201,52 +304,176 @@ function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPl
return all(pkg -> is_package_downloaded(env.manifest_file, pkg; platform), pkgs)
end
-function update_manifest!(env::EnvCache, pkgs::Vector{PackageSpec}, deps_map, julia_version)
+function update_manifest!(env::EnvCache, pkgs::Vector{PackageSpec}, deps_map, julia_version, registries::Vector{Registry.RegistryInstance})
manifest = env.manifest
empty!(manifest)
+ # Determine which registries are used by tracking packages
+ used_registry_uuids = Set{UUID}()
+ pkg_to_registries = Dict{UUID, Vector{UUID}}()
+
+ for pkg in pkgs
+ if tracking_registered_version(pkg, julia_version)
+ # Find all registries that have this package version
+ pkg_reg_uuids = UUID[]
+ for reg in registries
+ reg_pkg = get(reg, pkg.uuid, nothing)
+ reg_pkg === nothing && continue
+ pkg_info = Registry.registry_info(reg, reg_pkg)
+ version_info = get(pkg_info.version_info, pkg.version, nothing)
+ version_info === nothing && continue
+ push!(pkg_reg_uuids, reg.uuid)
+ push!(used_registry_uuids, reg.uuid)
+ end
+ if !isempty(pkg_reg_uuids)
+ pkg_to_registries[pkg.uuid] = pkg_reg_uuids
+ end
+ end
+ end
+
+ # Build registry entries and name map for used registries only
+ uuid_to_name = Dict{UUID, String}()
+ registry_entries = Dict{String, ManifestRegistryEntry}()
+ for reg in registries
+ reg.uuid in used_registry_uuids || continue
+ reg_name = getfield(reg, :name)
+ uuid_to_name[reg.uuid] = reg_name
+ registry_entries[reg_name] = ManifestRegistryEntry(
+ id = reg_name,
+ uuid = reg.uuid,
+ url = getfield(reg, :repo),
+ )
+ end
+
+ # Build package entries
for pkg in pkgs
- entry = PackageEntry(;name = pkg.name, version = pkg.version, pinned = pkg.pinned,
- tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid=pkg.uuid)
+ entry = PackageEntry(;
+ name = pkg.name,
+ # PackageEntry requires version::Union{VersionNumber, Nothing}
+ # pkg.version may be a VersionSpec in some cases (e.g., when freeing a package)
+ # so we convert non-VersionNumber values to nothing
+ version = pkg.version isa VersionNumber ? pkg.version : nothing,
+ pinned = pkg.pinned,
+ tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid = pkg.uuid
+ )
if is_stdlib(pkg.uuid, julia_version)
# Only set stdlib versions for versioned (external) stdlibs
entry.version = stdlib_version(pkg.uuid, julia_version)
end
entry.deps = deps_map[pkg.uuid]
+
+ # Convert registry UUIDs to names
+ if haskey(pkg_to_registries, pkg.uuid)
+ reg_names = String[]
+ for reg_uuid in pkg_to_registries[pkg.uuid]
+ if haskey(uuid_to_name, reg_uuid)
+ push!(reg_names, uuid_to_name[reg_uuid])
+ end
+ end
+ entry.registries = reg_names
+ end
+
env.manifest[pkg.uuid] = entry
end
prune_manifest(env)
- record_project_hash(env)
+
+ env.manifest.registries = registry_entries
+ env.manifest.manifest_format = v"2.1.0"
+ return record_project_hash(env)
+end
+
+"""
+ get_project_syntax_version(p::Project) -> VersionNumber
+
+Extract the syntax version from a Project.
+
+This function determines which version of Julia syntax a package uses, following
+this precedence order:
+
+1. If `syntax.julia_version` is present in the Project.toml, use that value
+2. If `compat.julia` is specified, use the minimum version from the compat range
+3. Otherwise, default to the current Julia VERSION
+
+This information is used to populate the `syntax.julia_version` field in the
+Manifest.toml, allowing Base's loading system to parse each package with the
+correct syntax version.
+"""
+function get_project_syntax_version(p::Project)::VersionNumber
+ # First check syntax.julia_version entry in Project.other
+ if p.julia_syntax_version !== nothing
+ return p.julia_syntax_version
+ end
+
+ # If not found, default to minimum(compat["julia"])
+ if haskey(p.compat, "julia")
+ julia_compat = p.compat["julia"]
+ # Get the minimum version from the first range
+ if !isempty(julia_compat.val.ranges)
+ first_range = first(julia_compat.val.ranges)
+ lower_bound = first_range.lower
+ return VersionNumber(lower_bound.t[1], lower_bound.t[2], lower_bound.t[3])
+ end
+ end
+
+ # Finally, if neither of those are set, default to the current Julia version
+ return dropbuild(VERSION)
end
# This has to be done after the packages have been downloaded
# since we need access to the Project file to read the information
# about extensions
-function fixups_from_projectfile!(env::EnvCache)
+function fixups_from_projectfile!(ctx::Context)
+ env = ctx.env
for pkg in values(env.manifest)
- # isfile_casesenstive within locate_project_file used to error on Windows if given a
- # relative path so abspath it to be extra safe https://github.com/JuliaLang/julia/pull/55220
- project_file = Base.locate_project_file(abspath(source_path(env.manifest_file, pkg)))
- if project_file isa String && isfile(project_file)
- p = Types.read_project(project_file)
- pkg.weakdeps = p.weakdeps
- pkg.exts = p.exts
- pkg.entryfile = p.entryfile
- for (name, _) in p.weakdeps
- if !haskey(p.deps, name)
+ if ctx.julia_version !== VERSION && is_stdlib(pkg.uuid, ctx.julia_version)
+ # Special handling for non-current julia_version resolving given the source for historical stdlibs
+ # isn't available at this stage as Pkg thinks it should not be needed, so rely on STDLIBS_BY_VERSION
+ stdlibs = Types.get_last_stdlibs(ctx.julia_version)
+ p = stdlibs[pkg.uuid]
+ pkg.weakdeps = Dict{String, Base.UUID}(stdlibs[uuid].name => uuid for uuid in p.weakdeps)
+ # pkg.exts = p.exts # TODO: STDLIBS_BY_VERSION doesn't record this
+ # pkg.entryfile = p.entryfile # TODO: STDLIBS_BY_VERSION doesn't record this
+ for (name, uuid) in pkg.weakdeps
+ if !(uuid in p.deps)
delete!(pkg.deps, name)
end
end
+ else
+ # normal mode based on project files.
+ # isfile_casesensitive within locate_project_file used to error on Windows if given a
+ # relative path so abspath it to be extra safe https://github.com/JuliaLang/julia/pull/55220
+ sourcepath = source_path(env.manifest_file, pkg)
+ if sourcepath === nothing
+ pkgerror("could not find source path for package $(pkg.name) based on manifest $(env.manifest_file)")
+ end
+ project_file = Base.locate_project_file(abspath(sourcepath))
+ if project_file isa String && isfile(project_file)
+ p = Types.read_project(project_file)
+ pkg.weakdeps = p.weakdeps
+ pkg.exts = p.exts
+ pkg.entryfile = p.entryfile
+ pkg.julia_syntax_version = get_project_syntax_version(p)
+
+ for (name, _) in p.weakdeps
+ if !haskey(p.deps, name)
+ delete!(pkg.deps, name)
+ end
+ end
+ end
end
end
- prune_manifest(env)
+ return prune_manifest(env)
end
####################
# Registry Loading #
####################
-function load_tree_hash!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, julia_version)
+function load_tree_hash!(
+ registries::Vector{Registry.RegistryInstance},
+ pkg::PackageSpec,
+ julia_version,
+ )
if is_stdlib(pkg.uuid, julia_version) && pkg.tree_hash !== nothing
# manifests from newer julia versions might have stdlibs that are upgradable (FORMER_STDLIBS)
# that have tree_hash recorded, which we need to clear for this version where they are not upgradable
@@ -259,7 +486,7 @@ function load_tree_hash!(registries::Vector{Registry.RegistryInstance}, pkg::Pac
for reg in registries
reg_pkg = get(reg, pkg.uuid, nothing)
reg_pkg === nothing && continue
- pkg_info = Registry.registry_info(reg_pkg)
+ pkg_info = Registry.registry_info(reg, reg_pkg)
version_info = get(pkg_info.version_info, pkg.version, nothing)
version_info === nothing && continue
hash′ = version_info.git_tree_sha1
@@ -277,6 +504,32 @@ end
#######################################
get_compat(proj::Project, name::String) = haskey(proj.compat, name) ? proj.compat[name].val : Types.VersionSpec()
get_compat_str(proj::Project, name::String) = haskey(proj.compat, name) ? proj.compat[name].str : nothing
+
+# Helper to check if compat is compatible with a non-upgradable stdlib, warn if not, and return appropriate VersionSpec
+function check_stdlib_compat(name::String, uuid::UUID, compat::VersionSpec, project::Project, project_file::String, julia_version)
+ is_stdlib(uuid) && !(uuid in Types.UPGRADABLE_STDLIBS_UUIDS) || return compat
+
+ stdlib_ver = stdlib_version(uuid, julia_version)
+ stdlib_ver === nothing && return compat
+ isempty(compat) && return compat
+ stdlib_ver in compat && return compat
+
+ compat_str = get_compat_str(project, name)
+ if compat_str !== nothing
+ suggested_compat = string(compat_str, ", ", stdlib_ver.major == 0 ? string(stdlib_ver.major, ".", stdlib_ver.minor) : string(stdlib_ver.major))
+ @warn """Ignoring incompatible compat entry `$name = $(repr(compat_str))` in $(repr(project_file)).
+ $name is a non-upgradable standard library with version $stdlib_ver in the current Julia version.
+ Fix by setting compat to $(repr(suggested_compat)) to mark support of the current version $stdlib_ver.""" maxlog = 1
+ end
+ return VersionSpec("*")
+end
+
+# Get compat for a dependency, checking if it's a non-upgradable stdlib and warning if incompatible
+function get_compat_with_stdlib_check(project::Project, project_file::String, name::String, uuid::UUID, julia_version)
+ compat = get_compat(project, name)
+ return check_stdlib_compat(name, uuid, compat, project, project_file, julia_version)
+end
+
function set_compat(proj::Project, name::String, compat::String)
semverspec = Types.semver_spec(compat, throw = false)
isnothing(semverspec) && return false
@@ -298,22 +551,22 @@ function reset_all_compat!(proj::Project)
return nothing
end
-function collect_project(pkg::Union{PackageSpec, Nothing}, path::String)
+function collect_project(pkg::Union{PackageSpec, Nothing}, path::String, manifest_file::String, julia_version)
deps = PackageSpec[]
weakdeps = Set{UUID}()
- project_file = projectfile_path(path; strict=true)
- project = project_file === nothing ? Project() : read_project(project_file)
+ project_file = projectfile_path(path; strict = true)
+ project = project_file === nothing ? Project() : read_project(project_file)
julia_compat = get_compat(project, "julia")
- if !isnothing(julia_compat) && !(VERSION in julia_compat)
- pkgerror("julia version requirement from Project.toml's compat section not satisfied for package at `$path`")
+ if !isnothing(julia_compat) && !isnothing(julia_version) && !(julia_version in julia_compat)
+ pkgerror("julia version requirement for package at `$path` not satisfied: compat entry \"julia = $(get_compat_str(project, "julia"))\" does not include Julia version $julia_version")
end
for (name, uuid) in project.deps
- path, repo = get_path_repo(project, name)
- vspec = get_compat(project, name)
- push!(deps, PackageSpec(name=name, uuid=uuid, version=vspec, path=path, repo=repo))
+ dep_path, repo = get_path_repo(project, project_file, manifest_file, name)
+ vspec = get_compat_with_stdlib_check(project, something(project_file, path), name, uuid, julia_version)
+ push!(deps, PackageSpec(name = name, uuid = uuid, version = vspec, path = dep_path, repo = repo))
end
for (name, uuid) in project.weakdeps
- vspec = get_compat(project, name)
+ vspec = get_compat_with_stdlib_check(project, something(project_file, path), name, uuid, julia_version)
push!(deps, PackageSpec(name, uuid, vspec))
push!(weakdeps, uuid)
end
@@ -329,27 +582,33 @@ function collect_project(pkg::Union{PackageSpec, Nothing}, path::String)
end
is_tracking_path(pkg) = pkg.path !== nothing
-is_tracking_repo(pkg) = pkg.repo.source !== nothing
+is_tracking_repo(pkg) = (pkg.repo.source !== nothing || pkg.repo.rev !== nothing)
is_tracking_registry(pkg) = !is_tracking_path(pkg) && !is_tracking_repo(pkg)
isfixed(pkg) = !is_tracking_registry(pkg) || pkg.pinned
function collect_developed!(env::EnvCache, pkg::PackageSpec, developed::Vector{PackageSpec})
- source = project_rel_path(env, source_path(env.manifest_file, pkg))
+ source = source_path(env.manifest_file, pkg)
source_env = EnvCache(projectfile_path(source))
pkgs = load_project_deps(source_env.project, source_env.project_file, source_env.manifest, source_env.manifest_file)
- for pkg in filter(is_tracking_path, pkgs)
+ for pkg in pkgs
if any(x -> x.uuid == pkg.uuid, developed)
continue
end
- # normalize path
- # TODO: If path is collected from project, it is relative to the project file
- # otherwise relative to manifest file....
- pkg.path = Types.relative_project_path(env.manifest_file,
- project_rel_path(source_env,
- source_path(source_env.manifest_file, pkg)))
- push!(developed, pkg)
- collect_developed!(env, pkg, developed)
+ if is_tracking_path(pkg)
+ # normalize path
+ # TODO: If path is collected from project, it is relative to the project file
+ # otherwise relative to manifest file....
+ pkg.path = Types.relative_project_path(
+ env.manifest_file,
+ source_path(source_env.manifest_file, pkg)
+ )
+ push!(developed, pkg)
+ collect_developed!(env, pkg, developed)
+ elseif is_tracking_repo(pkg)
+ push!(developed, pkg)
+ end
end
+ return
end
function collect_developed(env::EnvCache, pkgs::Vector{PackageSpec})
@@ -360,62 +619,112 @@ function collect_developed(env::EnvCache, pkgs::Vector{PackageSpec})
return developed
end
-function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UUID, String})
- deps_map = Dict{UUID,Vector{PackageSpec}}()
- weak_map = Dict{UUID,Set{UUID}}()
+function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UUID, String}, julia_version)
+ deps_map = Dict{UUID, Vector{PackageSpec}}()
+ weak_map = Dict{UUID, Set{UUID}}()
uuid = Types.project_uuid(env)
- deps, weakdeps = collect_project(env.pkg, dirname(env.project_file))
+ deps, weakdeps = collect_project(env.pkg, dirname(env.project_file), env.manifest_file, julia_version)
deps_map[uuid] = deps
weak_map[uuid] = weakdeps
names[uuid] = env.pkg === nothing ? "project" : env.pkg.name
for (path, project) in env.workspace
uuid = Types.project_uuid(project, path)
- pkg = project.name === nothing ? nothing : PackageSpec(name=project.name, uuid=uuid)
- deps, weakdeps = collect_project(pkg, path)
+ pkg = project.name === nothing ? nothing : PackageSpec(name = project.name, uuid = uuid)
+ deps, weakdeps = collect_project(pkg, path, env.manifest_file, julia_version)
deps_map[Types.project_uuid(env)] = deps
weak_map[Types.project_uuid(env)] = weakdeps
names[uuid] = project.name === nothing ? "project" : project.name
end
+ pkg_queue = collect(pkgs)
+ pkg_by_uuid = Dict{UUID, PackageSpec}()
for pkg in pkgs
+ pkg.uuid === nothing && continue
+ pkg_by_uuid[pkg.uuid] = pkg
+ end
+ new_fixed_pkgs = PackageSpec[]
+ seen = Set(keys(pkg_by_uuid))
+ while !isempty(pkg_queue)
+ pkg = popfirst!(pkg_queue)
+ pkg.uuid === nothing && continue
# add repo package if necessary
source = source_path(env.manifest_file, pkg)
- path = source === nothing ? nothing : project_rel_path(env, source)
+ path = source
if (path === nothing || !isdir(path)) && (pkg.repo.rev !== nothing || pkg.repo.source !== nothing)
# ensure revved package is installed
# pkg.tree_hash is set in here
- Types.handle_repo_add!(Types.Context(env=env), pkg)
+ Types.handle_repo_add!(Types.Context(env = env), pkg)
# Recompute path
- path = project_rel_path(env, source_path(env.manifest_file, pkg))
+ path = source_path(env.manifest_file, pkg)
end
if !isdir(path)
- pkgerror("expected package $(err_rep(pkg)) to exist at path `$path`")
+ # Find which packages depend on this missing package for better error reporting
+ dependents = String[]
+ for (dep_uuid, dep_entry) in env.manifest.deps
+ if pkg.uuid in values(dep_entry.deps) || pkg.uuid in values(dep_entry.weakdeps)
+ push!(dependents, dep_entry.name === nothing ? "unknown package [$dep_uuid]" : dep_entry.name)
+ end
+ end
+
+ error_msg = "expected package $(err_rep(pkg)) to exist at path `$path`"
+ error_msg *= "\n\nThis package is referenced in the manifest file: $(env.manifest_file)"
+
+ if !isempty(dependents)
+ if length(dependents) == 1
+ error_msg *= "\nIt is required by: $(dependents[1])"
+ else
+ error_msg *= "\nIt is required by:\n$(join([" - $dep" for dep in dependents], "\n"))"
+ end
+ end
+ pkgerror(error_msg)
end
- deps, weakdeps = collect_project(pkg, path)
+ deps, weakdeps = collect_project(pkg, path, env.manifest_file, julia_version)
deps_map[pkg.uuid] = deps
weak_map[pkg.uuid] = weakdeps
+ for dep in deps
+ names[dep.uuid] = dep.name
+ dep_uuid = dep.uuid
+ if !is_tracking_registry(dep) && dep_uuid !== nothing && !(dep_uuid in seen)
+ # Only recursively collect path sources if the path actually exists
+ # Repo sources (with URL/rev) are always collected
+ if is_tracking_path(dep)
+ dep_source = source_path(env.manifest_file, dep)
+ if dep_source !== nothing && isdir(dep_source)
+ push!(pkg_queue, dep)
+ push!(new_fixed_pkgs, dep)
+ pkg_by_uuid[dep_uuid] = dep
+ push!(seen, dep_uuid)
+ end
+ else
+ # Repo source - always add to queue
+ push!(pkg_queue, dep)
+ push!(new_fixed_pkgs, dep)
+ pkg_by_uuid[dep_uuid] = dep
+ push!(seen, dep_uuid)
+ end
+ elseif dep_uuid !== nothing && !haskey(pkg_by_uuid, dep_uuid)
+ pkg_by_uuid[dep_uuid] = dep
+ end
+ end
end
- fixed = Dict{UUID,Resolve.Fixed}()
+ fixed = Dict{UUID, Resolve.Fixed}()
# Collect the dependencies for the fixed packages
for (uuid, deps) in deps_map
q = Dict{UUID, VersionSpec}()
for dep in deps
names[dep.uuid] = dep.name
- q[dep.uuid] = dep.version
- end
- if Types.is_project_uuid(env, uuid)
- fix_pkg = env.pkg
- else
- idx = findfirst(pkg -> pkg.uuid == uuid, pkgs)
- fix_pkg = pkgs[idx]
+ dep_version = dep.version
+ dep_version === nothing && continue
+ q[dep.uuid] = dep_version isa VersionSpec ? dep_version : VersionSpec(dep_version)
end
+ fix_pkg = Types.is_project_uuid(env, uuid) ? env.pkg : get(pkg_by_uuid, uuid, nothing)
fixpkgversion = fix_pkg === nothing ? v"0.0.0" : fix_pkg.version
- fixed[uuid] = Resolve.Fixed(fixpkgversion, q, weak_map[uuid])
+ fixed[uuid] = Resolve.Fixed(fixpkgversion, q, get(weak_map, uuid, Set{UUID}()))
end
- return fixed
+ return fixed, new_fixed_pkgs
end
# drops build detail in version but keeps the main prerelease context
@@ -430,6 +739,12 @@ function get_compat_workspace(env, name)
for (_, project) in env.workspace
compat = intersect(compat, get_compat(project, name))
end
+
+ uuid = get(env.project.deps, name, nothing)
+ if uuid !== nothing
+ compat = check_stdlib_compat(name, uuid, compat, env.project, env.project_file, VERSION)
+ end
+
return compat
end
@@ -438,22 +753,25 @@ end
# sets version to a VersionNumber
# adds any other packages which may be in the dependency graph
# all versioned packages should have a `tree_hash`
-function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version,
- installed_only::Bool)
+function resolve_versions!(
+ env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version,
+ installed_only::Bool
+ )
installed_only = installed_only || OFFLINE_MODE[]
+
# compatibility
if julia_version !== nothing
# only set the manifest julia_version if ctx.julia_version is not nothing
- env.manifest.julia_version = dropbuild(VERSION)
+ env.manifest.julia_version = dropbuild(julia_version)
v = intersect(julia_version, get_compat_workspace(env, "julia"))
if isempty(v)
- @warn "julia version requirement for project not satisfied" _module=nothing _file=nothing
+ @warn "julia version requirement for project not satisfied" _module = nothing _file = nothing
end
end
jll_fix = Dict{UUID, VersionNumber}()
for pkg in pkgs
- if !is_stdlib(pkg.uuid) && endswith(pkg.name, "_jll") && pkg.version isa VersionNumber
+ if !is_stdlib(pkg.uuid, julia_version) && endswith(pkg.name, "_jll") && pkg.version isa VersionNumber
jll_fix[pkg.uuid] = pkg.version
end
end
@@ -468,7 +786,12 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
end
end
# this also sets pkg.version for fixed packages
- fixed = collect_fixed!(env, filter(!is_tracking_registry, pkgs), names)
+ pkgs_fixed = filter(!is_tracking_registry, pkgs)
+ fixed, new_fixed_pkgs = collect_fixed!(env, pkgs_fixed, names, julia_version)
+ for new_pkg in new_fixed_pkgs
+ any(x -> x.uuid == new_pkg.uuid, pkgs) && continue
+ push!(pkgs, new_pkg)
+ end
# non fixed packages are `add`ed by version: their version is either restricted or free
# fixed packages are `dev`ed or `add`ed by repo
# at this point, fixed packages have a version and `deps`
@@ -480,8 +803,11 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
compat = get_compat_workspace(env, pkg.name)
v = intersect(pkg.version, compat)
if isempty(v)
- throw(Resolve.ResolverError(
- "empty intersection between $(pkg.name)@$(pkg.version) and project compatibility $(compat)"))
+ throw(
+ Resolve.ResolverError(
+ "empty intersection between $(pkg.name)@$(pkg.version) and project compatibility $(compat)"
+ )
+ )
end
# Work around not clobbering 0.x.y+ for checked out old type of packages
if !(pkg.version isa VersionNumber)
@@ -496,8 +822,9 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
# Unless using the unbounded or historical resolver, always allow stdlibs to update. Helps if the previous resolve
# happened on a different julia version / commit and the stdlib version in the manifest is not the current stdlib version
unbind_stdlibs = julia_version === VERSION
- reqs = Resolve.Requires(pkg.uuid => is_stdlib(pkg.uuid) && unbind_stdlibs ? VersionSpec("*") : VersionSpec(pkg.version) for pkg in pkgs)
- graph, compat_map = deps_graph(env, registries, names, reqs, fixed, julia_version, installed_only)
+ reqs = Resolve.Requires(pkg.uuid => is_stdlib(pkg.uuid, julia_version) && unbind_stdlibs ? VersionSpec("*") : VersionSpec(pkg.version) for pkg in pkgs)
+ deps_map_compressed, compat_map_compressed, weak_deps_map_compressed, weak_compat_map_compressed, pkg_versions_map, pkg_versions_per_registry, uuid_to_name, reqs, fixed = deps_graph(env, registries, names, reqs, fixed, julia_version, installed_only)
+ graph = Resolve.Graph(deps_map_compressed, compat_map_compressed, weak_deps_map_compressed, weak_compat_map_compressed, pkg_versions_map, pkg_versions_per_registry, uuid_to_name, reqs, fixed, false, julia_version)
Resolve.simplify_graph!(graph)
vers = Resolve.resolve(graph)
@@ -508,6 +835,13 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
# We only fixup a JLL if the old major/minor/patch matches the new major/minor/patch
if old_v !== nothing && Base.thispatch(old_v) == Base.thispatch(vers_fix[uuid])
vers_fix[uuid] = old_v
+ # Add old_v to pkg_versions_map so it's considered available
+ # even if it was yanked (needed for sysimage compatibility)
+ versions_for_pkg = get!(pkg_versions_map, uuid, VersionNumber[])
+ if !(old_v in versions_for_pkg)
+ push!(versions_for_pkg, old_v)
+ sort!(versions_for_pkg)
+ end
end
end
vers = vers_fix
@@ -521,9 +855,13 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
pkg.version = vers[pkg.uuid]
else
name = is_stdlib(uuid) ? stdlib_infos()[uuid].name : registered_name(registries, uuid)
- push!(pkgs, PackageSpec(;name=name, uuid=uuid, version=ver))
+ push!(pkgs, PackageSpec(; name = name, uuid = uuid, version = ver))
end
end
+
+ # Collect all UUIDs that will be in the manifest
+ pkgs_uuids = Set{UUID}(pkg.uuid for pkg in pkgs)
+
final_deps_map = Dict{UUID, Dict{String, UUID}}()
for pkg in pkgs
load_tree_hash!(registries, pkg, julia_version)
@@ -531,13 +869,25 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
if pkg.uuid in keys(fixed)
deps_fixed = Dict{String, UUID}()
for dep in keys(fixed[pkg.uuid].requires)
+ # Only include deps that are actually in the manifest
+ dep in pkgs_uuids || continue
deps_fixed[names[dep]] = dep
end
deps_fixed
else
d = Dict{String, UUID}()
- for (uuid, _) in compat_map[pkg.uuid][pkg.version]
- d[names[uuid]] = uuid
+ available_versions = get(Vector{VersionNumber}, pkg_versions_map, pkg.uuid)
+ if !(pkg.version in available_versions)
+ pkgerror("version $(pkg.version) of package $(pkg.name) is not available. Available versions: $(join(available_versions, ", "))")
+ end
+ deps_for_version = Registry.query_deps_for_version(
+ deps_map_compressed, weak_deps_map_compressed,
+ pkg.uuid, pkg.version
+ )
+ for uuid in deps_for_version
+ # Only include deps that are actually in the manifest
+ uuid in pkgs_uuids || continue
+ d[names[uuid]] = uuid
end
d
end
@@ -549,29 +899,54 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
return final_deps_map
end
-get_or_make!(d::Dict{K,V}, k::K) where {K,V} = get!(d, k) do; V() end
+get_or_make!(d::Dict{K, V}, k::K) where {K, V} = get!(d, k) do;
+ V()
+end
const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e")
const PKGORIGIN_HAVE_VERSION = :version in fieldnames(Base.PkgOrigin)
-function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}, uuid_to_name::Dict{UUID,String},
- reqs::Resolve.Requires, fixed::Dict{UUID,Resolve.Fixed}, julia_version,
- installed_only::Bool)
+function deps_graph(
+ env::EnvCache, registries::Vector{Registry.RegistryInstance}, uuid_to_name::Dict{UUID, String},
+ reqs::Resolve.Requires, fixed::Dict{UUID, Resolve.Fixed}, julia_version,
+ installed_only::Bool
+ )
uuids = Set{UUID}()
union!(uuids, keys(reqs))
union!(uuids, keys(fixed))
- for fixed_uuids in map(fx->keys(fx.requires), values(fixed))
+ for fixed_uuids in map(fx -> keys(fx.requires), values(fixed))
union!(uuids, fixed_uuids)
end
+ # Collect all weak dependency UUIDs from fixed packages
+ all_weak_uuids = Set{UUID}()
+ for fx in values(fixed)
+ union!(all_weak_uuids, fx.weak)
+ end
+
stdlibs_for_julia_version = Types.get_last_stdlibs(julia_version)
seen = Set{UUID}()
- # pkg -> version -> (dependency => compat):
- all_compat = Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}}()
- weak_compat = Dict{UUID,Dict{VersionNumber,Set{UUID}}}()
+ # pkg -> vector of (registry data) for handling multiple registries correctly
+ # Each element in the vector represents data from one registry
+ all_deps_compressed = Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}}()
+ all_compat_compressed = Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}}()
+ weak_deps_compressed = Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}}()
+ weak_compat_compressed = Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}}()
+
+ # pkg -> list of valid versions:
+ pkg_versions = Dict{UUID, Vector{VersionNumber}}()
+
+ # pkg -> vector of (versions from each registry) - parallel to the compressed data vectors
+ # This tracks which versions came from which registry to avoid cross-registry compat pollution
+ pkg_versions_per_registry = Dict{UUID, Vector{Set{VersionNumber}}}()
for (fp, fx) in fixed
- all_compat[fp] = Dict(fx.version => Dict{UUID,VersionSpec}())
+ all_deps_compressed[fp] = [Dict{VersionRange, Set{UUID}}()]
+ all_compat_compressed[fp] = [Dict{VersionRange, Dict{UUID, VersionSpec}}()]
+ weak_deps_compressed[fp] = [Dict{VersionRange, Set{UUID}}()]
+ weak_compat_compressed[fp] = [Dict{VersionRange, Dict{UUID, VersionSpec}}()]
+ pkg_versions[fp] = [fx.version]
+ pkg_versions_per_registry[fp] = [Set([fx.version])]
end
while true
@@ -580,85 +955,138 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}
for uuid in unseen
push!(seen, uuid)
uuid in keys(fixed) && continue
- all_compat_u = get_or_make!(all_compat, uuid)
- weak_compat_u = get_or_make!(weak_compat, uuid)
uuid_is_stdlib = haskey(stdlibs_for_julia_version, uuid)
# If we're requesting resolution of a package that is an
# unregistered stdlib we must special-case it here. This is further
# complicated by the fact that we can ask this question relative to
# a Julia version.
+ # CRITICAL: Never resolve stdlibs from registry for target julia_version
if (julia_version != VERSION && is_unregistered_stdlib(uuid)) || uuid_is_stdlib
# We use our historical stdlib versioning data to unpack the version, deps and weakdeps of this uuid
stdlib_info = stdlibs_for_julia_version[uuid]
v = something(stdlib_info.version, VERSION)
- all_compat_u_vr = get_or_make!(all_compat_u, v)
+ # For stdlibs, create a single registry entry
+ stdlib_deps = Dict{VersionRange, Set{UUID}}()
+ stdlib_compat = Dict{VersionRange, Dict{UUID, VersionSpec}}()
+ stdlib_weak_deps = Dict{VersionRange, Set{UUID}}()
+ stdlib_weak_compat = Dict{VersionRange, Dict{UUID, VersionSpec}}()
+
+ vrange = VersionRange(v, v)
+ deps_set = Set{UUID}()
for other_uuid in stdlib_info.deps
push!(uuids, other_uuid)
- all_compat_u_vr[other_uuid] = VersionSpec()
+ push!(deps_set, other_uuid)
end
+ stdlib_deps[vrange] = deps_set
+ stdlib_compat[vrange] = Dict{UUID, VersionSpec}()
if !isempty(stdlib_info.weakdeps)
- weak_all_compat_u_vr = get_or_make!(weak_compat_u, v)
+ weak_deps_set = Set{UUID}()
for other_uuid in stdlib_info.weakdeps
push!(uuids, other_uuid)
- all_compat_u_vr[other_uuid] = VersionSpec()
- push!(weak_all_compat_u_vr, other_uuid)
+ push!(weak_deps_set, other_uuid)
end
+ stdlib_weak_deps[vrange] = weak_deps_set
+ stdlib_weak_compat[vrange] = Dict{UUID, VersionSpec}()
end
+
+ all_deps_compressed[uuid] = [stdlib_deps]
+ all_compat_compressed[uuid] = [stdlib_compat]
+ weak_deps_compressed[uuid] = [stdlib_weak_deps]
+ weak_compat_compressed[uuid] = [stdlib_weak_compat]
+ pkg_versions[uuid] = [v]
+ pkg_versions_per_registry[uuid] = [Set([v])]
else
+ # Accumulate valid versions from all registries
+ valid_versions = VersionNumber[]
+ # Store per-registry data separately - don't merge!
+ pkg_deps_list = Vector{Dict{VersionRange, Set{UUID}}}()
+ pkg_compat_list = Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}()
+ pkg_weak_deps_list = Vector{Dict{VersionRange, Set{UUID}}}()
+ pkg_weak_compat_list = Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}()
+ pkg_versions_per_reg = Vector{Set{VersionNumber}}()
+
for reg in registries
pkg = get(reg, uuid, nothing)
pkg === nothing && continue
- info = Registry.registry_info(pkg)
-
- function add_compat!(d, cinfo)
- for (v, compat_info) in cinfo
- # Filter yanked and if we are in offline mode also downloaded packages
- # TODO, pull this into a function
- Registry.isyanked(info, v) && continue
- if installed_only
- pkg_spec = PackageSpec(name=pkg.name, uuid=pkg.uuid, version=v, tree_hash=Registry.treehash(info, v))
- is_package_downloaded(env.manifest_file, pkg_spec) || continue
- end
+ info = Registry.registry_info(reg, pkg)
+
+ # Build filtered version list for this registry
+ reg_valid_versions = Set{VersionNumber}()
+ for v in keys(info.version_info)
+ # Filter yanked and if we are in offline mode also downloaded packages
+ Registry.isyanked(info, v) && continue
+ if installed_only
+ pkg_spec = PackageSpec(name = pkg.name, uuid = pkg.uuid, version = v, tree_hash = Registry.treehash(info, v))
+ is_package_downloaded(env.manifest_file, pkg_spec) || continue
+ end
- # Skip package version that are not the same as external packages in sysimage
- if PKGORIGIN_HAVE_VERSION && RESPECT_SYSIMAGE_VERSIONS[] && julia_version == VERSION
- pkgid = Base.PkgId(uuid, pkg.name)
- if Base.in_sysimage(pkgid)
- pkgorigin = get(Base.pkgorigins, pkgid, nothing)
- if pkgorigin !== nothing && pkgorigin.version !== nothing
- if v != pkgorigin.version
- continue
- end
+ # Skip package version that are not the same as external packages in sysimage
+ if PKGORIGIN_HAVE_VERSION && RESPECT_SYSIMAGE_VERSIONS[] && julia_version == VERSION
+ pkgid = Base.PkgId(uuid, pkg.name)
+ if Base.in_sysimage(pkgid)
+ pkgorigin = get(Base.pkgorigins, pkgid, nothing)
+ if pkgorigin !== nothing && pkgorigin.version !== nothing
+ if v != pkgorigin.version
+ continue
end
end
end
- dv = get_or_make!(d, v)
- merge!(dv, compat_info)
- union!(uuids, keys(compat_info))
end
+
+ push!(reg_valid_versions, v)
+ push!(valid_versions, v)
+ end
+
+ # Only add this registry's data if it has valid versions
+ if !isempty(reg_valid_versions)
+ # Store the full compressed data along with which versions are valid
+ # The query function will check version membership to avoid cross-registry pollution
+ push!(pkg_deps_list, info.deps)
+ push!(pkg_compat_list, info.compat)
+ push!(pkg_weak_deps_list, info.weak_deps)
+ push!(pkg_weak_compat_list, info.weak_compat)
+ push!(pkg_versions_per_reg, reg_valid_versions)
end
- add_compat!(all_compat_u, Registry.compat_info(info))
- weak_compat_info = Registry.weak_compat_info(info)
- if weak_compat_info !== nothing
- add_compat!(all_compat_u, weak_compat_info)
- # Version to Set
- for (v, compat_info) in weak_compat_info
- weak_compat_u[v] = keys(compat_info)
+
+ # Collect all dependency UUIDs for discovery
+ for deps_dict in (info.deps, info.weak_deps)
+ for (vrange, deps_set) in deps_dict
+ union!(uuids, deps_set)
end
end
end
+
+ # After processing all registries, sort and store the accumulated versions
+ pkg_versions[uuid] = sort!(unique!(valid_versions))
+
+ # Store the per-registry data
+ all_deps_compressed[uuid] = pkg_deps_list
+ all_compat_compressed[uuid] = pkg_compat_list
+ weak_deps_compressed[uuid] = pkg_weak_deps_list
+ weak_compat_compressed[uuid] = pkg_weak_compat_list
+ pkg_versions_per_registry[uuid] = pkg_versions_per_reg
end
end
end
+ # Track weak dependencies that are not available in any registry
+ unavailable_weak_uuids = Set{UUID}()
+
for uuid in uuids
uuid == JULIA_UUID && continue
if !haskey(uuid_to_name, uuid)
name = registered_name(registries, uuid)
- name === nothing && pkgerror("cannot find name corresponding to UUID $(uuid) in a registry")
+ if name === nothing
+ # Allow weak dependencies to be missing from registries
+ if uuid in all_weak_uuids
+ push!(unavailable_weak_uuids, uuid)
+ continue
+ end
+ pkgerror("cannot find name corresponding to UUID $(uuid) in a registry")
+ end
uuid_to_name[uuid] = name
entry = manifest_info(env.manifest, uuid)
entry ≡ nothing && continue
@@ -666,8 +1094,24 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}
end
end
- return Resolve.Graph(all_compat, weak_compat, uuid_to_name, reqs, fixed, false, julia_version),
- all_compat
+ # Filter out unavailable weak dependencies from fixed packages
+ if !isempty(unavailable_weak_uuids)
+ fixed_filtered = Dict{UUID, Resolve.Fixed}()
+ for (uuid, fx) in fixed
+ filtered_requires = Requires()
+ for (req_uuid, req_spec) in fx.requires
+ if !(req_uuid in unavailable_weak_uuids)
+ filtered_requires[req_uuid] = req_spec
+ end
+ end
+ # Also filter the weak set
+ filtered_weak = setdiff(fx.weak, unavailable_weak_uuids)
+ fixed_filtered[uuid] = Resolve.Fixed(fx.version, filtered_requires, filtered_weak)
+ end
+ fixed = fixed_filtered
+ end
+
+ return all_deps_compressed, all_compat_compressed, weak_deps_compressed, weak_compat_compressed, pkg_versions, pkg_versions_per_registry, uuid_to_name, reqs, fixed
end
########################
@@ -683,11 +1127,18 @@ end
# Returns if archive successfully installed
function install_archive(
- urls::Vector{Pair{String,Bool}},
- hash::SHA1,
- version_path::String;
- io::IO=stderr_f()
-)::Bool
+ urls::Vector{Pair{String, Bool}},
+ hash::SHA1,
+ version_path::String;
+ name::Union{String, Nothing} = nothing,
+ io::IO = stderr_f()
+ )::Bool
+ # Because we use `mv_temp_dir_retries` which uses `rename` not `mv` it can fail if the temp
+ # files are on a different fs. So use a temp dir in the same depot dir as some systems might
+ # be serving different parts of the depot on different filesystems via links i.e. pkgeval does this.
+ depot_temp = mkpath(joinpath(dirname(dirname(version_path)), "temp")) # .julia/packages/temp
+ create_cachedir_tag(dirname(dirname(version_path)))
+
tmp_objects = String[]
url_success = false
for (url, top) in urls
@@ -695,19 +1146,21 @@ function install_archive(
push!(tmp_objects, path) # for cleanup
url_success = true
try
- PlatformEngines.download(url, path; verbose=false, io=io)
+ PlatformEngines.download(url, path; verbose = false, io = io)
catch e
e isa InterruptException && rethrow()
url_success = false
end
url_success || continue
- dir = joinpath(tempdir(), randstring(12))
+ # the temp dir should be in the same depot because the `rename` operation in `mv_temp_dir_retries`
+ # is possible only if the source and destination are on the same filesystem
+ dir = tempname(depot_temp) * randstring(6)
push!(tmp_objects, dir) # for cleanup
# Might fail to extract an archive (https://github.com/JuliaPackaging/PkgServer.jl/issues/126)
try
- unpack(path, dir; verbose=false)
+ unpack(path, dir; verbose = false)
catch e
- e isa InterruptException && rethrow()
+ e isa ProcessFailedException || rethrow()
@warn "failed to extract archive downloaded from $(url)"
url_success = false
end
@@ -722,52 +1175,67 @@ function install_archive(
unpacked = joinpath(dir, dirs[1])
end
# Assert that the tarball unpacked to the tree sha we wanted
- # TODO: Enable on Windows when tree_hash handles
- # executable bits correctly, see JuliaLang/julia #33212.
- if !Sys.iswindows()
- if SHA1(GitTools.tree_hash(unpacked)) != hash
- @warn "tarball content does not match git-tree-sha1"
- url_success = false
- end
- url_success || continue
+ computed_hash = GitTools.tree_hash(unpacked)
+ if SHA1(computed_hash) != hash
+ @warn "Downloaded package content does not match expected hash (git-tree-sha1); skipping this source" package = name url = url expected = hash computed = computed_hash
+ url_success = false
end
+ url_success || continue
+
# Move content to version path
- !isdir(version_path) && mkpath(version_path)
- mv(unpacked, version_path; force=true)
+ !isdir(dirname(version_path)) && mkpath(dirname(version_path))
+ mv_temp_dir_retries(unpacked, version_path; set_permissions = false)
+
break # successful install
end
# Clean up and exit
- foreach(x -> Base.rm(x; force=true, recursive=true), tmp_objects)
+ foreach(x -> Base.rm(x; force = true, recursive = true), tmp_objects)
return url_success
end
-const refspecs = ["+refs/*:refs/remotes/cache/*"]
+const refspecs = ["+refs/*:refs/cache/*"]
function install_git(
- io::IO,
- uuid::UUID,
- name::String,
- hash::SHA1,
- urls::Set{String},
- version_path::String
-)::Nothing
+ io::IO,
+ uuid::UUID,
+ name::String,
+ hash::SHA1,
+ urls::Set{String},
+ version_path::String
+ )::Nothing
+ if isempty(urls)
+ pkgerror(
+ "Package $name [$uuid] has no repository URL available. This could happen if:\n" *
+ " - The package is not registered in any configured registry\n" *
+ " - The package exists in a registry but lacks repository information\n" *
+ " - Registry files are corrupted or incomplete\n" *
+ " - Network issues prevented registry updates\n" *
+ "Please check that the package name is correct and that your registries are up to date."
+ )
+ end
+
repo = nothing
tree = nothing
# TODO: Consolidate this with some of the repo handling in Types.jl
try
clones_dir = joinpath(depots1(), "clones")
ispath(clones_dir) || mkpath(clones_dir)
+ create_cachedir_tag(clones_dir)
repo_path = joinpath(clones_dir, string(uuid))
- repo = GitTools.ensure_clone(io, repo_path, first(urls); isbare=true,
- header = "[$uuid] $name from $(first(urls))")
+ first_url = first(urls)
+ repo = GitTools.ensure_clone(
+ io, repo_path, first_url; isbare = true,
+ header = "[$uuid] $name from $first_url", depth = 1
+ )
git_hash = LibGit2.GitHash(hash.bytes)
for url in urls
- try LibGit2.with(LibGit2.GitObject, repo, git_hash) do g
+ try
+ LibGit2.with(LibGit2.GitObject, repo, git_hash) do g
end
break # object was found, we can stop
catch err
err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
end
- GitTools.fetch(io, repo, url, refspecs=refspecs)
+ GitTools.fetch(io, repo, url, refspecs = refspecs, depth = LibGit2.Consts.FETCH_DEPTH_UNSHALLOW)
end
tree = try
LibGit2.GitObject(repo, git_hash)
@@ -778,6 +1246,7 @@ function install_git(
tree isa LibGit2.GitTree ||
error("$name: git object $(string(hash)) should be a tree, not $(typeof(tree))")
mkpath(version_path)
+ create_cachedir_tag(dirname(dirname(version_path)))
GitTools.checkout_tree_to_path(repo, tree, version_path)
return
finally
@@ -786,9 +1255,9 @@ function install_git(
end
end
-function collect_artifacts(pkg_root::String; platform::AbstractPlatform=HostPlatform())
+function collect_artifacts(pkg_root::String; platform::AbstractPlatform = HostPlatform(), include_lazy::Bool = false)
# Check to see if this package has an (Julia)Artifacts.toml
- artifacts_tomls = Tuple{String,Base.TOML.TOMLDict}[]
+ artifacts_tomls = Tuple{String, Base.TOML.TOMLDict}[]
for f in artifact_names
artifacts_toml = joinpath(pkg_root, f)
if isfile(artifacts_toml)
@@ -799,18 +1268,19 @@ function collect_artifacts(pkg_root::String; platform::AbstractPlatform=HostPlat
# Despite the fact that we inherit the project, since the in-memory manifest
# has not been updated yet, if we try to load any dependencies, it may fail.
# Therefore, this project inheritance is really only for Preferences, not dependencies.
- select_cmd = Cmd(`$(gen_build_code(selector_path; inherit_project=true)) --compile=min -t1 --startup-file=no $(triplet(platform))`)
+ # We only guarantee access to the `stdlib`, which is why we set `add_stdlib` here.
+ select_cmd = Cmd(`$(gen_build_code(selector_path; inherit_project=true, add_stdlib=true)) --compile=min -t1 --startup-file=no $(triplet(platform))`)
meta_toml = String(read(select_cmd))
res = TOML.tryparse(meta_toml)
if res isa TOML.ParserError
- errstr = sprint(showerror, res; context=stderr)
+ errstr = sprint(showerror, res; context = stderr)
pkgerror("failed to parse TOML output from running $(repr(selector_path)), got: \n$errstr")
else
push!(artifacts_tomls, (artifacts_toml, TOML.parse(meta_toml)))
end
else
# Otherwise, use the standard selector from `Artifacts`
- artifacts = select_downloadable_artifacts(artifacts_toml; platform)
+ artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy)
push!(artifacts_tomls, (artifacts_toml, artifacts))
end
break
@@ -827,28 +1297,42 @@ mutable struct DownloadState
const bar::MiniProgressBar
end
-function download_artifacts(ctx::Context;
- platform::AbstractPlatform=HostPlatform(),
- julia_version = VERSION,
- verbose::Bool=false)
+function download_artifacts(
+ ctx::Context, pkgs;
+ platform::AbstractPlatform = HostPlatform(),
+ julia_version = VERSION,
+ verbose::Bool = false,
+ io::IO = stderr_f(),
+ include_lazy::Bool = false
+ )
env = ctx.env
io = ctx.io
fancyprint = can_fancyprint(io)
- pkg_roots = String[]
+ pkg_info = Tuple{String, Union{Base.UUID, Nothing}}[]
+ pkg_uuids = Set(pkg.uuid for pkg in pkgs)
for (uuid, pkg) in env.manifest
+ uuid in pkg_uuids || continue
pkg = manifest_info(env.manifest, uuid)
pkg_root = source_path(env.manifest_file, pkg, julia_version)
- pkg_root === nothing || push!(pkg_roots, pkg_root)
+ pkg_root === nothing || push!(pkg_info, (pkg_root, uuid))
end
- push!(pkg_roots, dirname(env.project_file))
+ push!(pkg_info, (dirname(env.project_file), env.pkg !== nothing ? env.pkg.uuid : nothing))
download_jobs = Dict{SHA1, Function}()
+ # Check what registries the current pkg server tracks
+ # Disable if precompiling to not access internet
+ server_registry_info = if Base.JLOptions().incremental == 0
+ Registry.pkg_server_registry_info()
+ else
+ nothing
+ end
+
print_lock = Base.ReentrantLock() # for non-fancyprint printing
download_states = Dict{SHA1, DownloadState}()
errors = Channel{Any}(Inf)
- is_done = false
+ is_done = Ref{Bool}(false)
ansi_moveup(n::Int) = string("\e[", n, "A")
ansi_movecol1 = "\e[1G"
ansi_cleartoend = "\e[0J"
@@ -856,49 +1340,59 @@ function download_artifacts(ctx::Context;
ansi_enablecursor = "\e[?25h"
ansi_disablecursor = "\e[?25l"
- all_collected_artifacts = reduce(vcat, map(pkg_root -> collect_artifacts(pkg_root; platform), pkg_roots))
- used_artifact_tomls = Set{String}(map(first, all_collected_artifacts))
- longest_name_length = maximum(all_collected_artifacts; init=0) do (artifacts_toml, artifacts)
- maximum(textwidth, keys(artifacts); init=0)
+ all_collected_artifacts = reduce(
+ vcat, map(
+ ((pkg_root, pkg_uuid),) ->
+ map(ca -> (ca[1], ca[2], pkg_uuid), collect_artifacts(pkg_root; platform, include_lazy)), pkg_info
+ )
+ )
+ used_artifact_tomls = Set{String}(map(ca -> ca[1], all_collected_artifacts))
+ longest_name_length = maximum(all_collected_artifacts; init = 0) do (artifacts_toml, artifacts, pkg_uuid)
+ maximum(textwidth, keys(artifacts); init = 0)
end
- for (artifacts_toml, artifacts) in all_collected_artifacts
+ for (artifacts_toml, artifacts, pkg_uuid) in all_collected_artifacts
# For each Artifacts.toml, install each artifact we've collected from it
for name in keys(artifacts)
local rname = rpad(name, longest_name_length)
- local hash = SHA1(artifacts[name]["git-tree-sha1"])
- local bar = MiniProgressBar(;header=rname, main=false, indent=2, color = Base.info_color(), mode=:data, always_reprint=true)
+ local hash = SHA1(artifacts[name]["git-tree-sha1"]::String)
+ local bar = MiniProgressBar(; header = rname, main = false, indent = 2, color = Base.info_color()::Symbol, mode = :data, always_reprint = true)
local dstate = DownloadState(:ready, "", time_ns(), Base.ReentrantLock(), bar)
- function progress(total, current; status="")
+ function progress(total, current; status = "")
local t = time_ns()
if isempty(status)
dstate.bar.max = total
dstate.bar.current = current
end
- lock(dstate.status_lock) do
+ return lock(dstate.status_lock) do
dstate.status = status
dstate.status_update_time = t
end
end
+ # Check if the current package is eligible for PkgServer artifact downloads
+ local pkg_server_eligible = pkg_uuid !== nothing && Registry.is_pkg_in_pkgserver_registry(pkg_uuid, server_registry_info, ctx.registries)
+
# returns a string if exists, or function that downloads the artifact if not
- local ret = ensure_artifact_installed(name, artifacts[name], artifacts_toml;
- verbose, quiet_download=!(usable_io(io)), io, progress)
+ local ret = ensure_artifact_installed(
+ name, artifacts[name], artifacts_toml;
+ pkg_server_eligible, verbose, quiet_download = !(usable_io(io)), io, progress
+ )
if ret isa Function
download_states[hash] = dstate
download_jobs[hash] =
() -> begin
- try
- dstate.state = :running
- ret()
- if !fancyprint
- @lock print_lock printpkgstyle(io, :Installed, "artifact $rname $(MiniProgressBars.pkg_format_bytes(dstate.bar.max; sigdigits=1))")
- end
- catch
- dstate.state = :failed
- rethrow()
- else
- dstate.state = :done
+ try
+ dstate.state = :running
+ ret()
+ if !fancyprint && dstate.bar.max > 1 # if another process downloaded, then max is never set greater than 1
+ @lock print_lock printpkgstyle(io, :Installed, "artifact $rname $(MiniProgressBars.pkg_format_bytes(dstate.bar.max; sigdigits = 1))")
end
+ catch
+ dstate.state = :failed
+ rethrow()
+ else
+ dstate.state = :done
end
+ end
end
end
end
@@ -908,39 +1402,39 @@ function download_artifacts(ctx::Context;
t_print = Threads.@spawn begin
try
print(io, ansi_disablecursor)
- first = true
- timer = Timer(0, interval=1/10)
+ first = Ref(true)
+ timer = Timer(0, interval = 1 / 10)
# TODO: Implement as a new MiniMultiProgressBar
- main_bar = MiniProgressBar(; indent=2, header = "Installing artifacts", color = :green, mode = :int, always_reprint=true)
+ main_bar = MiniProgressBar(; indent = 2, header = "Installing artifacts", color = :green, mode = :int, always_reprint = true)
main_bar.max = length(download_states)
- while !is_done
+ while !is_done[]
main_bar.current = count(x -> x.state == :done, values(download_states))
- str = sprint(context=io) do iostr
- first || print(iostr, ansi_cleartoend)
+ local str = sprint(context = io) do iostr
+ first[] || print(iostr, ansi_cleartoend)
n_printed = 1
- show_progress(iostr, main_bar; carriagereturn=false)
+ show_progress(iostr, main_bar; carriagereturn = false)
println(iostr)
- for dstate in sort!(collect(values(download_states)), by=v->v.bar.max, rev=true)
- local status, status_update_time = lock(()->(dstate.status, dstate.status_update_time), dstate.status_lock)
+ for dstate in sort!(collect(values(download_states)), by = v -> v.bar.max, rev = true)
+ local status, status_update_time = lock(() -> (dstate.status, dstate.status_update_time), dstate.status_lock)
# only update the bar's status message if it is stalled for at least 0.5 s.
# If the new status message is empty, go back to showing the bar without waiting.
if isempty(status) || time_ns() - status_update_time > UInt64(500_000_000)
dstate.bar.status = status
end
dstate.state == :running && (dstate.bar.max > 1000 || !isempty(dstate.bar.status)) || continue
- show_progress(iostr, dstate.bar; carriagereturn=false)
+ show_progress(iostr, dstate.bar; carriagereturn = false)
println(iostr)
n_printed += 1
end
- is_done || print(iostr, ansi_moveup(n_printed), ansi_movecol1)
- first = false
+ is_done[] || print(iostr, ansi_moveup(n_printed), ansi_movecol1)
+ first[] = false
end
print(io, str)
wait(timer)
end
print(io, ansi_cleartoend)
main_bar.current = count(x -> x[2].state == :done, download_states)
- show_progress(io, main_bar; carriagereturn=false)
+ show_progress(io, main_bar; carriagereturn = false)
println(io)
catch e
e isa InterruptException || rethrow()
@@ -953,26 +1447,26 @@ function download_artifacts(ctx::Context;
printpkgstyle(io, :Installing, "$(length(download_jobs)) artifacts")
end
sema = Base.Semaphore(ctx.num_concurrent_downloads)
- interrupted = false
+ interrupted = Ref{Bool}(false)
@sync for f in values(download_jobs)
- interrupted && break
+ interrupted[] && break
Base.acquire(sema)
Threads.@spawn try
f()
catch e
- e isa InterruptException && (interrupted = true)
+ e isa InterruptException && (interrupted[] = true)
put!(errors, e)
finally
Base.release(sema)
end
end
- is_done = true
+ is_done[] = true
fancyprint && wait(t_print)
close(errors)
if !isempty(errors)
all_errors = collect(errors)
- str = sprint(context=io) do iostr
+ local str = sprint(context = io) do iostr
for e in all_errors
Base.showerror(iostr, e)
length(all_errors) > 1 && println(iostr)
@@ -982,12 +1476,22 @@ function download_artifacts(ctx::Context;
end
end
- for f in used_artifact_tomls
- write_env_usage(f, "artifact_usage.toml")
- end
+
+ return write_env_usage(used_artifact_tomls, "artifact_usage.toml")
end
-function check_artifacts_downloaded(pkg_root::String; platform::AbstractPlatform=HostPlatform())
+function download_artifacts(
+ ctx::Context;
+ platform::AbstractPlatform = HostPlatform(),
+ julia_version = VERSION,
+ verbose::Bool = false,
+ io::IO = stderr_f(),
+ include_lazy::Bool = false
+ )
+ return download_artifacts(ctx, values(ctx.env.manifest); platform, julia_version, verbose, io, include_lazy)
+end
+
+function check_artifacts_downloaded(pkg_root::String; platform::AbstractPlatform = HostPlatform())
for (artifacts_toml, artifacts) in collect_artifacts(pkg_root; platform)
for name in keys(artifacts)
if !artifact_exists(Base.SHA1(artifacts[name]["git-tree-sha1"]))
@@ -1005,7 +1509,7 @@ function find_urls(registries::Vector{Registry.RegistryInstance}, uuid::UUID)
for reg in registries
reg_pkg = get(reg, uuid, nothing)
reg_pkg === nothing && continue
- info = Registry.registry_info(reg_pkg)
+ info = Registry.registry_info(reg, reg_pkg)
repo = info.repo
repo === nothing && continue
push!(urls, repo)
@@ -1014,15 +1518,49 @@ function find_urls(registries::Vector{Registry.RegistryInstance}, uuid::UUID)
end
-function download_source(ctx::Context; readonly=true)
- pkgs_to_install = NamedTuple{(:pkg, :urls, :path), Tuple{PackageEntry, Set{String}, String}}[]
- for pkg in values(ctx.env.manifest)
+download_source(ctx::Context; readonly::Bool = true) = download_source(ctx, collect(values(ctx.env.manifest)); readonly)
+
+function count_artifacts(pkg_root::String; platform::AbstractPlatform = HostPlatform())
+ for f in artifact_names
+ artifacts_toml = joinpath(pkg_root, f)
+ if isfile(artifacts_toml)
+ eager = select_downloadable_artifacts(artifacts_toml; platform, include_lazy = false)
+ all_matching = select_downloadable_artifacts(artifacts_toml; platform, include_lazy = true)
+ return (length(eager), length(all_matching) - length(eager))
+ end
+ end
+ return nothing
+end
+
+function artifact_suffix(artifact_counts)
+ artifact_counts === nothing && return ""
+ n_eager, n_lazy = artifact_counts
+ n_eager + n_lazy == 0 && return " (no artifacts on this platform)"
+ return ""
+end
+
+function download_source(ctx::Context, pkgs; readonly::Bool = true)
+ pidfile_stale_age = 10 # recommended value is about 3-5x an estimated normal download time (i.e. 2-3s)
+ pkgs_to_install = NamedTuple{(:pkg, :urls, :path), Tuple{eltype(pkgs), Set{String}, String}}[]
+ for pkg in pkgs
tracking_registered_version(pkg, ctx.julia_version) || continue
path = source_path(ctx.env.manifest_file, pkg, ctx.julia_version)
path === nothing && continue
- ispath(path) && continue
+ if ispath(path) && iswritable(path)
+ pidfile = path * ".pid"
+ else
+ # If the path is not writable, we cannot create a pidfile there so use one in the first depot.
+ # (pidlocking probably isn't needed as in this case the package source logically is alredy installed
+ # in the readonly depot, but keep the pidfile logic for consistency)
+ dir = joinpath(depots1(), "packages", pkg.name)
+ mkpath(dir)
+ iswritable(dir) || pkgerror("The primary depot is not writable")
+ pidfile = joinpath(dir, basename(path) * ".pid")
+ end
+
+ FileWatching.mkpidlock(() -> ispath(path), pidfile, stale_age = pidfile_stale_age) && continue
urls = find_urls(ctx.registries, pkg.uuid)
- push!(pkgs_to_install, (;pkg, urls, path))
+ push!(pkgs_to_install, (; pkg, urls, path))
end
length(pkgs_to_install) == 0 && return Set{UUID}()
@@ -1033,7 +1571,7 @@ function download_source(ctx::Context; readonly=true)
missed_packages = eltype(pkgs_to_install)[]
widths = [textwidth(pkg.name) for (pkg, _) in pkgs_to_install]
- max_name = maximum(widths; init=0)
+ max_name = maximum(widths; init = 0)
# Check what registries the current pkg server tracks
# Disable if precompiling to not access internet
@@ -1043,7 +1581,8 @@ function download_source(ctx::Context; readonly=true)
nothing
end
- @sync begin
+ # use eager throw version
+ Base.Experimental.@sync begin
jobs = Channel{eltype(pkgs_to_install)}(ctx.num_concurrent_downloads)
results = Channel(ctx.num_concurrent_downloads)
@@ -1053,61 +1592,69 @@ function download_source(ctx::Context; readonly=true)
end
end
- for i in 1:ctx.num_concurrent_downloads
+ for i in 1:ctx.num_concurrent_downloads # (default 8)
@async begin
for (pkg, urls, path) in jobs
- if ctx.use_git_for_all_downloads
- put!(results, (pkg, false, (urls, path)))
- continue
- end
- try
- archive_urls = Pair{String,Bool}[]
+ mkpath(dirname(path)) # the `packages/Package` dir needs to exist for the pidfile to be created
+ FileWatching.mkpidlock(path * ".pid", stale_age = pidfile_stale_age) do
+ if ispath(path)
+ put!(results, (pkg, nothing, (urls, path)))
+ return
+ end
+ if ctx.use_git_for_all_downloads
+ put!(results, (pkg, false, (urls, path)))
+ return
+ end
+ archive_urls = Pair{String, Bool}[]
# Check if the current package is available in one of the registries being tracked by the pkg server
# In that case, download from the package server
- if server_registry_info !== nothing
+ if Registry.is_pkg_in_pkgserver_registry(pkg.uuid, server_registry_info, ctx.registries)
server, registry_info = server_registry_info
- for reg in ctx.registries
- if reg.uuid in keys(registry_info)
- if haskey(reg, pkg.uuid)
- url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)"
- push!(archive_urls, url => true)
- break
- end
- end
- end
+ url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)"
+ push!(archive_urls, url => true)
end
for repo_url in urls
url = get_archive_url_for_version(repo_url, pkg.tree_hash)
url !== nothing && push!(archive_urls, url => false)
end
- success = install_archive(archive_urls, pkg.tree_hash, path, io=ctx.io)
- if success && readonly
- set_readonly(path) # In add mode, files should be read-only
- end
- if ctx.use_only_tarballs_for_downloads && !success
- pkgerror("failed to get tarball from $(urls)")
+ try
+ success = install_archive(archive_urls, pkg.tree_hash, path; name = pkg.name, io = ctx.io)
+ if success && readonly
+ set_readonly(path) # In add mode, files should be read-only
+ end
+ if ctx.use_only_tarballs_for_downloads && !success
+ pkgerror("failed to get tarball from $(urls)")
+ end
+ put!(results, (pkg, success, (urls, path)))
+ catch err
+ put!(results, (pkg, err, catch_backtrace()))
end
- put!(results, (pkg, success, (urls, path)))
- catch err
- put!(results, (pkg, err, catch_backtrace()))
end
end
end
end
- bar = MiniProgressBar(; indent=1, header = "Downloading packages", color = Base.info_color(),
- mode=:int, always_reprint=true)
+ bar = MiniProgressBar(;
+ indent = 1, header = "Downloading packages", color = Base.info_color(),
+ mode = :int, always_reprint = true
+ )
bar.max = length(pkgs_to_install)
fancyprint = can_fancyprint(ctx.io)
try
for i in 1:length(pkgs_to_install)
- pkg::PackageEntry, exc_or_success, bt_or_pathurls = take!(results)
- exc_or_success isa Exception && pkgerror("Error when installing package $(pkg.name):\n",
- sprint(Base.showerror, exc_or_success, bt_or_pathurls))
- success, (urls, path) = exc_or_success, bt_or_pathurls
+ pkg::eltype(pkgs), exc_or_success_or_nothing, bt_or_pathurls = take!(results)
+ if exc_or_success_or_nothing isa Exception
+ exc = exc_or_success_or_nothing
+ pkgerror("Error when installing package $(pkg.name):\n", sprint(Base.showerror, exc, bt_or_pathurls))
+ end
+ if exc_or_success_or_nothing === nothing
+ continue # represents when another process did the install
+ end
+ success = exc_or_success_or_nothing::Bool
+ (urls, path) = bt_or_pathurls::Tuple{Set{String}, String}
success || push!(missed_packages, (; pkg, urls, path))
bar.current = i
- str = sprint(; context=ctx.io) do io
+ str = sprint(; context = ctx.io) do io
if success
fancyprint && print_progress_bottom(io)
vstr = if pkg.version !== nothing
@@ -1116,7 +1663,8 @@ function download_source(ctx::Context; readonly=true)
short_treehash = string(pkg.tree_hash)[1:16]
"[$short_treehash]"
end
- printpkgstyle(io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr))
+ artifact_str = artifact_suffix(count_artifacts(path))
+ printpkgstyle(io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr, artifact_str))
fancyprint && show_progress(io, bar)
end
end
@@ -1132,16 +1680,19 @@ function download_source(ctx::Context; readonly=true)
# Use LibGit2 to download any remaining packages #
##################################################
for (pkg, urls, path) in missed_packages
- uuid = pkg.uuid
- install_git(ctx.io, pkg.uuid, pkg.name, pkg.tree_hash, urls, path)
- readonly && set_readonly(path)
- vstr = if pkg.version !== nothing
- "v$(pkg.version)"
- else
- short_treehash = string(pkg.tree_hash)[1:16]
- "[$short_treehash]"
+ FileWatching.mkpidlock(path * ".pid", stale_age = pidfile_stale_age) do
+ ispath(path) && return
+ install_git(ctx.io, pkg.uuid, pkg.name, pkg.tree_hash, urls, path)
+ readonly && set_readonly(path)
+ vstr = if pkg.version !== nothing
+ "v$(pkg.version)"
+ else
+ short_treehash = string(pkg.tree_hash)[1:16]
+ "[$short_treehash]"
+ end
+ artifact_str = artifact_suffix(count_artifacts(path))
+ printpkgstyle(ctx.io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr, artifact_str))
end
- printpkgstyle(ctx.io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr))
end
return Set{UUID}(entry.pkg.uuid for entry in pkgs_to_install)
@@ -1150,7 +1701,6 @@ end
################################
# Manifest update and pruning #
################################
-project_rel_path(env::EnvCache, path::String) = normpath(joinpath(dirname(env.manifest_file), path))
function prune_manifest(env::EnvCache)
# if project uses another manifest, only prune project entry in manifest
@@ -1192,10 +1742,11 @@ function prune_deps(iterator, keep::Set{UUID})
end
clean && break
end
+ return
end
function record_project_hash(env::EnvCache)
- env.manifest.other["project_hash"] = Types.workspace_resolve_hash(env)
+ return env.manifest.other["project_hash"] = Types.workspace_resolve_hash(env)
end
#########
@@ -1232,60 +1783,71 @@ function any_package_not_installed(manifest::Manifest)
return false
end
-function build(ctx::Context, uuids::Set{UUID}, verbose::Bool)
+function build(ctx::Context, uuids::Set{UUID}, verbose::Bool; allow_reresolve::Bool = true)
if any_package_not_installed(ctx.env.manifest) || !isfile(ctx.env.manifest_file)
Pkg.instantiate(ctx, allow_build = false, allow_autoprecomp = false)
end
all_uuids = get_deps(ctx.env, uuids)
- build_versions(ctx, all_uuids; verbose)
+ return build_versions(ctx, all_uuids; verbose, allow_reresolve)
end
-function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID,Int}
- order = Dict{UUID,Int}()
+function dependency_order_visit!(
+ order::Dict{UUID, Int}, seen::Vector{UUID}, counter::Base.RefValue{Int},
+ env::EnvCache, uuid::UUID
+ )
+ uuid in seen && return @warn("Dependency graph not a DAG, linearizing anyway")
+ haskey(order, uuid) && return
+ push!(seen, uuid)
+ deps = if Types.is_project_uuid(env, uuid)
+ values(env.project.deps)
+ else
+ entry = manifest_info(env.manifest, uuid)
+ values(entry.deps)
+ end
+ for dep in deps
+ dependency_order_visit!(order, seen, counter, env, dep)
+ end
+ pop!(seen)
+ counter[] += 1
+ order[uuid] = counter[]
+ return
+end
+
+function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID, Int}
+ order = Dict{UUID, Int}()
seen = UUID[]
- k::Int = 0
- function visit(uuid::UUID)
- uuid in seen &&
- return @warn("Dependency graph not a DAG, linearizing anyway")
- haskey(order, uuid) && return
- push!(seen, uuid)
- if Types.is_project_uuid(env, uuid)
- deps = values(env.project.deps)
- else
- entry = manifest_info(env.manifest, uuid)
- deps = values(entry.deps)
- end
- foreach(visit, deps)
- pop!(seen)
- order[uuid] = k += 1
+ counter = Ref(0)
+ for uuid in uuids
+ dependency_order_visit!(order, seen, counter, env, uuid)
end
- visit(uuid::String) = visit(UUID(uuid))
- foreach(visit, uuids)
return order
end
-function gen_build_code(build_file::String; inherit_project::Bool = false)
+function gen_build_code(build_file::String; inherit_project::Bool = false, add_stdlib::Bool = false)
code = """
- $(Base.load_path_setup_code(false))
- cd($(repr(dirname(build_file))))
- include($(repr(build_file)))
- """
+ $(Base.load_path_setup_code(false))
+ if $(add_stdlib)
+ push!(Base.LOAD_PATH, "@stdlib")
+ end
+ cd($(repr(dirname(build_file))))
+ include($(repr(build_file)))
+ """
# This will make it so that running Pkg.build runs the build in a session with --startup=no
# *unless* the parent julia session is started with --startup=yes explicitly.
startup_flag = Base.JLOptions().startupfile == 1 ? "yes" : "no"
return ```
- $(Base.julia_cmd()) -O0 --color=no --history-file=no
- --startup-file=$startup_flag
- $(inherit_project ? `--project=$(Base.active_project())` : ``)
- --eval $code
- ```
+ $(Base.julia_cmd()) -O0 --color=no --history-file=no
+ --startup-file=$startup_flag
+ $(inherit_project ? `--project=$(Base.active_project())` : ``)
+ --eval $code
+ ```
end
with_load_path(f::Function, new_load_path::String) = with_load_path(f, [new_load_path])
function with_load_path(f::Function, new_load_path::Vector{String})
old_load_path = copy(Base.LOAD_PATH)
copy!(Base.LOAD_PATH, new_load_path)
- try
+ return try
f()
finally
copy!(LOAD_PATH, old_load_path)
@@ -1297,9 +1859,9 @@ pkg_scratchpath() = joinpath(depots1(), "scratchspaces", PkgUUID)
builddir(source_path::String) = joinpath(source_path, "deps")
buildfile(source_path::String) = joinpath(builddir(source_path), "build.jl")
-function build_versions(ctx::Context, uuids::Set{UUID}; verbose=false)
+function build_versions(ctx::Context, uuids::Set{UUID}; verbose = false, allow_reresolve::Bool = true)
# collect builds for UUIDs with `deps/build.jl` files
- builds = Tuple{UUID,String,String,VersionNumber}[]
+ builds = Tuple{UUID, String, String, VersionNumber}[]
for uuid in uuids
is_stdlib(uuid) && continue
if Types.is_project_uuid(ctx.env, uuid)
@@ -1324,84 +1886,95 @@ function build_versions(ctx::Context, uuids::Set{UUID}; verbose=false)
# toposort builds by dependencies
order = dependency_order_uuids(ctx.env, UUID[first(build) for build in builds])
sort!(builds, by = build -> order[first(build)])
- max_name = maximum(build->textwidth(build[2]), builds; init=0)
+ max_name = maximum(build -> textwidth(build[2]), builds; init = 0)
- bar = MiniProgressBar(; indent=2, header = "Building packages", color = Base.info_color(),
- mode=:int, always_reprint=true)
+ bar = MiniProgressBar(;
+ indent = 2, header = "Building packages", color = Base.info_color(),
+ mode = :int, always_reprint = true
+ )
bar.max = length(builds)
fancyprint = can_fancyprint(ctx.io)
fancyprint && start_progress(ctx.io, bar)
# build each package versions in a child process
try
- for (n, (uuid, name, source_path, version)) in enumerate(builds)
- pkg = PackageSpec(;uuid=uuid, name=name, version=version)
- build_file = buildfile(source_path)
- # compatibility shim
- local build_project_override, build_project_preferences
- if isfile(projectfile_path(builddir(source_path)))
- build_project_override = nothing
- with_load_path([builddir(source_path), Base.LOAD_PATH...]) do
- build_project_preferences = Base.get_preferences()
- end
- else
- build_project_override = gen_target_project(ctx, pkg, source_path, "build")
- with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do
- build_project_preferences = Base.get_preferences()
+ for (n, (uuid, name, source_path, version)) in enumerate(builds)
+ pkg = PackageSpec(; uuid = uuid, name = name, version = version)
+ build_file = buildfile(source_path)
+ # compatibility shim
+ local build_project_override
+ build_project_preferences = if isfile(projectfile_path(builddir(source_path)))
+ build_project_override = nothing
+ with_load_path([builddir(source_path), Base.LOAD_PATH...]) do
+ Base.get_preferences()
+ end
+ else
+ build_project_override = gen_target_project(ctx, pkg, source_path, "build")
+ with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do
+ Base.get_preferences()
+ end
end
- end
- # Put log output in Pkg's scratchspace if the package is content addressed
- # by tree sha and in the build directory if it is tracked by path etc.
- entry = manifest_info(ctx.env.manifest, uuid)
- if entry !== nothing && entry.tree_hash !== nothing
- key = string(entry.tree_hash)
- scratch = joinpath(pkg_scratchpath(), key)
- mkpath(scratch)
- log_file = joinpath(scratch, "build.log")
- # Associate the logfile with the package being built
- dict = Dict{String,Any}(scratch => [
- Dict{String,Any}("time" => Dates.now(), "parent_projects" => [projectfile_path(source_path)])
- ])
- open(joinpath(depots1(), "logs", "scratch_usage.toml"), "a") do io
- TOML.print(io, dict)
+ # Put log output in Pkg's scratchspace if the package is content addressed
+ # by tree sha and in the build directory if it is tracked by path etc.
+ entry = manifest_info(ctx.env.manifest, uuid)
+ if entry !== nothing && entry.tree_hash !== nothing
+ key = string(entry.tree_hash)
+ scratch = joinpath(pkg_scratchpath(), key)
+ mkpath(scratch)
+ create_cachedir_tag(joinpath(depots1(), "scratchspaces"))
+ log_file = joinpath(scratch, "build.log")
+ # Associate the logfile with the package being built
+ dict = Dict{String, Any}()
+ inner_dict = Dict{String, Any}()
+ inner_dict["time"] = Dates.now()
+ inner_dict["parent_projects"] = [projectfile_path(source_path)]
+ dict[scratch] = [inner_dict]
+ open(joinpath(depots1(), "logs", "scratch_usage.toml"), "a") do io
+ TOML.print(io, dict)
+ end
+ else
+ log_file = splitext(build_file)[1] * ".log"
end
- else
- log_file = splitext(build_file)[1] * ".log"
- end
-
- fancyprint && print_progress_bottom(ctx.io)
- printpkgstyle(ctx.io, :Building,
- rpad(name * " ", max_name + 1, "─") * "→ " * pathrepr(log_file))
- bar.current = n-1
+ fancyprint && print_progress_bottom(ctx.io)
- fancyprint && show_progress(ctx.io, bar)
-
- let log_file=log_file
- sandbox(ctx, pkg, builddir(source_path), build_project_override; preferences=build_project_preferences) do
- flush(ctx.io)
- ok = open(log_file, "w") do log
- std = verbose ? ctx.io : log
- success(pipeline(gen_build_code(buildfile(source_path)),
- stdout=std, stderr=std))
- end
- ok && return
- n_lines = isinteractive() ? 100 : 5000
- # TODO: Extract last n lines more efficiently
- log_lines = readlines(log_file)
- log_show = join(log_lines[max(1, length(log_lines) - n_lines):end], '\n')
- full_log_at, last_lines =
- if length(log_lines) > n_lines
- "\n\nFull log at $log_file",
- ", showing the last $n_lines of log"
- else
- "", ""
+ printpkgstyle(
+ ctx.io, :Building,
+ rpad(name * " ", max_name + 1, "─") * "→ " * pathrepr(log_file)
+ )
+ bar.current = n - 1
+
+ fancyprint && show_progress(ctx.io, bar)
+
+ let log_file = log_file
+ sandbox(ctx, pkg, builddir(source_path), build_project_override; preferences = build_project_preferences, allow_reresolve) do
+ flush(ctx.io)
+ ok = open(log_file, "w") do log
+ std = verbose ? ctx.io : log
+ success(
+ pipeline(
+ gen_build_code(buildfile(source_path)),
+ stdout = std, stderr = std
+ )
+ )
+ end
+ ok && return
+ n_lines = isinteractive() ? 100 : 5000
+ # TODO: Extract last n lines more efficiently
+ log_lines = readlines(log_file)
+ log_show = join(log_lines[max(1, length(log_lines) - n_lines):end], '\n')
+ full_log_at, last_lines =
+ if length(log_lines) > n_lines
+ "\n\nFull log at $log_file",
+ ", showing the last $n_lines of log"
+ else
+ "", ""
+ end
+ pkgerror("Error building `$(pkg.name)`$last_lines: \n$log_show$full_log_at")
end
- pkgerror("Error building `$(pkg.name)`$last_lines: \n$log_show$full_log_at")
end
end
- end
finally
fancyprint && end_progress(ctx.io, bar)
end
@@ -1482,47 +2055,53 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode::PackageMode)
record_project_hash(ctx.env)
# update project & manifest
write_env(ctx.env)
- show_update(ctx.env, ctx.registries; io=ctx.io)
+ return show_update(ctx.env, ctx.registries; io = ctx.io)
end
-update_package_add(ctx::Context, pkg::PackageSpec, ::Nothing, source_path, source_repo, is_dep::Bool) = pkg
-function update_package_add(ctx::Context, pkg::PackageSpec, entry::PackageEntry, source_path, source_repo, is_dep::Bool)
+update_package_add(ctx::Context, pkg::PackageSpec, ::Nothing, is_dep::Bool) = pkg
+function update_package_add(ctx::Context, pkg::PackageSpec, entry::PackageEntry, is_dep::Bool)
if entry.pinned
if pkg.version == VersionSpec()
println(ctx.io, "`$(pkg.name)` is pinned at `v$(entry.version)`: maintaining pinned version")
end
- return PackageSpec(; uuid=pkg.uuid, name=pkg.name, pinned=true,
- version=entry.version, tree_hash=entry.tree_hash,
- path=entry.path, repo=entry.repo)
+ return PackageSpec(;
+ uuid = pkg.uuid, name = pkg.name, pinned = true,
+ version = entry.version, tree_hash = entry.tree_hash,
+ path = entry.path, repo = entry.repo
+ )
end
if entry.path !== nothing || entry.repo.source !== nothing || pkg.repo.source !== nothing
return pkg # overwrite everything, nothing to copy over
end
- if is_stdlib(pkg.uuid)
+ if is_stdlib(pkg.uuid, ctx.julia_version)
return pkg # stdlibs are not versioned like other packages
- elseif is_dep && ((isa(pkg.version, VersionNumber) && entry.version == pkg.version) ||
- (!isa(pkg.version, VersionNumber) && entry.version ∈ pkg.version))
+ elseif is_dep && (
+ (isa(pkg.version, VersionNumber) && entry.version == pkg.version) ||
+ (!isa(pkg.version, VersionNumber) && entry.version ∈ pkg.version)
+ )
# leave the package as is at the installed version
- return PackageSpec(; uuid=pkg.uuid, name=pkg.name, version=entry.version,
- tree_hash=entry.tree_hash)
+ return PackageSpec(;
+ uuid = pkg.uuid, name = pkg.name, version = entry.version,
+ tree_hash = entry.tree_hash
+ )
end
# adding a new version not compatible with the old version, so we just overwrite
return pkg
end
# Update registries AND read them back in.
-function update_registries(ctx::Context; force::Bool=true, kwargs...)
+function update_registries(ctx::Context; force::Bool = true, kwargs...)
OFFLINE_MODE[] && return
!force && UPDATED_REGISTRY_THIS_SESSION[] && return
- Registry.update(; io=ctx.io, kwargs...)
+ Registry.update(; io = ctx.io, kwargs...)
copy!(ctx.registries, Registry.reachable_registries())
- UPDATED_REGISTRY_THIS_SESSION[] = true
+ return UPDATED_REGISTRY_THIS_SESSION[] = true
end
function is_all_registered(registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec})
pkgs = filter(tracking_registered_version, pkgs)
for pkg in pkgs
- if !any(r->haskey(r, pkg.uuid), registries)
+ if !any(r -> haskey(r, pkg.uuid), registries)
return pkg
end
end
@@ -1530,9 +2109,32 @@ function is_all_registered(registries::Vector{Registry.RegistryInstance}, pkgs::
end
function check_registered(registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec})
+ if isempty(registries) && !isempty(pkgs)
+ registry_pkgs = filter(tracking_registered_version, pkgs)
+ if !isempty(registry_pkgs)
+ pkgerror("no registries have been installed. Cannot resolve the following packages:\n$(join(map(pkg -> " " * err_rep(pkg), registry_pkgs), "\n"))")
+ end
+ end
pkg = is_all_registered(registries, pkgs)
if pkg isa PackageSpec
- pkgerror("expected package $(err_rep(pkg)) to be registered")
+ msg = "expected package $(err_rep(pkg)) to be registered"
+ # check if the name exists in the registry with a different uuid
+ if pkg.name !== nothing
+ reg_uuid = Pair{String, Vector{UUID}}[]
+ for reg in registries
+ uuids = Registry.uuids_from_name(reg, pkg.name)
+ if !isempty(uuids)
+ push!(reg_uuid, reg.name => uuids)
+ end
+ end
+ if !isempty(reg_uuid)
+ msg *= "\n You may have provided the wrong UUID for package $(pkg.name).\n Found the following UUIDs for that name:"
+ for (reg, uuids) in reg_uuid
+ msg *= "\n - $(join(uuids, ", ")) from registry: $reg"
+ end
+ end
+ end
+ pkgerror(msg)
end
return nothing
end
@@ -1544,29 +2146,38 @@ function assert_can_add(ctx::Context, pkgs::Vector{PackageSpec})
# package with the same name exist in the project: assert that they have the same uuid
existing_uuid = get(ctx.env.project.deps, pkg.name, pkg.uuid)
existing_uuid == pkg.uuid ||
- pkgerror("""Refusing to add package $(err_rep(pkg)).
- Package `$(pkg.name)=$(existing_uuid)` with the same name already exists as a direct dependency.
- To remove the existing package, use `import Pkg; Pkg.rm("$(pkg.name)")`.
- """)
+ pkgerror(
+ """Refusing to add package $(err_rep(pkg)).
+ Package `$(pkg.name)=$(existing_uuid)` with the same name already exists as a direct dependency.
+ To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm $(pkg.name)""" : """import Pkg; Pkg.rm("$(pkg.name)")""")`.
+ """
+ )
# package with the same uuid exist in the project: assert they have the same name
name = findfirst(==(pkg.uuid), ctx.env.project.deps)
name === nothing || name == pkg.name ||
- pkgerror("""Refusing to add package $(err_rep(pkg)).
- Package `$name=$(pkg.uuid)` with the same UUID already exists as a direct dependency.
- To remove the existing package, use `import Pkg; Pkg.rm("$name")`.
- """)
+ pkgerror(
+ """Refusing to add package $(err_rep(pkg)).
+ Package `$name=$(pkg.uuid)` with the same UUID already exists as a direct dependency.
+ To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm $name""" : """import Pkg; Pkg.rm("$name")""")`.
+ """
+ )
# package with the same uuid exist in the manifest: assert they have the same name
entry = get(ctx.env.manifest, pkg.uuid, nothing)
entry === nothing || entry.name == pkg.name ||
- pkgerror("""Refusing to add package $(err_rep(pkg)).
- Package `$(entry.name)=$(pkg.uuid)` with the same UUID already exists in the manifest.
- To remove the existing package, use `import Pkg; Pkg.rm(Pkg.PackageSpec(uuid="$(pkg.uuid)"); mode=Pkg.PKGMODE_MANIFEST)`.
- """)
+ pkgerror(
+ """Refusing to add package $(err_rep(pkg)).
+ Package `$(entry.name)=$(pkg.uuid)` with the same UUID already exists in the manifest.
+ To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm --manifest $(entry.name)=$(pkg.uuid)""" : """import Pkg; Pkg.rm(Pkg.PackageSpec(uuid="$(pkg.uuid)"); mode=Pkg.PKGMODE_MANIFEST)""")`.
+ """
+ )
end
+ return
end
-function tiered_resolve(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version,
- try_all_installed::Bool)
+function tiered_resolve(
+ env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version,
+ try_all_installed::Bool
+ )
if try_all_installed
try # do not modify existing subgraph and only add installed versions of the new packages
@debug "tiered_resolve: trying PRESERVE_ALL_INSTALLED"
@@ -1609,29 +2220,92 @@ function targeted_resolve(env::EnvCache, registries::Vector{Registry.RegistryIns
return pkgs, deps_map
end
-function _resolve(io::IO, env::EnvCache, registries::Vector{Registry.RegistryInstance},
- pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version)
- printpkgstyle(io, :Resolving, "package versions...")
- if preserve == PRESERVE_TIERED_INSTALLED
- tiered_resolve(env, registries, pkgs, julia_version, true)
- elseif preserve == PRESERVE_TIERED
- tiered_resolve(env, registries, pkgs, julia_version, false)
+function _resolve(
+ io::IO, env::EnvCache, registries::Vector{Registry.RegistryInstance},
+ pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version
+ )
+ usingstrategy = preserve != PRESERVE_TIERED ? " using $preserve" : ""
+ printpkgstyle(io, :Resolving, "package versions$(usingstrategy)...")
+ return try
+ if preserve == PRESERVE_TIERED_INSTALLED
+ tiered_resolve(env, registries, pkgs, julia_version, true)
+ elseif preserve == PRESERVE_TIERED
+ tiered_resolve(env, registries, pkgs, julia_version, false)
+ else
+ targeted_resolve(env, registries, pkgs, preserve, julia_version)
+ end
+ catch err
+
+ if err isa Resolve.ResolverError
+ yanked_pkgs = filter(pkg -> is_pkgversion_yanked(pkg, registries), load_all_deps(env))
+ if !isempty(yanked_pkgs)
+ indent = " "^(Pkg.pkgstyle_indent)
+ yanked_str = join(map(pkg -> indent * " - " * err_rep(pkg, quotes = false) * " " * string(pkg.version), yanked_pkgs), "\n")
+ printpkgstyle(io, :Warning, """The following package versions were yanked from their registry and \
+ are not resolvable:\n$yanked_str""", color = Base.warn_color())
+ end
+ end
+ rethrow()
+ end
+end
+
+function can_skip_resolve_for_add(pkg::PackageSpec, entry::Union{PackageEntry, Nothing})
+ # Can't skip if package not in manifest
+ entry === nothing && return false
+
+ # Can't skip if pinned (needs special handling in resolution)
+ entry.pinned && return false
+
+ # Can't skip if tracking path or repo
+ (entry.path !== nothing || entry.repo.source !== nothing || pkg.repo.source !== nothing) && return false
+
+ # Check if requested version is compatible with installed version
+ version_compatible = if isa(pkg.version, VersionNumber)
+ entry.version == pkg.version
+ elseif pkg.version == VersionSpec()
+ # No version specified, current version is acceptable
+ true
else
- targeted_resolve(env, registries, pkgs, preserve, julia_version)
+ # VersionSpec range specified, check if current version is in range
+ entry.version ∈ pkg.version
end
+
+ return version_compatible
+end
+
+function add_compat_entries!(ctx::Context, pkgs::Vector{PackageSpec})
+ # Only add compat entries if env is a package
+ ctx.env.pkg === nothing && return
+
+ compat_names = String[]
+ for pkg in pkgs
+ haskey(ctx.env.project.compat, pkg.name) && continue
+ v = ctx.env.manifest[pkg.uuid].version
+ v === nothing && continue
+ pkgversion = string(Base.thispatch(v))
+ set_compat(ctx.env.project, pkg.name, pkgversion)
+ push!(compat_names, pkg.name)
+ end
+ if !isempty(compat_names)
+ printpkgstyle(ctx.io, :Compat, "entries added for $(join(compat_names, ", "))")
+ end
+ return
end
-function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}();
- allow_autoprecomp::Bool=true, preserve::PreserveLevel=default_preserve(), platform::AbstractPlatform=HostPlatform(),
- target::Symbol=:deps)
+function add(
+ ctx::Context, pkgs::Vector{PackageSpec}, new_git = Set{UUID}();
+ allow_autoprecomp::Bool = true, preserve::PreserveLevel = default_preserve(), platform::AbstractPlatform = HostPlatform(),
+ target::Symbol = :deps
+ )
assert_can_add(ctx, pkgs)
# load manifest data
+ pkg_entries = Tuple{PackageSpec, Union{PackageEntry, Nothing}, Bool}[]
for (i, pkg) in pairs(pkgs)
delete!(ctx.env.project.weakdeps, pkg.name)
entry = manifest_info(ctx.env.manifest, pkg.uuid)
is_dep = any(uuid -> uuid == pkg.uuid, [uuid for (name, uuid) in ctx.env.project.deps])
- source_path, source_repo = get_path_repo(ctx.env.project, pkg.name)
- pkgs[i] = update_package_add(ctx, pkg, entry, source_path, source_repo, is_dep)
+ push!(pkg_entries, (pkg, entry, is_dep))
+ pkgs[i] = update_package_add(ctx, pkg, entry, is_dep)
end
names = (p.name for p in pkgs)
@@ -1645,38 +2319,47 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}();
pkgerror("Unrecognized target $(target)")
end
+ # Check if we can skip resolution for all packages
+ can_skip_all = target == :deps && all(pkg_entries) do (pkg, entry, _)
+ can_skip_resolve_for_add(pkg, entry)
+ end
+
+ if can_skip_all
+ # All packages are already in manifest with compatible versions
+ # Just promote to direct dependencies without resolving
+ foreach(pkg -> target_field[pkg.name] = pkg.uuid, pkgs) # update set of deps/weakdeps/extras
+
+ # if env is a package add compat entries
+ add_compat_entries!(ctx, pkgs)
+
+ record_project_hash(ctx.env)
+ write_env(ctx.env)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
+
+ return
+ end
+
foreach(pkg -> target_field[pkg.name] = pkg.uuid, pkgs) # update set of deps/weakdeps/extras
if target == :deps # nothing to resolve/install if it's weak or extras
# resolve
man_pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, preserve, ctx.julia_version)
- update_manifest!(ctx.env, man_pkgs, deps_map, ctx.julia_version)
+ update_manifest!(ctx.env, man_pkgs, deps_map, ctx.julia_version, ctx.registries)
new_apply = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
+ fixups_from_projectfile!(ctx)
# After downloading resolutionary packages, search for (Julia)Artifacts.toml files
# and ensure they are all downloaded and unpacked as well:
- download_artifacts(ctx, platform=platform, julia_version=ctx.julia_version)
+ download_artifacts(ctx, platform = platform, julia_version = ctx.julia_version)
# if env is a package add compat entries
- if ctx.env.project.name !== nothing && ctx.env.project.uuid !== nothing
- compat_names = String[]
- for pkg in pkgs
- haskey(ctx.env.project.compat, pkg.name) && continue
- v = ctx.env.manifest[pkg.uuid].version
- v === nothing && continue
- pkgversion = string(Base.thispatch(v))
- set_compat(ctx.env.project, pkg.name, pkgversion)
- push!(compat_names, pkg.name)
- end
- printpkgstyle(ctx.io, :Compat, """entries added for $(join(compat_names, ", "))""")
- end
+ add_compat_entries!(ctx, pkgs)
record_project_hash(ctx.env) # compat entries changed the hash after it was last recorded in update_manifest!
write_env(ctx.env) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
build_versions(ctx, union(new_apply, new_git))
- allow_autoprecomp && Pkg._auto_precompile(ctx)
+ allow_autoprecomp && Pkg._auto_precompile(ctx, pkgs)
else
record_project_hash(ctx.env)
write_env(ctx.env)
@@ -1687,8 +2370,10 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}();
end
# Input: name, uuid, and path
-function develop(ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID};
- preserve::PreserveLevel=default_preserve(), platform::AbstractPlatform=HostPlatform())
+function develop(
+ ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID};
+ preserve::PreserveLevel = default_preserve(), platform::AbstractPlatform = HostPlatform()
+ )
assert_can_add(ctx, pkgs)
# no need to look at manifest.. dev will just nuke whatever is there before
for pkg in pkgs
@@ -1697,13 +2382,13 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID};
end
# resolve & apply package versions
pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, preserve, ctx.julia_version)
- update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version)
+ update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version, ctx.registries)
new_apply = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
- download_artifacts(ctx; platform=platform, julia_version=ctx.julia_version)
+ fixups_from_projectfile!(ctx)
+ download_artifacts(ctx; platform = platform, julia_version = ctx.julia_version)
write_env(ctx.env) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io)
- build_versions(ctx, union(new_apply, new_git))
+ show_update(ctx.env, ctx.registries; io = ctx.io)
+ return build_versions(ctx, union(new_apply, new_git))
end
# load version constraint
@@ -1714,8 +2399,10 @@ function up_load_versions!(ctx::Context, pkg::PackageSpec, entry::PackageEntry,
entry.version !== nothing || return false # no version to set
if entry.pinned || level == UPLEVEL_FIXED
pkg.version = entry.version
- pkg.tree_hash = entry.tree_hash
- elseif entry.repo.source !== nothing || source_repo.source !== nothing # repo packages have a version but are treated specially
+ if pkg.path === nothing
+ pkg.tree_hash = entry.tree_hash
+ end
+ elseif source_path === nothing && pkg.path === nothing && (entry.repo.source !== nothing || source_repo.source !== nothing) # repo packages have a version but are treated specially
if source_repo.source !== nothing
pkg.repo = source_repo
else
@@ -1739,7 +2426,7 @@ function up_load_versions!(ctx::Context, pkg::PackageSpec, entry::PackageEntry,
r = level == UPLEVEL_PATCH ? VersionRange(ver.major, ver.minor) :
level == UPLEVEL_MINOR ? VersionRange(ver.major) :
level == UPLEVEL_MAJOR ? VersionRange() :
- error("unexpected upgrade level: $level")
+ error("unexpected upgrade level: $level")
pkg.version = VersionSpec(r)
end
return false
@@ -1748,19 +2435,23 @@ end
up_load_manifest_info!(pkg::PackageSpec, ::Nothing) = nothing
function up_load_manifest_info!(pkg::PackageSpec, entry::PackageEntry)
pkg.name = entry.name # TODO check name is same
- if pkg.repo == GitRepo()
+ # Only restore repo from manifest if we don't already have a path set
+ if pkg.repo == GitRepo() && pkg.path === nothing
pkg.repo = entry.repo # TODO check that repo is same
end
- if pkg.path === nothing
+ # Only set path if tree_hash is not already set (to avoid invalid state where both are set)
+ if pkg.path === nothing && pkg.repo == GitRepo() && pkg.tree_hash === nothing
pkg.path = entry.path
end
- pkg.pinned = entry.pinned
+ return pkg.pinned = entry.pinned
# `pkg.version` and `pkg.tree_hash` is set by `up_load_versions!`
end
-function load_manifest_deps_up(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_ALL)
+function load_manifest_deps_up(
+ env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_ALL
+ )
manifest = env.manifest
project = env.project
explicit_upgraded = Set(pkg.uuid for pkg in pkgs)
@@ -1795,33 +2486,40 @@ function load_manifest_deps_up(env::EnvCache, pkgs::Vector{PackageSpec}=PackageS
end
# The rest of the packages get fixed
- push!(pkgs, PackageSpec(
- uuid = uuid,
- name = entry.name,
- path = entry.path,
- pinned = entry.pinned,
- repo = entry.repo,
- tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
- version = something(entry.version, VersionSpec())
- ))
+ push!(
+ pkgs, PackageSpec(
+ uuid = uuid,
+ name = entry.name,
+ path = entry.path,
+ pinned = entry.pinned,
+ repo = entry.repo,
+ tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
+ version = something(entry.version, VersionSpec()),
+ )
+ )
end
return pkgs
end
function targeted_resolve_up(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version)
- pkgs = load_manifest_deps_up(env, pkgs; preserve=preserve)
+ pkgs = load_manifest_deps_up(env, pkgs; preserve = preserve)
check_registered(registries, pkgs)
deps_map = resolve_versions!(env, registries, pkgs, julia_version, preserve == PRESERVE_ALL_INSTALLED)
return pkgs, deps_map
end
-function up(ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel;
- skip_writing_project::Bool=false, preserve::Union{Nothing,PreserveLevel}=nothing)
+function up(
+ ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel;
+ skip_writing_project::Bool = false, preserve::Union{Nothing, PreserveLevel} = nothing
+ )
+
+ requested_pkgs = pkgs
+
new_git = Set{UUID}()
# TODO check all pkg.version == VersionSpec()
# set version constraints according to `level`
for pkg in pkgs
- source_path, source_repo = get_path_repo(ctx.env.project, pkg.name)
+ source_path, source_repo = get_path_repo(ctx.env.project, ctx.env.project_file, ctx.env.manifest_file, pkg.name)
entry = manifest_info(ctx.env.manifest, pkg.uuid)
new = up_load_versions!(ctx, pkg, entry, source_path, source_repo, level)
new && push!(new_git, pkg.uuid) #TODO put download + push! in utility function
@@ -1838,26 +2536,55 @@ function up(ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel;
check_registered(ctx.registries, pkgs)
deps_map = resolve_versions!(ctx.env, ctx.registries, pkgs, ctx.julia_version, false)
end
- update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version)
+ update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version, ctx.registries)
new_apply = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
- download_artifacts(ctx, julia_version=ctx.julia_version)
+ fixups_from_projectfile!(ctx)
+ download_artifacts(ctx, julia_version = ctx.julia_version)
write_env(ctx.env; skip_writing_project) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io, hidden_upgrades_info = true)
- build_versions(ctx, union(new_apply, new_git))
+ show_update(ctx.env, ctx.registries; io = ctx.io, hidden_upgrades_info = true)
+
+ if length(requested_pkgs) == 1
+ pkg = only(requested_pkgs)
+ entry = manifest_info(ctx.env.manifest, pkg.uuid)
+ if entry === nothing || (entry.path === nothing && entry.repo.source === nothing)
+ # Get current version after the update
+ current_version = entry !== nothing ? entry.version : nothing
+ original_entry = manifest_info(ctx.env.original_manifest, pkg.uuid)
+ original_version = original_entry !== nothing ? original_entry.version : nothing
+
+ # Check if version didn't change and there's a newer version available
+ if current_version == original_version && current_version !== nothing
+ temp_pkg = PackageSpec(name = pkg.name, uuid = pkg.uuid, version = current_version)
+ cinfo = status_compat_info(temp_pkg, ctx.env, ctx.registries)
+ if cinfo !== nothing
+ packages_holding_back, max_version, max_version_compat = cinfo
+ if current_version < max_version
+ printpkgstyle(
+ ctx.io, :Info, "$(pkg.name) can be updated but at the cost of upgrading/downgrading other packages. " *
+ "To force upgrade to the latest version, try `add $(pkg.name)@$(max_version)`", color = Base.info_color()
+ )
+ end
+ end
+ end
+ end
+ end
+
+ return build_versions(ctx, union(new_apply, new_git))
end
-function update_package_pin!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, entry::Union{Nothing, PackageEntry})
+function update_package_pin!(ctx::Context, pkg::PackageSpec, entry::Union{Nothing, PackageEntry})
if entry === nothing
- pkgerror("package $(err_rep(pkg)) not found in the manifest, run `Pkg.resolve()` and retry.")
+ cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()"
+ pkgerror("package $(err_rep(pkg)) not found in the manifest, run `$cmd` and retry.")
end
+ registries = ctx.registries
#if entry.pinned && pkg.version == VersionSpec()
# println(ctx.io, "package $(err_rep(pkg)) already pinned")
#end
# update pinned package
pkg.pinned = true
- if is_stdlib(pkg.uuid)
+ if is_stdlib(pkg.uuid, ctx.julia_version)
return nothing # nothing left to do
elseif pkg.version == VersionSpec()
pkg.version = entry.version # pin at current version
@@ -1878,19 +2605,19 @@ end
is_fully_pinned(ctx::Context) = !isempty(ctx.env.manifest.deps) && all(kv -> last(kv).pinned, ctx.env.manifest.deps)
function pin(ctx::Context, pkgs::Vector{PackageSpec})
- foreach(pkg -> update_package_pin!(ctx.registries, pkg, manifest_info(ctx.env.manifest, pkg.uuid)), pkgs)
+ foreach(pkg -> update_package_pin!(ctx, pkg, manifest_info(ctx.env.manifest, pkg.uuid)), pkgs)
pkgs = load_direct_deps(ctx.env, pkgs)
# TODO: change pin to not take a version and just have it pin on the current version. Then there is no need to resolve after a pin
pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, PRESERVE_TIERED, ctx.julia_version)
- update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version)
+ update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version, ctx.registries)
new = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
- download_artifacts(ctx; julia_version=ctx.julia_version)
+ fixups_from_projectfile!(ctx)
+ download_artifacts(ctx; julia_version = ctx.julia_version)
write_env(ctx.env) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io)
- build_versions(ctx, new)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
+ return build_versions(ctx, new)
end
function update_package_free!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, entry::PackageEntry, err_if_free::Bool)
@@ -1910,62 +2637,71 @@ function update_package_free!(registries::Vector{Registry.RegistryInstance}, pkg
return # -> name, uuid
end
if err_if_free
- pkgerror("expected package $(err_rep(pkg)) to be pinned, tracking a path,",
- " or tracking a repository")
+ pkgerror(
+ "expected package $(err_rep(pkg)) to be pinned, tracking a path,",
+ " or tracking a repository"
+ )
end
return
end
# TODO: this is two technically different operations with the same name
# split into two subfunctions ...
-function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free=true)
+function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free = true)
for pkg in pkgs
entry = manifest_info(ctx.env.manifest, pkg.uuid)
delete!(ctx.env.project.sources, pkg.name)
update_package_free!(ctx.registries, pkg, entry, err_if_free)
end
- if any(pkg -> pkg.version == VersionSpec(), pkgs)
+ return if any(pkg -> pkg.version == VersionSpec(), pkgs)
pkgs = load_direct_deps(ctx.env, pkgs)
check_registered(ctx.registries, pkgs)
# TODO: change free to not take a version and just have it pin on the current version. Then there is no need to resolve after a pin
pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, PRESERVE_TIERED, ctx.julia_version)
- update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version)
+ update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version, ctx.registries)
new = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
+ fixups_from_projectfile!(ctx)
download_artifacts(ctx)
write_env(ctx.env) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
build_versions(ctx, new)
else
foreach(pkg -> manifest_info(ctx.env.manifest, pkg.uuid).pinned = false, pkgs)
write_env(ctx.env)
- show_update(ctx.env, ctx.registries; io=ctx.io)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
end
end
function gen_test_code(source_path::String; test_args::Cmd)
test_file = testfile(source_path)
return """
- $(Base.load_path_setup_code(false))
- cd($(repr(dirname(test_file))))
- append!(empty!(ARGS), $(repr(test_args.exec)))
- include($(repr(test_file)))
- """
+ $(Base.load_path_setup_code(false))
+ cd($(repr(dirname(test_file))))
+ append!(empty!(ARGS), $(repr(test_args.exec)))
+ include($(repr(test_file)))
+ """
end
function get_threads_spec()
- if Threads.nthreads(:interactive) > 0
+ return if haskey(ENV, "JULIA_NUM_THREADS")
+ if isempty(ENV["JULIA_NUM_THREADS"])
+ throw(ArgumentError("JULIA_NUM_THREADS is set to an empty string. It is not clear what Pkg.test should set for `-t` on the test worker."))
+ end
+ # if set, prefer JULIA_NUM_THREADS because this is passed to the test worker via --threads
+ # which takes precedence in the worker
+ ENV["JULIA_NUM_THREADS"]
+ elseif Threads.nthreads(:interactive) > 0
"$(Threads.nthreads(:default)),$(Threads.nthreads(:interactive))"
else
"$(Threads.nthreads(:default))"
end
end
-function gen_subprocess_flags(source_path::String; coverage, julia_args)
+function gen_subprocess_flags(source_path::String; coverage, julia_args::Cmd)
coverage_arg = if coverage isa Bool
# source_path is the package root, not "src" so "ext" etc. is included
coverage ? string("@", source_path) : "none"
@@ -1977,7 +2713,6 @@ function gen_subprocess_flags(source_path::String; coverage, julia_args)
return ```
--code-coverage=$(coverage_arg)
--color=$(Base.have_color === nothing ? "auto" : Base.have_color ? "yes" : "no")
- --check-bounds=yes
--warn-overwrite=yes
--depwarn=$(Base.JLOptions().depwarn == 2 ? "error" : "yes")
--inline=$(Bool(Base.JLOptions().can_inline) ? "yes" : "no")
@@ -1990,7 +2725,7 @@ end
function with_temp_env(fn::Function, temp_env::String)
load_path = copy(LOAD_PATH)
active_project = Base.ACTIVE_PROJECT[]
- try
+ return try
push!(empty!(LOAD_PATH), "@", temp_env)
Base.ACTIVE_PROJECT[] = nothing
fn()
@@ -2005,8 +2740,10 @@ function sandbox_preserve(env::EnvCache, target::PackageSpec, test_project::Stri
env = deepcopy(env)
# include root in manifest (in case any dependencies point back to it)
if env.pkg !== nothing
- env.manifest[env.pkg.uuid] = PackageEntry(;name=env.pkg.name, path=dirname(env.project_file),
- deps=env.project.deps)
+ env.manifest[env.pkg.uuid] = PackageEntry(;
+ name = env.pkg.name, path = dirname(env.project_file),
+ deps = env.project.deps
+ )
end
# if the source manifest is an old format, upgrade the manifest_format so
# that warnings aren't thrown for the temp sandbox manifest
@@ -2025,7 +2762,7 @@ end
function abspath!(env::EnvCache, manifest::Manifest)
for (uuid, entry) in manifest
if entry.path !== nothing
- entry.path = project_rel_path(env, entry.path)
+ entry.path = manifest_rel_path(env, entry.path)
end
end
return manifest
@@ -2034,40 +2771,96 @@ end
function abspath!(env::EnvCache, project::Project)
for (key, entry) in project.sources
if haskey(entry, "path")
- entry["path"] = project_rel_path(env, entry["path"])
+ # Paths in project sources are project-relative, so join with project_file dir, not manifest_file dir
+ entry["path"] = normpath(joinpath(dirname(env.project_file), entry["path"]))
end
end
return project
end
+function sandbox_with_temp_env(
+ fn::Function, ctx::Context, target::PackageSpec, tmp::String,
+ has_sandbox_project::Bool, sandbox_env::EnvCache;
+ force_latest_compatible_version::Bool,
+ allow_earlier_backwards_compatible_versions::Bool,
+ allow_reresolve::Bool
+ )
+ return with_temp_env(tmp) do
+ temp_ctx = Context()
+ if has_sandbox_project
+ abspath!(sandbox_env, temp_ctx.env.project)
+ end
+ temp_ctx.env.project.deps[target.name] = target.uuid
+
+ if force_latest_compatible_version
+ apply_force_latest_compatible_version!(
+ temp_ctx;
+ target_name = target.name,
+ allow_earlier_backwards_compatible_versions,
+ )
+ end
+
+ try
+ Pkg.resolve(temp_ctx; io = devnull, skip_writing_project = true)
+ @debug "Using _parent_ dep graph"
+ catch err # TODO
+ err isa Resolve.ResolverError || rethrow()
+ allow_reresolve || rethrow()
+ @debug err
+ msg = string(
+ "Could not use exact versions of packages in manifest, re-resolving. ",
+ "Note: if you do not check your manifest file into source control, ",
+ "then you can probably ignore this message. ",
+ "However, if you do check your manifest file into source control, ",
+ "then you probably want to pass the `allow_reresolve = false` kwarg ",
+ "when calling the `Pkg.test` function.",
+ )
+ printpkgstyle(ctx.io, :Test, msg, color = Base.warn_color())
+ Pkg.update(temp_ctx; skip_writing_project = true, update_registry = false, io = ctx.io)
+ printpkgstyle(ctx.io, :Test, "Successfully re-resolved")
+ @debug "Using _clean_ dep graph"
+ end
+
+ reset_all_compat!(temp_ctx.env.project)
+ write_env(temp_ctx.env, update_undo = false)
+
+ # Run sandboxed code
+ path_sep = Sys.iswindows() ? ';' : ':'
+ withenv(fn, "JULIA_LOAD_PATH" => "@$(path_sep)$(tmp)", "JULIA_PROJECT" => nothing)
+ end
+end
+
# ctx + pkg used to compute parent dep graph
-function sandbox(fn::Function, ctx::Context, target::PackageSpec,
- sandbox_path::String, sandbox_project_override;
- preferences::Union{Nothing,Dict{String,Any}} = nothing,
- force_latest_compatible_version::Bool=false,
- allow_earlier_backwards_compatible_versions::Bool=true,
- allow_reresolve::Bool=true)
+function sandbox(
+ fn::Function, ctx::Context, target::PackageSpec,
+ sandbox_path::String, sandbox_project_override_in;
+ preferences::Union{Nothing, Dict{String, Any}} = nothing,
+ force_latest_compatible_version::Bool = false,
+ allow_earlier_backwards_compatible_versions::Bool = true,
+ allow_reresolve::Bool = true
+ )
sandbox_project = projectfile_path(sandbox_path)
- mktempdir() do tmp
- tmp_project = projectfile_path(tmp)
+ return mktempdir() do tmp
+ sandbox_project_override_local = sandbox_project_override_in
+ tmp_project = projectfile_path(tmp)
tmp_manifest = manifestfile_path(tmp)
tmp_preferences = joinpath(tmp, first(Base.preferences_names))
# Copy env info over to temp env
has_sandbox_project = false
- if sandbox_project_override === nothing
+ if sandbox_project_override_local === nothing
if isfile(sandbox_project)
- sandbox_project_override = read_project(sandbox_project)
+ sandbox_project_override_local = read_project(sandbox_project)
has_sandbox_project = true
else
- sandbox_project_override = Project()
+ sandbox_project_override_local = Project()
end
end
if !has_sandbox_project
- abspath!(ctx.env, sandbox_project_override)
+ abspath!(ctx.env, sandbox_project_override_local)
end
- Types.write_project(sandbox_project_override, tmp_project)
+ Types.write_project(sandbox_project_override_local, tmp_project)
# create merged manifest
# - copy over active subgraph
@@ -2102,41 +2895,12 @@ function sandbox(fn::Function, ctx::Context, target::PackageSpec,
end
# sandbox
- with_temp_env(tmp) do
- temp_ctx = Context()
- if has_sandbox_project
- abspath!(sandbox_env, temp_ctx.env.project)
- end
- temp_ctx.env.project.deps[target.name] = target.uuid
-
- if force_latest_compatible_version
- apply_force_latest_compatible_version!(
- temp_ctx;
- target_name = target.name,
- allow_earlier_backwards_compatible_versions,
- )
- end
-
- try
- Pkg.resolve(temp_ctx; io=devnull, skip_writing_project=true)
- @debug "Using _parent_ dep graph"
- catch err# TODO
- err isa Resolve.ResolverError || rethrow()
- allow_reresolve || rethrow()
- @debug err
- printpkgstyle(ctx.io, :Test, "Could not use exact versions of packages in manifest. Re-resolving dependencies", color=Base.warn_color())
- Pkg.update(temp_ctx; skip_writing_project=true, update_registry=false, io=ctx.io)
- printpkgstyle(ctx.io, :Test, "Successfully re-resolved")
- @debug "Using _clean_ dep graph"
- end
-
- reset_all_compat!(temp_ctx.env.project)
- write_env(temp_ctx.env, update_undo = false)
-
- # Run sandboxed code
- path_sep = Sys.iswindows() ? ';' : ':'
- withenv(fn, "JULIA_LOAD_PATH" => "@$(path_sep)$(tmp)", "JULIA_PROJECT" => nothing)
- end
+ sandbox_with_temp_env(
+ fn, ctx, target, tmp, has_sandbox_project, sandbox_env;
+ force_latest_compatible_version,
+ allow_earlier_backwards_compatible_versions,
+ allow_reresolve,
+ )
end
end
@@ -2164,7 +2928,7 @@ function gen_target_project(ctx::Context, pkg::PackageSpec, source_path::String,
env = ctx.env
registries = ctx.registries
test_project = Types.Project()
- if projectfile_path(source_path; strict=true) === nothing
+ if projectfile_path(source_path; strict = true) === nothing
# no project file, assuming this is an old REQUIRE package
test_project.deps = copy(env.manifest[pkg.uuid].deps)
if target == "test"
@@ -2172,10 +2936,10 @@ function gen_target_project(ctx::Context, pkg::PackageSpec, source_path::String,
if isfile(test_REQUIRE_path)
@warn "using test/REQUIRE files is deprecated and current support is lacking in some areas"
test_pkgs = parse_REQUIRE(test_REQUIRE_path)
- package_specs = [PackageSpec(name=pkg) for pkg in test_pkgs]
+ package_specs = [PackageSpec(name = pkg) for pkg in test_pkgs]
registry_resolve!(registries, package_specs)
stdlib_resolve!(package_specs)
- ensure_resolved(ctx, env.manifest, package_specs, registry=true)
+ ensure_resolved(ctx, env.manifest, package_specs, registry = true)
for spec in package_specs
test_project.deps[spec.name] = spec.uuid
end
@@ -2211,12 +2975,61 @@ end
testdir(source_path::String) = joinpath(source_path, "test")
testfile(source_path::String) = joinpath(testdir(source_path), "runtests.jl")
-function test(ctx::Context, pkgs::Vector{PackageSpec};
- coverage=false, julia_args::Cmd=``, test_args::Cmd=``,
- test_fn=nothing,
- force_latest_compatible_version::Bool=false,
- allow_earlier_backwards_compatible_versions::Bool=true,
- allow_reresolve::Bool=true)
+
+function run_test_subprocess(io::IO, flags::Cmd, source_path::String, test_args::Cmd; with_threads::Bool)
+ code = gen_test_code(source_path; test_args)
+ threads_arg = with_threads ? `--threads=$(get_threads_spec())` : ``
+ cmd = `$(Base.julia_cmd()) $threads_arg $(flags) --eval $code`
+ return subprocess_handler(cmd, io, "Tests interrupted. Exiting the test process")
+end
+
+function run_test_subprocess_in_env(io::IO, flags::Cmd, source_path::String, test_args::Cmd)
+ path_sep = Sys.iswindows() ? ';' : ':'
+ return withenv("JULIA_LOAD_PATH" => "@$(path_sep)$(testdir(source_path))", "JULIA_PROJECT" => nothing) do
+ run_test_subprocess(io, flags, source_path, test_args; with_threads = false)
+ end
+end
+
+function run_sandboxed_tests!(
+ ctx::Context, pkg::PackageSpec, source_path::String, test_args::Cmd,
+ coverage::Union{Bool, AbstractString}, julia_args::Cmd, test_fn,
+ pkgs_errored::Vector{Tuple{String, Base.Process}}
+ )
+ test_fn !== nothing && test_fn()
+ sandbox_ctx = Context(; io = ctx.io)
+ status(
+ sandbox_ctx.env, sandbox_ctx.registries;
+ mode = PKGMODE_COMBINED,
+ io = sandbox_ctx.io,
+ ignore_indent = false,
+ show_usagetips = false,
+ )
+ flags = gen_subprocess_flags(source_path; coverage, julia_args)
+
+ if should_autoprecompile()
+ cacheflags = parse(CacheFlags, read(`$(Base.julia_cmd()) $(flags) --eval 'show(Base.CacheFlags())'`, String))
+ Pkg.precompile(sandbox_ctx; io = sandbox_ctx.io, configs = flags => cacheflags)
+ end
+
+ printpkgstyle(ctx.io, :Testing, "Running tests...")
+ flush(ctx.io)
+ p, interrupted = run_test_subprocess(ctx.io, flags, source_path, test_args; with_threads = true)
+ if success(p)
+ printpkgstyle(ctx.io, :Testing, pkg.name * " tests passed ")
+ elseif !interrupted
+ push!(pkgs_errored, (pkg.name, p))
+ end
+ return
+end
+
+function test(
+ ctx::Context, pkgs::Vector{PackageSpec};
+ coverage = false, julia_args::Cmd = ``, test_args::Cmd = ``,
+ test_fn = nothing,
+ force_latest_compatible_version::Bool = false,
+ allow_earlier_backwards_compatible_versions::Bool = true,
+ allow_reresolve::Bool = true
+ )
Pkg.instantiate(ctx; allow_autoprecomp = false) # do precomp later within sandbox
# load manifest data
@@ -2237,16 +3050,18 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
# See if we can find the test files for all packages
missing_runtests = String[]
- source_paths = String[] # source_path is the package root (not /src)
+ source_paths = String[] # source_path is the package root (not /src)
for pkg in pkgs
- sourcepath = project_rel_path(ctx.env, source_path(ctx.env.manifest_file, pkg, ctx.julia_version)) # TODO
+ sourcepath = source_path(ctx.env.manifest_file, pkg, ctx.julia_version)
!isfile(testfile(sourcepath)) && push!(missing_runtests, pkg.name)
push!(source_paths, sourcepath)
end
if !isempty(missing_runtests)
- pkgerror(length(missing_runtests) == 1 ? "Package " : "Packages ",
- join(missing_runtests, ", "),
- " did not provide a `test/runtests.jl` file")
+ pkgerror(
+ length(missing_runtests) == 1 ? "Package " : "Packages ",
+ join(missing_runtests, ", "),
+ " did not provide a `test/runtests.jl` file"
+ )
end
# sandbox
@@ -2257,25 +3072,23 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
if testdir(source_path) in dirname.(keys(ctx.env.workspace))
proj = Base.locate_project_file(abspath(testdir(source_path)))
env = EnvCache(proj)
- # Instantiate test env
- Pkg.instantiate(Context(env=env); allow_autoprecomp = false)
- status(env, ctx.registries; mode=PKGMODE_COMBINED, io=ctx.io, ignore_indent = false, show_usagetips = false)
+ # Use a Context pointing at the test env so that instantiate and
+ # precompile operate on the test project rather than the parent.
+ test_ctx = Context(env = env; io = ctx.io)
+ Pkg.instantiate(test_ctx; allow_autoprecomp = false)
+ status(env, ctx.registries; mode = PKGMODE_COMBINED, io = ctx.io, ignore_indent = false, show_usagetips = false)
flags = gen_subprocess_flags(source_path; coverage, julia_args)
if should_autoprecompile()
- cacheflags = Base.CacheFlags(parse(UInt8, read(`$(Base.julia_cmd()) $(flags) --eval 'show(ccall(:jl_cache_flags, UInt8, ()))'`, String)))
- Pkg.precompile(; io=ctx.io, configs = flags => cacheflags)
+ cacheflags = parse(CacheFlags, read(`$(Base.julia_cmd()) $(flags) --eval 'show(Base.CacheFlags())'`, String))
+ # Don't warn about already loaded packages, since we are going to run tests in a new
+ # subprocess anyway.
+ Pkg.precompile(test_ctx; io = ctx.io, warn_loaded = false, configs = flags => cacheflags)
end
printpkgstyle(ctx.io, :Testing, "Running tests...")
flush(ctx.io)
- code = gen_test_code(source_path; test_args)
- cmd = `$(Base.julia_cmd()) $(flags) --eval $code`
-
- path_sep = Sys.iswindows() ? ';' : ':'
- p, interrupted = withenv("JULIA_LOAD_PATH" => "@$(path_sep)$(testdir(source_path))", "JULIA_PROJECT" => nothing) do
- subprocess_handler(cmd, ctx.io, "Tests interrupted. Exiting the test process")
- end
+ p, interrupted = run_test_subprocess_in_env(ctx.io, flags, source_path, test_args)
if success(p)
printpkgstyle(ctx.io, :Testing, pkg.name * " tests passed ")
elseif !interrupted
@@ -2285,47 +3098,37 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
end
# compatibility shim between "targets" and "test/Project.toml"
- local test_project_preferences, test_project_override
- if isfile(projectfile_path(testdir(source_path)))
+ local test_project_override
+ test_project_preferences = if isfile(projectfile_path(testdir(source_path)))
test_project_override = nothing
with_load_path([testdir(source_path), Base.LOAD_PATH...]) do
- test_project_preferences = Base.get_preferences()
+ Base.get_preferences()
end
else
test_project_override = gen_target_project(ctx, pkg, source_path, "test")
with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do
- test_project_preferences = Base.get_preferences()
+ Base.get_preferences()
end
end
# now we sandbox
printpkgstyle(ctx.io, :Testing, pkg.name)
- sandbox(ctx, pkg, testdir(source_path), test_project_override; preferences=test_project_preferences, force_latest_compatible_version, allow_earlier_backwards_compatible_versions, allow_reresolve) do
- test_fn !== nothing && test_fn()
- sandbox_ctx = Context(;io=ctx.io)
- status(sandbox_ctx.env, sandbox_ctx.registries; mode=PKGMODE_COMBINED, io=sandbox_ctx.io, ignore_indent = false, show_usagetips = false)
- flags = gen_subprocess_flags(source_path; coverage,julia_args)
-
- if should_autoprecompile()
- cacheflags = Base.CacheFlags(parse(UInt8, read(`$(Base.julia_cmd()) $(flags) --eval 'show(ccall(:jl_cache_flags, UInt8, ()))'`, String)))
- Pkg.precompile(sandbox_ctx; io=sandbox_ctx.io, configs = flags => cacheflags)
- end
-
- printpkgstyle(ctx.io, :Testing, "Running tests...")
- flush(ctx.io)
- code = gen_test_code(source_path; test_args)
- cmd = `$(Base.julia_cmd()) $(flags) --threads=$(get_threads_spec()) --eval $code`
- p, interrupted = subprocess_handler(cmd, ctx.io, "Tests interrupted. Exiting the test process")
- if success(p)
- printpkgstyle(ctx.io, :Testing, pkg.name * " tests passed ")
- elseif !interrupted
- push!(pkgs_errored, (pkg.name, p))
- end
+ sandbox(
+ ctx, pkg, testdir(source_path), test_project_override;
+ preferences = test_project_preferences,
+ force_latest_compatible_version,
+ allow_earlier_backwards_compatible_versions,
+ allow_reresolve,
+ ) do
+ run_sandboxed_tests!(
+ ctx, pkg, source_path, test_args,
+ coverage, julia_args, test_fn, pkgs_errored,
+ )
end
end
# TODO: Should be included in Base
function signal_name(signal::Integer)
- if signal == Base.SIGHUP
+ return if signal == Base.SIGHUP
"HUP"
elseif signal == Base.SIGINT
"INT"
@@ -2343,9 +3146,9 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
end
# report errors
- if !isempty(pkgs_errored)
+ return if !isempty(pkgs_errored)
function reason(p)
- if Base.process_signaled(p)
+ return if Base.process_signaled(p)
" (received signal: " * signal_name(p.termsignal) * ")"
elseif Base.process_exited(p) && p.exitcode != 1
" (exit code: " * string(p.exitcode) * ")"
@@ -2394,7 +3197,7 @@ end
# Display
-function stat_rep(x::PackageSpec; name=true)
+function stat_rep(x::PackageSpec; name = true)
name = name ? "$(x.name)" : ""
version = x.version == VersionSpec() ? "" : "v$(x.version)"
rev = ""
@@ -2405,7 +3208,7 @@ function stat_rep(x::PackageSpec; name=true)
repo = Operations.is_tracking_repo(x) ? "`$(x.repo.source)$(subdir_str)#$(rev)`" : ""
path = Operations.is_tracking_path(x) ? "$(pathrepr(x.path))" : ""
pinned = x.pinned ? "⚲" : ""
- return join(filter(!isempty, [name,version,repo,path,pinned]), " ")
+ return join(filter(!isempty, [name, version, repo, path, pinned]), " ")
end
print_single(io::IO, pkg::PackageSpec) = print(io, stat_rep(pkg))
@@ -2413,20 +3216,20 @@ print_single(io::IO, pkg::PackageSpec) = print(io, stat_rep(pkg))
is_instantiated(::Nothing) = false
is_instantiated(x::PackageSpec) = x.version != VersionSpec() || is_stdlib(x.uuid)
# Compare an old and new node of the dependency graph and print a single line to summarize the change
-function print_diff(io::IO, old::Union{Nothing,PackageSpec}, new::Union{Nothing,PackageSpec})
- if !is_instantiated(old) && is_instantiated(new)
- printstyled(io, "+ $(stat_rep(new))"; color=:light_green)
+function print_diff(io::IO, old::Union{Nothing, PackageSpec}, new::Union{Nothing, PackageSpec})
+ return if !is_instantiated(old) && is_instantiated(new)
+ printstyled(io, "+ $(stat_rep(new))"; color = :light_green)
elseif !is_instantiated(new)
- printstyled(io, "- $(stat_rep(old))"; color=:light_red)
+ printstyled(io, "- $(stat_rep(old))"; color = :light_red)
elseif is_tracking_registry(old) && is_tracking_registry(new) &&
- new.version isa VersionNumber && old.version isa VersionNumber && new.version != old.version
+ new.version isa VersionNumber && old.version isa VersionNumber && new.version != old.version
if new.version > old.version
- printstyled(io, "↑ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_yellow)
+ printstyled(io, "↑ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_yellow)
else
- printstyled(io, "↓ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_magenta)
+ printstyled(io, "↓ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_magenta)
end
else
- printstyled(io, "~ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_yellow)
+ printstyled(io, "~ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_yellow)
end
end
@@ -2438,15 +3241,15 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist
for reg in regs
reg_pkg = get(reg, pkg.uuid, nothing)
reg_pkg === nothing && continue
- info = Registry.registry_info(reg_pkg)
- reg_compat_info = Registry.compat_info(info)
- versions = keys(reg_compat_info)
+ info = Registry.registry_info(reg, reg_pkg)
+ # Get versions directly from version_info
+ versions = keys(info.version_info)
versions = filter(v -> !Registry.isyanked(info, v), versions)
- max_version_reg = maximum(versions; init=v"0")
+ max_version_reg = maximum(versions; init = v"0")
max_version = max(max_version, max_version_reg)
compat_spec = get_compat_workspace(env, pkg.name)
- versions_in_compat = filter(in(compat_spec), keys(reg_compat_info))
- max_version_in_compat = max(max_version_in_compat, maximum(versions_in_compat; init=v"0"))
+ versions_in_compat = filter(in(compat_spec), versions)
+ max_version_in_compat = max(max_version_in_compat, maximum(versions_in_compat; init = v"0"))
end
max_version == v"0" && return nothing
pkg.version >= max_version && return nothing
@@ -2478,11 +3281,9 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist
for reg in regs
reg_pkg = get(reg, uuid, nothing)
reg_pkg === nothing && continue
- info = Registry.registry_info(reg_pkg)
- reg_compat_info = Registry.compat_info(info)
- compat_info_v = get(reg_compat_info, dep_info.version, nothing)
- compat_info_v === nothing && continue
- compat_info_v_uuid = get(compat_info_v, pkg.uuid, nothing)
+ info = Registry.registry_info(reg, reg_pkg)
+ # Query compressed deps and compat for the specific dependency version (optimized: only fetch this pkg's compat)
+ compat_info_v_uuid = Registry.query_compat_for_version(info, dep_info.version, pkg.uuid)
compat_info_v_uuid === nothing && continue
if !(max_version in compat_info_v_uuid)
push!(packages_holding_back, dep_pkg.name)
@@ -2495,15 +3296,11 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist
for reg in regs
reg_pkg = get(reg, pkg.uuid, nothing)
reg_pkg === nothing && continue
- info = Registry.registry_info(reg_pkg)
- reg_compat_info = Registry.compat_info(info)
- compat_info_v = get(reg_compat_info, pkg.version, nothing)
- versions = keys(reg_compat_info)
- for v in versions
- compat_info_v = get(reg_compat_info, v, nothing)
- compat_info_v === nothing && continue
- compat_info_v_uuid = compat_info_v[JULIA_UUID]
- if VERSION in compat_info_v_uuid
+ info = Registry.registry_info(reg, reg_pkg)
+ # Check all versions for Julia compatibility (optimized: only fetch Julia compat)
+ for v in keys(info.version_info)
+ julia_vspec = Registry.query_compat_for_version(info, v, JULIA_UUID)
+ if julia_vspec !== nothing && VERSION in julia_vspec
push!(julia_compatible_versions, v)
end
end
@@ -2515,7 +3312,7 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist
return sort!(unique!(packages_holding_back)), max_version, max_version_in_compat
end
-function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifest=true, workspace=false)
+function diff_array(old_env::Union{EnvCache, Nothing}, new_env::EnvCache; manifest = true, workspace = false)
function index_pkgs(pkgs, uuid)
idx = findfirst(pkg -> pkg.uuid == uuid, pkgs)
return idx === nothing ? nothing : pkgs[idx]
@@ -2527,9 +3324,9 @@ function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifes
new = manifest ? load_all_deps_loadable(new_env) : load_project_deps(new_env.project, new_env.project_file, new_env.manifest, new_env.manifest_file)
end
- T, S = Union{UUID,Nothing}, Union{PackageSpec,Nothing}
+ T, S = Union{UUID, Nothing}, Union{PackageSpec, Nothing}
if old_env === nothing
- return Tuple{T,S,S}[(pkg.uuid, nothing, pkg)::Tuple{T,S,S} for pkg in new]
+ return Tuple{T, S, S}[(pkg.uuid, nothing, pkg)::Tuple{T, S, S} for pkg in new]
end
if workspace
old = manifest ? load_all_deps(old_env) : load_direct_deps(old_env)
@@ -2538,13 +3335,12 @@ function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifes
end
# merge old and new into single array
all_uuids = union(T[pkg.uuid for pkg in old], T[pkg.uuid for pkg in new])
- return Tuple{T,S,S}[(uuid, index_pkgs(old, uuid), index_pkgs(new, uuid))::Tuple{T,S,S} for uuid in all_uuids]
+ return Tuple{T, S, S}[(uuid, index_pkgs(old, uuid), index_pkgs(new, uuid))::Tuple{T, S, S} for uuid in all_uuids]
end
-function is_package_downloaded(manifest_file::String, pkg::PackageSpec; platform=HostPlatform())
+function is_package_downloaded(manifest_file::String, pkg::PackageSpec; platform = HostPlatform())
sourcepath = source_path(manifest_file, pkg)
- identifier = pkg.name !== nothing ? pkg.name : pkg.uuid
- (sourcepath === nothing) && pkgerror("Could not locate the source code for the $(identifier) package. Are you trying to use a manifest generated by a different version of Julia?")
+ sourcepath === nothing && return false
isdir(sourcepath) || return false
check_artifacts_downloaded(sourcepath; platform) || return false
return true
@@ -2564,11 +3360,13 @@ function status_ext_info(pkg::PackageSpec, env::EnvCache)
# Note: `get_extension` returns nothing for stdlibs that are loaded via `require_stdlib`
ext_loaded = (Base.get_extension(Base.PkgId(pkg.uuid, pkg.name), Symbol(ext)) !== nothing)
# Check if deps are loaded
- extdeps_info= Tuple{String, Bool}[]
+ extdeps_info = Tuple{String, Bool}[]
for extdep in extdeps
if !(haskey(weakdepses, extdep) || haskey(depses, extdep))
- pkgerror(isnothing(pkg.name) ? "M" : "$(pkg.name) has a malformed Project.toml, ",
- "the extension package $extdep is not listed in [weakdeps] or [deps]")
+ pkgerror(
+ isnothing(pkg.name) ? "M" : "$(pkg.name) has a malformed Project.toml, ",
+ "the extension package $extdep is not listed in [weakdeps] or [deps]"
+ )
end
uuid = get(weakdepses, extdep, nothing)
if uuid === nothing
@@ -2598,35 +3396,67 @@ struct PackageStatusData
compat_data::Union{Nothing, Tuple{Vector{String}, VersionNumber, VersionNumber}}
changed::Bool
extinfo::Union{Nothing, Vector{ExtInfo}}
+ deprecation_info::Union{Nothing, Dict{String, Any}}
end
-function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registries::Vector{Registry.RegistryInstance}, header::Symbol,
- uuids::Vector, names::Vector; manifest=true, diff=false, ignore_indent::Bool, workspace::Bool, outdated::Bool, extensions::Bool, io::IO,
- mode::PackageMode, hidden_upgrades_info::Bool, show_usagetips::Bool=true)
- not_installed_indicator = sprint((io, args) -> printstyled(io, args...; color=Base.error_color()), "→", context=io)
- upgradable_indicator = sprint((io, args) -> printstyled(io, args...; color=:green), "⌃", context=io)
- heldback_indicator = sprint((io, args) -> printstyled(io, args...; color=Base.warn_color()), "⌅", context=io)
+function print_status(
+ env::EnvCache, old_env::Union{Nothing, EnvCache}, registries::Vector{Registry.RegistryInstance}, header::Symbol,
+ uuids::Vector, names::Vector; manifest = true, diff = false, ignore_indent::Bool, workspace::Bool, outdated::Bool, deprecated::Bool, extensions::Bool, io::IO,
+ mode::PackageMode, hidden_upgrades_info::Bool, show_usagetips::Bool = true
+ )
+ not_installed_indicator = sprint((io, args) -> printstyled(io, args...; color = Base.error_color()), "→", context = io)
+ upgradable_indicator = sprint((io, args) -> printstyled(io, args...; color = :green), "⌃", context = io)
+ heldback_indicator = sprint((io, args) -> printstyled(io, args...; color = Base.warn_color()), "⌅", context = io)
filter = !isempty(uuids) || !isempty(names)
# setup
xs = diff_array(old_env, env; manifest, workspace)
# filter and return early if possible
if isempty(xs) && !diff
- printpkgstyle(io, header, "$(pathrepr(manifest ? env.manifest_file : env.project_file)) (empty " *
- (manifest ? "manifest" : "project") * ")", ignore_indent)
+ printpkgstyle(
+ io, header, "$(pathrepr(manifest ? env.manifest_file : env.project_file)) (empty " *
+ (manifest ? "manifest" : "project") * ")", ignore_indent
+ )
return nothing
end
- no_changes = all(p-> p[2] == p[3], xs)
+ no_changes = all(p -> p[2] == p[3], xs)
if no_changes
- printpkgstyle(io, Symbol("No packages added to or removed from"), "$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent)
+ if manifest
+ printpkgstyle(io, :Manifest, "No packages added to or removed from $(pathrepr(env.manifest_file))", ignore_indent; color = Base.info_color())
+ else
+ printpkgstyle(io, :Project, "No packages added to or removed from $(pathrepr(env.project_file))", ignore_indent; color = Base.info_color())
+ end
else
- xs = !filter ? xs : eltype(xs)[(id, old, new) for (id, old, new) in xs if (id in uuids || something(new, old).name in names)]
+ if filter
+ # Find packages matching the filter
+ matching_ids = Set{UUID}()
+ for (id, old, new) in xs
+ if (id in uuids || something(new, old).name in names)
+ push!(matching_ids, id)
+ end
+ end
+ # In manifest mode, also include all dependencies of matching packages
+ if manifest && !isempty(matching_ids)
+ deps_to_add = Set{UUID}()
+ for id in matching_ids
+ entry = get(env.manifest, id, nothing)
+ if entry !== nothing
+ union!(deps_to_add, values(entry.deps))
+ end
+ end
+ union!(matching_ids, deps_to_add)
+ end
+ xs = eltype(xs)[(id, old, new) for (id, old, new) in xs if id in matching_ids]
+ end
if isempty(xs)
- printpkgstyle(io, Symbol("No Matches"),
- "in $(diff ? "diff for " : "")$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent)
+ printpkgstyle(
+ io, Symbol("No Matches"),
+ "in $(diff ? "diff for " : "")$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent
+ )
return nothing
end
# main print
- printpkgstyle(io, header, pathrepr(manifest ? env.manifest_file : env.project_file), ignore_indent)
+ readonly_suffix = env.project.readonly ? " (readonly)" : ""
+ printpkgstyle(io, header, pathrepr(manifest ? env.manifest_file : env.project_file) * readonly_suffix, ignore_indent)
if workspace && !manifest
for (path, _) in env.workspace
relative_path = Types.relative_project_path(env.project_file, path)
@@ -2675,14 +3505,27 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie
continue
end
+ # Deprecated info
+ deprecation_info = nothing
+ pkg_deprecated = false
+ if !isnothing(new)
+ pkg_spec = something(new, old)
+ deprecation_info = get_pkg_deprecation_info(pkg_spec, registries)
+ pkg_deprecated = deprecation_info !== nothing
+ end
+
+ # if we are running with deprecated, only show packages that are deprecated
+ if deprecated && !pkg_deprecated
+ continue
+ end
# TODO: Show extension deps for project as well?
pkg_downloaded = !is_instantiated(new) || is_package_downloaded(env.manifest_file, new)
new_ver_avail = !latest_version && !Operations.is_tracking_repo(new) && !Operations.is_tracking_path(new)
- pkg_upgradable = new_ver_avail && isempty(cinfo[1])
- pkg_heldback = new_ver_avail && !isempty(cinfo[1])
+ pkg_upgradable = new_ver_avail && cinfo !== nothing && isempty(cinfo[1])
+ pkg_heldback = new_ver_avail && cinfo !== nothing && !isempty(cinfo[1])
if !pkg_downloaded && (pkg_upgradable || pkg_heldback)
# allow space in the gutter for two icons on a single line
@@ -2693,12 +3536,12 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie
no_visible_packages_heldback &= (!changed || !pkg_heldback)
no_packages_heldback &= !pkg_heldback
- push!(package_statuses, PackageStatusData(uuid, old, new, pkg_downloaded, pkg_upgradable, pkg_heldback, cinfo, changed, ext_info))
+ push!(package_statuses, PackageStatusData(uuid, old, new, pkg_downloaded, pkg_upgradable, pkg_heldback, cinfo, changed, ext_info, deprecation_info))
end
for pkg in package_statuses
- pad = 0
- print_padding(x) = (print(io, x); pad += 1)
+ pad = Ref(0)
+ print_padding(x) = (print(io, x); pad[] += 1)
if !pkg.downloaded
print_padding(not_installed_indicator)
@@ -2712,7 +3555,7 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie
end
# Fill the remaining padding with spaces
- while pad < lpadding
+ while pad[] < lpadding
print_padding(" ")
end
@@ -2720,20 +3563,62 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie
diff ? print_diff(io, pkg.old, pkg.new) : print_single(io, pkg.new)
+ # show if package is yanked
+ pkg_spec = something(pkg.new, pkg.old)
+ if is_pkgversion_yanked(pkg_spec, registries)
+ printstyled(io, " [yanked]"; color = :yellow)
+ end
+
+ # show if package is deprecated
+ if pkg.deprecation_info !== nothing
+ printstyled(io, " [deprecated]"; color = :yellow)
+ end
+
+ # show deprecation details when using --deprecated flag
+ if deprecated && !diff && pkg.deprecation_info !== nothing
+ reason = get(pkg.deprecation_info, "reason", nothing)
+ alternative = get(pkg.deprecation_info, "alternative", nothing)
+ if reason !== nothing
+ printstyled(io, " (reason: ", reason, ")"; color = :yellow)
+ end
+ if alternative !== nothing
+ printstyled(io, " (alternative: ", alternative, ")"; color = :yellow)
+ end
+ end
+
if outdated && !diff && pkg.compat_data !== nothing
packages_holding_back, max_version, max_version_compat = pkg.compat_data
if pkg.new.version !== max_version_compat && max_version_compat != max_version
- printstyled(io, " [ is_pkgversion_yanked(something(pkg.new, pkg.old), registries), package_statuses)
+
+ # Add warning for yanked packages
+ if any_yanked_packages
+ yanked_str = sprint((io, args) -> printstyled(io, args...; color = :yellow), "[yanked]", context = io)
+ printpkgstyle(io, :Warning, """Package versions marked with $yanked_str have been pulled from their registry. \
+ It is recommended to update them to resolve a valid version.""", color = Base.warn_color(), ignore_indent)
+ end
+
+ # Check if any packages are deprecated for info message
+ any_deprecated_packages = any(pkg -> pkg.deprecation_info !== nothing, package_statuses)
+
+ # Add info for deprecated packages (only if not already in deprecated mode)
+ if !deprecated && any_deprecated_packages
+ deprecated_str = sprint((io, args) -> printstyled(io, args...; color = :yellow), "[deprecated]", context = io)
+ tipend = manifest ? " -m" : ""
+ tip = show_usagetips ? " Use `status --deprecated$tipend` to see more information." : ""
+ printpkgstyle(io, :Info, """Packages marked with $deprecated_str are no longer maintained.$tip""", color = Base.info_color(), ignore_indent)
+ end
+
return nothing
end
@@ -2791,8 +3697,8 @@ function git_head_env(env, project_dir)
git_path = LibGit2.path(repo)
project_path = relpath(env.project_file, git_path)
manifest_path = relpath(env.manifest_file, git_path)
- new_env.project = read_project(GitTools.git_file_stream(repo, "HEAD:$project_path", fakeit=true))
- new_env.manifest = read_manifest(GitTools.git_file_stream(repo, "HEAD:$manifest_path", fakeit=true))
+ new_env.project = read_project(GitTools.git_file_stream(repo, "HEAD:$project_path", fakeit = true))
+ new_env.manifest = read_manifest(GitTools.git_file_stream(repo, "HEAD:$manifest_path", fakeit = true))
return new_env
end
catch err
@@ -2805,26 +3711,30 @@ function show_update(env::EnvCache, registries::Vector{Registry.RegistryInstance
old_env = EnvCache()
old_env.project = env.original_project
old_env.manifest = env.original_manifest
- status(env, registries; header=:Updating, mode=PKGMODE_COMBINED, env_diff=old_env, ignore_indent=false, io=io, hidden_upgrades_info)
+ status(env, registries; header = :Updating, mode = PKGMODE_COMBINED, env_diff = old_env, ignore_indent = false, io = io, hidden_upgrades_info)
return nothing
end
-function status(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}=PackageSpec[];
- header=nothing, mode::PackageMode=PKGMODE_PROJECT, git_diff::Bool=false, env_diff=nothing, ignore_indent=true,
- io::IO, workspace::Bool=false, outdated::Bool=false, extensions::Bool=false, hidden_upgrades_info::Bool=false, show_usagetips::Bool=true)
+function status(
+ env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec} = PackageSpec[];
+ header = nothing, mode::PackageMode = PKGMODE_PROJECT, git_diff::Bool = false, env_diff = nothing, ignore_indent = true,
+ io::IO, workspace::Bool = false, outdated::Bool = false, deprecated::Bool = false, extensions::Bool = false, hidden_upgrades_info::Bool = false, show_usagetips::Bool = true
+ )
io == Base.devnull && return
# if a package, print header
if header === nothing && env.pkg !== nothing
- printpkgstyle(io, :Project, string(env.pkg.name, " v", env.pkg.version), true; color=Base.info_color())
+ readonly_status = env.project.readonly ? " (readonly)" : ""
+ printpkgstyle(io, :Project, string(env.pkg.name, " v", env.pkg.version, readonly_status), true; color = Base.info_color())
end
# load old env
old_env = nothing
if git_diff
project_dir = dirname(env.project_file)
- if !ispath(joinpath(project_dir, ".git"))
+ git_repo_dir = discover_repo(project_dir)
+ if git_repo_dir === nothing
@warn "diff option only available for environments in git repositories, ignoring."
else
- old_env = git_head_env(env, project_dir)
+ old_env = git_head_env(env, git_repo_dir)
if old_env === nothing
@warn "could not read project from HEAD, displaying absolute status instead."
end
@@ -2839,15 +3749,25 @@ function status(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pk
diff = old_env !== nothing
header = something(header, diff ? :Diff : :Status)
if mode == PKGMODE_PROJECT || mode == PKGMODE_COMBINED
- print_status(env, old_env, registries, header, filter_uuids, filter_names; manifest=false, diff, ignore_indent, io, workspace, outdated, extensions, mode, hidden_upgrades_info, show_usagetips)
+ print_status(env, old_env, registries, header, filter_uuids, filter_names; manifest = false, diff, ignore_indent, io, workspace, outdated, deprecated, extensions, mode, hidden_upgrades_info, show_usagetips)
end
if mode == PKGMODE_MANIFEST || mode == PKGMODE_COMBINED
- print_status(env, old_env, registries, header, filter_uuids, filter_names; diff, ignore_indent, io, workspace, outdated, extensions, mode, hidden_upgrades_info, show_usagetips)
+ print_status(env, old_env, registries, header, filter_uuids, filter_names; diff, ignore_indent, io, workspace, outdated, deprecated, extensions, mode, hidden_upgrades_info, show_usagetips)
end
- if is_manifest_current(env) === false
- tip = show_usagetips ? " It is recommended to `Pkg.resolve()` or consider `Pkg.update()` if necessary." : ""
- printpkgstyle(io, :Warning, "The project dependencies or compat requirements have changed since the manifest was last resolved.$tip",
- ignore_indent; color=Base.warn_color())
+ return if is_manifest_current(env) === false
+ tip = if show_usagetips
+ if Pkg.in_repl_mode()
+ " It is recommended to `pkg> resolve` or consider `pkg> update` if necessary."
+ else
+ " It is recommended to `Pkg.resolve()` or consider `Pkg.update()` if necessary."
+ end
+ else
+ ""
+ end
+ printpkgstyle(
+ io, :Warning, "The project dependencies or compat requirements have changed since the manifest was last resolved.$tip",
+ ignore_indent; color = Base.warn_color()
+ )
end
end
@@ -2884,7 +3804,7 @@ function print_compat(ctx::Context, pkgs_in::Vector{PackageSpec} = PackageSpec[]
printpkgstyle(io, :Compat, pathrepr(ctx.env.project_file))
names = [pkg.name for pkg in pkgs_in]
pkgs = isempty(pkgs_in) ? ctx.env.project.deps : filter(pkg -> in(first(pkg), names), ctx.env.project.deps)
- add_julia = isempty(pkgs_in) || any(p->p.name == "julia", pkgs_in)
+ add_julia = isempty(pkgs_in) || any(p -> p.name == "julia", pkgs_in)
longest_dep_len = isempty(pkgs) ? length("julia") : max(reduce(max, map(length, collect(keys(pkgs)))), length("julia"))
if add_julia
println(io, compat_line(io, "julia", nothing, get_compat_str(ctx.env.project, "julia"), longest_dep_len))
@@ -2892,13 +3812,16 @@ function print_compat(ctx::Context, pkgs_in::Vector{PackageSpec} = PackageSpec[]
for (dep, uuid) in pkgs
println(io, compat_line(io, dep, uuid, get_compat_str(ctx.env.project, dep), longest_dep_len))
end
+ return
end
print_compat(pkg::String; kwargs...) = print_compat(Context(), pkg; kwargs...)
print_compat(; kwargs...) = print_compat(Context(); kwargs...)
-function apply_force_latest_compatible_version!(ctx::Types.Context;
- target_name = nothing,
- allow_earlier_backwards_compatible_versions::Bool = true)
+function apply_force_latest_compatible_version!(
+ ctx::Types.Context;
+ target_name = nothing,
+ allow_earlier_backwards_compatible_versions::Bool = true
+ )
deps_from_env = load_direct_deps(ctx.env)
deps = [(; name = x.name, uuid = x.uuid) for x in deps_from_env]
for dep in deps
@@ -2914,10 +3837,12 @@ function apply_force_latest_compatible_version!(ctx::Types.Context;
return nothing
end
-function apply_force_latest_compatible_version!(ctx::Types.Context,
- dep::NamedTuple{(:name, :uuid), Tuple{String, Base.UUID}};
- target_name = nothing,
- allow_earlier_backwards_compatible_versions::Bool = true)
+function apply_force_latest_compatible_version!(
+ ctx::Types.Context,
+ dep::NamedTuple{(:name, :uuid), Tuple{String, Base.UUID}};
+ target_name = nothing,
+ allow_earlier_backwards_compatible_versions::Bool = true
+ )
name, uuid = dep
has_compat = haskey(ctx.env.project.compat, name)
if !has_compat
@@ -2957,22 +3882,26 @@ function get_earliest_backwards_compatible_version(ver::Base.VersionNumber)
return Base.VersionNumber(0, 0, ver.patch)
end
-function get_latest_compatible_version(ctx::Types.Context,
- uuid::Base.UUID,
- compat_spec::VersionSpec)
+function get_latest_compatible_version(
+ ctx::Types.Context,
+ uuid::Base.UUID,
+ compat_spec::VersionSpec
+ )
all_registered_versions = get_all_registered_versions(ctx, uuid)
compatible_versions = filter(in(compat_spec), all_registered_versions)
latest_compatible_version = maximum(compatible_versions)
return latest_compatible_version
end
-function get_all_registered_versions(ctx::Types.Context,
- uuid::Base.UUID)
+function get_all_registered_versions(
+ ctx::Types.Context,
+ uuid::Base.UUID
+ )
versions = Set{VersionNumber}()
for reg in ctx.registries
pkg = get(reg, uuid, nothing)
if pkg !== nothing
- info = Registry.registry_info(pkg)
+ info = Registry.registry_info(reg, pkg)
union!(versions, keys(info.version_info))
end
end
diff --git a/src/Pkg.jl b/src/Pkg.jl
index d6260607dd..0495e3667d 100644
--- a/src/Pkg.jl
+++ b/src/Pkg.jl
@@ -2,6 +2,11 @@
module Pkg
+# In Pkg tests we want to avoid Pkg being re-precompiled by subprocesses, so this is enabled in the test suite
+if Base.get_bool_env("JULIA_PKG_DISALLOW_PKG_PRECOMPILATION", false) == true
+ error("Precompiling Pkg is disallowed. JULIA_PKG_DISALLOW_PKG_PRECOMPILATION=$(ENV["JULIA_PKG_DISALLOW_PKG_PRECOMPILATION"])")
+end
+
if isdefined(Base, :Experimental) && isdefined(Base.Experimental, Symbol("@max_methods"))
@eval Base.Experimental.@max_methods 1
end
@@ -15,16 +20,20 @@ export PackageSpec
export PackageMode, PKGMODE_MANIFEST, PKGMODE_PROJECT
export UpgradeLevel, UPLEVEL_MAJOR, UPLEVEL_MINOR, UPLEVEL_PATCH
export PreserveLevel, PRESERVE_TIERED_INSTALLED, PRESERVE_TIERED, PRESERVE_ALL_INSTALLED, PRESERVE_ALL, PRESERVE_DIRECT, PRESERVE_SEMVER, PRESERVE_NONE
-export Registry, RegistrySpec
+export Registry, RegistrySpec, Apps
public activate, add, build, compat, develop, free, gc, generate, instantiate,
- pin, precompile, redo, rm, resolve, status, test, undo, update, why
+ pin, precompile, readonly, redo, rm, resolve, status, test, undo, update, why
depots() = Base.DEPOT_PATH
-function depots1()
- d = depots()
- isempty(d) && Pkg.Types.pkgerror("no depots found in DEPOT_PATH")
- return d[1]
+function depots1(depot_list::Union{String, Vector{String}} = depots())
+ # Get the first depot from a list, with proper error handling
+ if depot_list isa String
+ return depot_list
+ else
+ isempty(depot_list) && Pkg.Types.pkgerror("no depots provided")
+ return depot_list[1]
+ end
end
function pkg_server()
@@ -37,28 +46,63 @@ end
logdir(depot = depots1()) = joinpath(depot, "logs")
devdir(depot = depots1()) = get(ENV, "JULIA_PKG_DEVDIR", joinpath(depot, "dev"))
envdir(depot = depots1()) = joinpath(depot, "environments")
+
+function create_cachedir_tag(cache_dir::AbstractString)
+ return try
+ tag_file = joinpath(cache_dir, "CACHEDIR.TAG")
+ if !isfile(tag_file)
+ write(tag_file, "Signature: 8a477f597d28d172789f06886806bc55\n# This file is a cache directory tag created by Julia Pkg.\n# See https://bford.info/cachedir/\n")
+ end
+ catch
+ # Ignore errors to avoid failing operations on read-only filesystems
+ end
+end
const UPDATED_REGISTRY_THIS_SESSION = Ref(false)
const OFFLINE_MODE = Ref(false)
const RESPECT_SYSIMAGE_VERSIONS = Ref(true)
# For globally overriding in e.g. tests
-const DEFAULT_IO = Ref{Union{IO,Nothing}}(nothing)
+const DEFAULT_IO = Base.ScopedValues.ScopedValue{IO}()
+
+# ScopedValue to track whether we're currently in REPL mode
+const IN_REPL_MODE = Base.ScopedValues.ScopedValue{Bool}()
# See discussion in https://github.com/JuliaLang/julia/pull/52249
function unstableio(@nospecialize(io::IO))
# Needed to prevent specialization https://github.com/JuliaLang/julia/pull/52249#discussion_r1401199265
_io = Base.inferencebarrier(io)
- IOContext{IO}(
+ return IOContext{IO}(
_io,
- get(_io,:color,false) ? Base.ImmutableDict{Symbol,Any}(:color, true) : Base.ImmutableDict{Symbol,Any}()
+ get(_io, :color, false) ? Base.ImmutableDict{Symbol, Any}(:color, true) : Base.ImmutableDict{Symbol, Any}()
)
end
-stderr_f() = something(DEFAULT_IO[], unstableio(stderr))
-stdout_f() = something(DEFAULT_IO[], unstableio(stdout))
+stderr_f() = something(Base.ScopedValues.get(DEFAULT_IO), unstableio(stderr))
+stdout_f() = something(Base.ScopedValues.get(DEFAULT_IO), unstableio(stdout))
const PREV_ENV_PATH = Ref{String}("")
usable_io(io) = (io isa Base.TTY) || (io isa IOContext{IO} && io.io isa Base.TTY)
can_fancyprint(io::IO) = (usable_io(io)) && (get(ENV, "CI", nothing) != "true")
-should_autoprecompile() = Base.JLOptions().use_compiled_modules == 1 && Base.get_bool_env("JULIA_PKG_PRECOMPILE_AUTO", true)
+
+_autoprecompilation_enabled::Bool = true
+const _autoprecompilation_enabled_scoped = Base.ScopedValues.ScopedValue{Bool}(true)
+autoprecompilation_enabled(state::Bool) = (global _autoprecompilation_enabled = state)
+function should_autoprecompile()
+ if Base.JLOptions().use_compiled_modules == 1 &&
+ _autoprecompilation_enabled &&
+ _autoprecompilation_enabled_scoped[] &&
+ Base.get_bool_env("JULIA_PKG_PRECOMPILE_AUTO", true)
+ return true
+ else
+ return false
+ end
+end
+
+"""
+ in_repl_mode()
+
+Check if we're currently executing in REPL mode. This is used to determine
+whether to show tips in REPL format (`pkg> add Foo`) or API format (`Pkg.add("Foo")`).
+"""
+in_repl_mode() = @something(Base.ScopedValues.get(IN_REPL_MODE), false)
include("utils.jl")
include("MiniProgressBars.jl")
@@ -66,12 +110,14 @@ include("GitTools.jl")
include("PlatformEngines.jl")
include("Versions.jl")
include("Registry/Registry.jl")
-include("Resolve/Resolve.jl")
include("Types.jl")
-include("BinaryPlatforms_compat.jl")
+include("Resolve/Resolve.jl")
+include("BinaryPlatformsCompat.jl")
include("Artifacts.jl")
+const Artifacts = PkgArtifacts
include("Operations.jl")
include("API.jl")
+include("Apps/Apps.jl")
include("REPLMode/REPLMode.jl")
import .REPLMode: @pkg_str
@@ -172,7 +218,7 @@ Pkg.add(name="Example", version="0.3") # Specify version; latest release in the
Pkg.add(name="Example", version="0.3.1") # Specify version; exact release
Pkg.add(url="https://github.com/JuliaLang/Example.jl", rev="master") # From url to remote gitrepo
Pkg.add(url="/remote/mycompany/juliapackages/OurPackage") # From path to local gitrepo
-Pkg.add(url="https://github.com/Company/MonoRepo", subdir="juliapkgs/Package.jl)") # With subdir
+Pkg.add(url="https://github.com/Company/MonoRepo", subdir="juliapkgs/Package.jl") # With subdir
```
After the installation of new packages the project will be precompiled. See more at [Environment Precompilation](@ref).
@@ -185,11 +231,21 @@ const add = API.add
Pkg.precompile(; strict::Bool=false, timing::Bool=false)
Pkg.precompile(pkg; strict::Bool=false, timing::Bool=false)
Pkg.precompile(pkgs; strict::Bool=false, timing::Bool=false)
+ Pkg.precompile(f, args...; kwargs...)
Precompile all or specific dependencies of the project in parallel.
Set `timing=true` to show the duration of the precompilation of each dependency.
+To delay autoprecompilation of multiple Pkg actions until the end, use the do-block form shown below.
+This may be most efficient while manipulating the environment in various ways.
+
+```julia
+Pkg.precompile() do
+ # Pkg actions here
+end
+```
+
!!! note
Errors will only throw when precompiling the top-level dependencies, given that
not all manifest dependencies may be loaded by the top-level dependencies on the given system.
@@ -207,6 +263,22 @@ Set `timing=true` to show the duration of the precompilation of each dependency.
!!! compat "Julia 1.9"
Timing mode requires at least Julia 1.9.
+!!! compat "Julia 1.13"
+ The `Pkg.precompile(f, args...; kwargs...)` do-block syntax requires at least Julia 1.13.
+
+During interactive precompilation the following keyboard controls are available:
+
+ * **`d`/`q`/`]`** — Detach. Returns to the REPL while precompilation continues in the background.
+ Use `pkg> precompile --monitor` to reattach, `--stop` to stop, or `--cancel` to cancel.
+ * **`c`** — Cancel. Kills all subprocesses; prompts for Enter to confirm.
+ * **`i`** — Info. Sends a profiling signal to subprocesses for a profile peek.
+ * **`v`** — Toggle verbose mode. Shows timing, worker PID, CPU%, and memory per compiling package.
+ * **`?`/`h`** — Show keyboard shortcut help.
+ * **Ctrl-C** — Interrupt. Sends SIGINT to subprocesses and displays their output.
+
+!!! compat "Julia 1.14"
+ Keyboard controls during precompilation require at least Julia 1.14.
+
# Examples
```julia
Pkg.precompile()
@@ -216,6 +288,39 @@ Pkg.precompile(["Foo", "Bar"])
"""
const precompile = API.precompile
+"""
+ Pkg.autoprecompilation_enabled(state::Bool)
+
+Enable or disable automatic precompilation for Pkg operations.
+
+When `state` is `true` (default), Pkg operations that modify the project environment
+will automatically trigger precompilation of affected packages. When `state` is `false`,
+automatic precompilation is disabled and packages will only be precompiled when
+explicitly requested via [`Pkg.precompile`](@ref).
+
+This setting affects the global state and persists across Pkg operations in the same
+Julia session. It can be used in combination with [`Pkg.precompile`](@ref) do-syntax
+for more fine-grained control over when precompilation occurs.
+
+!!! compat "Julia 1.13"
+ This function requires at least Julia 1.13.
+
+# Examples
+```julia
+# Disable automatic precompilation
+Pkg.autoprecompilation_enabled(false)
+Pkg.add("Example") # Will not trigger auto-precompilation
+Pkg.precompile() # Manual precompilation
+
+# Re-enable automatic precompilation
+Pkg.autoprecompilation_enabled(true)
+Pkg.add("AnotherPackage") # Will trigger auto-precompilation
+```
+
+See also [`Pkg.precompile`](@ref).
+"""
+autoprecompilation_enabled
+
"""
Pkg.rm(pkg::Union{String, Vector{String}}; mode::PackageMode = PKGMODE_PROJECT)
Pkg.rm(pkg::Union{PackageSpec, Vector{PackageSpec}}; mode::PackageMode = PKGMODE_PROJECT)
@@ -243,7 +348,7 @@ If `workspace` is true, this will consider all projects in the workspace and not
const why = API.why
"""
- Pkg.update(; level::UpgradeLevel=UPLEVEL_MAJOR, mode::PackageMode = PKGMODE_PROJECT, preserve::PreserveLevel)
+ Pkg.update(; level::UpgradeLevel=UPLEVEL_MAJOR, mode::PackageMode = PKGMODE_PROJECT, preserve::PreserveLevel, workspace::Bool = false)
Pkg.update(pkg::Union{String, Vector{String}})
Pkg.update(pkg::Union{PackageSpec, Vector{PackageSpec}})
@@ -255,6 +360,8 @@ If packages are given as positional arguments, the `preserve` argument can be us
- `PRESERVE_DIRECT`: Only allow `pkg` and indirect dependencies that are not a direct dependency in the project to update.
- `PRESERVE_NONE`: Allow `pkg` and all its indirect dependencies to update.
+If `workspace` is `true`, packages from all projects in the workspace will be included when no packages are specified.
+
After any package updates the project will be precompiled. See more at [Environment Precompilation](@ref).
See also [`PackageSpec`](@ref), [`PackageMode`](@ref), [`UpgradeLevel`](@ref).
@@ -279,19 +386,18 @@ const update = API.up
!!! compat "Julia 1.9"
Passing a string to `coverage` requires at least Julia 1.9.
-Run the tests for package `pkg`, or for the current project (which thus needs to be a package) if no
-positional argument is given to `Pkg.test`. A package is tested by running its
-`test/runtests.jl` file.
+Run the tests for the given package(s), or for the current project if no positional argument is given to `Pkg.test`
+(the current project would need to be a package). The package is tested by running its `test/runtests.jl` file.
-The tests are run by generating a temporary environment with only the `pkg` package
-and its (recursive) dependencies in it. If a manifest file exists and the `allow_reresolve`
-keyword argument is set to `false`, the versions in the manifest file are used.
-Otherwise a feasible set of packages is resolved and installed.
+The tests are run in a temporary environment that also includes the test specific dependencies
+of the package. The versions of dependencies in the current project are used for the
+test environment unless there is a compatibility conflict between the version of the dependencies and
+the test-specific dependencies. In that case, if `allow_reresolve` is `false` an error is thrown and
+if `allow_reresolve` is `true` a feasible set of versions of the dependencies is resolved and used.
-During the tests, test-specific dependencies are active, which are
-given in the project file as e.g.
+Test-specific dependencies are declared in the project file as:
-```
+```toml
[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
@@ -299,26 +405,27 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
test = ["Test"]
```
-The tests are executed in a new process with `check-bounds=yes` and by default `startup-file=no`.
+The tests are executed in a new process with the same `check-bounds` setting as the current Julia session and by default `startup-file=no`.
If using the startup file (`~/.julia/config/startup.jl`) is desired, start julia with `--startup-file=yes`.
+
Inlining of functions during testing can be disabled (for better coverage accuracy)
by starting julia with `--inline=no`. The tests can be run as if different command line arguments were
passed to julia by passing the arguments instead to the `julia_args` keyword argument, e.g.
-```
+```julia
Pkg.test("foo"; julia_args=["--inline"])
```
To pass some command line arguments to be used in the tests themselves, pass the arguments to the
`test_args` keyword argument. These could be used to control the code being tested, or to control the
tests in some way. For example, the tests could have optional additional tests:
-```
+```julia
if "--extended" in ARGS
@test some_function()
end
```
which could be enabled by testing with
-```
+```julia
Pkg.test("foo"; test_args=["--extended"])
```
"""
@@ -345,52 +452,77 @@ const gc = API.gc
Pkg.build(pkg::Union{String, Vector{String}}; verbose = false, io::IO=stderr)
Pkg.build(pkgs::Union{PackageSpec, Vector{PackageSpec}}; verbose = false, io::IO=stderr)
+**Keyword arguments:**
+ - `verbose::Bool=false`: print the build output to `stdout`/`stderr` instead of redirecting to the `build.log` file.
+ - `allow_reresolve::Bool=true`: allow Pkg to reresolve the package versions in the build environment
+
+!!! compat "Julia 1.13"
+ `allow_reresolve` requires at least Julia 1.13.
+
Run the build script in `deps/build.jl` for `pkg` and all of its dependencies in
depth-first recursive order.
If no argument is given to `build`, the current project is built, which thus needs
to be a package.
This function is called automatically on any package that gets installed
for the first time.
-`verbose = true` prints the build output to `stdout`/`stderr` instead of
-redirecting to the `build.log` file.
+
+The build takes place in a new process matching the current process, with the default `startup-file=no`.
+If using the startup file (`~/.julia/config/startup.jl`) is desired, start julia with an explicit `--startup-file=yes`.
"""
const build = API.build
"""
- Pkg.pin(pkg::Union{String, Vector{String}}; io::IO=stderr, all_pkgs::Bool=false)
- Pkg.pin(pkgs::Union{PackageSpec, Vector{PackageSpec}}; io::IO=stderr, all_pkgs::Bool=false)
+ Pkg.pin(pkg::Union{String, Vector{String}}; io::IO=stderr, all_pkgs::Bool=false, workspace::Bool=false)
+ Pkg.pin(pkgs::Union{PackageSpec, Vector{PackageSpec}}; io::IO=stderr, all_pkgs::Bool=false, workspace::Bool=false)
Pin a package to the current version (or the one given in the `PackageSpec`) or to a certain
git revision. A pinned package is never automatically updated: if `pkg` is tracking a path,
or a repository, those remain tracked but will not update.
To get updates from the origin path or remote repository the package must first be freed.
+If `workspace` is `true` and `all_pkgs` is `true`, packages from all projects in the workspace
+will be included.
+
!!! compat "Julia 1.7"
The `all_pkgs` kwarg was introduced in julia 1.7.
# Examples
```julia
+# Pin a package to its current version
Pkg.pin("Example")
+
+# Pin a package to a specific version
Pkg.pin(name="Example", version="0.3.1")
+
+# Pin all packages in the project
Pkg.pin(all_pkgs = true)
```
"""
const pin = API.pin
"""
- Pkg.free(pkg::Union{String, Vector{String}}; io::IO=stderr, all_pkgs::Bool=false)
- Pkg.free(pkgs::Union{PackageSpec, Vector{PackageSpec}}; io::IO=stderr, all_pkgs::Bool=false)
+ Pkg.free(pkg::Union{String, Vector{String}}; io::IO=stderr, all_pkgs::Bool=false, workspace::Bool=false)
+ Pkg.free(pkgs::Union{PackageSpec, Vector{PackageSpec}}; io::IO=stderr, all_pkgs::Bool=false, workspace::Bool=false)
If `pkg` is pinned, remove the pin.
If `pkg` is tracking a path, e.g. after [`Pkg.develop`](@ref), go back to tracking registered versions.
To free all dependencies set `all_pkgs=true`.
+If `workspace` is `true` and `all_pkgs` is `true`, packages from all projects in the workspace
+will be included.
+
!!! compat "Julia 1.7"
The `all_pkgs` kwarg was introduced in julia 1.7.
# Examples
```julia
+# Free a single package (remove pin or stop tracking path)
Pkg.free("Package")
+
+# Free multiple packages
+Pkg.free(["PackageA", "PackageB"])
+
+# Free all packages in the project
Pkg.free(all_pkgs = true)
```
@@ -473,14 +605,14 @@ Request a `ProjectInfo` struct which contains information about the active proje
# `ProjectInfo` fields
-| Field | Description |
-|:-------------|:--------------------------------------------------------------------------------------------|
-| name | The project's name |
-| uuid | The project's UUID |
-| version | The project's version |
-| ispackage | Whether the project is a package (has a name and uuid) |
-| dependencies | The project's direct dependencies as a `Dict` which maps dependency name to dependency UUID |
-| path | The location of the project file which defines the active project |
+| Field | Description |
+|:---------------|:--------------------------------------------------------------------------------------------|
+| `name` | The project's name |
+| `uuid` | The project's UUID |
+| `version` | The project's version |
+| `ispackage` | Whether the project is a package (has a name and uuid) |
+| `dependencies` | The project's direct dependencies as a `Dict` which maps dependency name to dependency UUID |
+| `path` | The location of the project file which defines the active project |
"""
const project = API.project
@@ -499,10 +631,11 @@ dependencies in the manifest and instantiate the resulting project.
`julia_version_strict=true` will turn manifest version check failures into errors instead of logging warnings.
After packages have been installed the project will be precompiled.
-See more at [Environment Precompilation](@ref).
+See more and how to disable auto-precompilation at [Environment Precompilation](@ref).
!!! compat "Julia 1.12"
The `julia_version_strict` keyword argument requires at least Julia 1.12.
+
"""
const instantiate = API.instantiate
@@ -524,6 +657,7 @@ Print out the status of the project/manifest.
Packages marked with `⌃` have new versions that can be installed, e.g. via [`Pkg.update`](@ref).
Those marked with `⌅` have new versions available, but cannot be installed due to compatibility conflicts with other packages. To see why, set the
keyword argument `outdated=true`.
+Packages marked with `[yanked]` are yanked versions that should be updated or replaced as they may contain bugs or security vulnerabilities.
Setting `outdated=true` will only show packages that are not on the latest version,
their maximum version and why they are not on the latest version (either due to other
@@ -585,9 +719,21 @@ const compat = API.compat
Pkg.activate([s::String]; shared::Bool=false, io::IO=stderr)
Pkg.activate(; temp::Bool=false, shared::Bool=false, io::IO=stderr)
-Activate the environment at `s`. The active environment is the environment
-that is modified by executing package commands.
-The logic for what path is activated is as follows:
+Activate the environment at `s`, or return to the default environment if no argument is given.
+The active environment is the environment that is modified by executing package commands.
+Activating an environment only affects the current Julia session and does not persist when
+you restart Julia (unless you use the `--project` startup flag).
+
+# Returning to the default environment
+
+If no argument is given to `activate`, this returns you to the default shared environment
+(typically `@v#.#` in `~/.julia/environments/v#.#/`). This is the standard way to "deactivate"
+a project environment and return to your base package setup. There is no separate `deactivate`
+command—`Pkg.activate()` with no arguments serves this purpose.
+
+# Activating a path
+
+When `s` is provided, the logic for what path is activated is as follows:
* If `shared` is `true`, the first existing environment named `s` from the depots
in the depot stack will be activated. If no such environment exists,
@@ -599,15 +745,18 @@ The logic for what path is activated is as follows:
activate the environment at the tracked path.
* Otherwise, `s` is interpreted as a non-existing path, which is then activated.
-If no argument is given to `activate`, then use the first project found in `LOAD_PATH`
-(ignoring `"@"`). For the default value of `LOAD_PATH`, the result is to activate the
-`@v#.#` environment.
-
# Examples
-```
+```julia
+# Return to default environment (deactivate current project)
Pkg.activate()
+
+# Activate a project in a specific directory
Pkg.activate("local/path")
+
+# Activate a developed package by name
Pkg.activate("MyDependency")
+
+# Create and activate a temporary environment
Pkg.activate(; temp=true)
```
@@ -627,7 +776,7 @@ versions that are already downloaded in version resolution.
To work in offline mode across Julia sessions you can set the environment
variable `JULIA_PKG_OFFLINE` to `"true"` before starting Julia.
"""
-offline(b::Bool=true) = (OFFLINE_MODE[] = b; nothing)
+offline(b::Bool = true) = (OFFLINE_MODE[] = b; nothing)
"""
Pkg.respect_sysimage_versions(b::Bool=true)
@@ -640,7 +789,7 @@ If this option is enabled, Pkg will only install packages that have been put int
Also, trying to add a package at a URL or `develop` a package that is in the sysimage
will error.
"""
-respect_sysimage_versions(b::Bool=true) = (RESPECT_SYSIMAGE_VERSIONS[] = b; nothing)
+respect_sysimage_versions(b::Bool = true) = (RESPECT_SYSIMAGE_VERSIONS[] = b; nothing)
"""
PackageSpec(name::String, [uuid::UUID, version::VersionNumber])
@@ -701,7 +850,17 @@ Other choices for `protocol` are `"https"` or `"git"`.
```julia-repl
julia> Pkg.setprotocol!(domain = "github.com", protocol = "ssh")
+# Use HTTPS for GitHub (default, good for most users)
+julia> Pkg.setprotocol!(domain = "github.com", protocol = "https")
+
+# Reset to default (let package developer decide)
+julia> Pkg.setprotocol!(domain = "github.com", protocol = nothing)
+
+# Configure a custom domain without specifying a protocol (the default is used)
julia> Pkg.setprotocol!(domain = "gitlab.mycompany.com")
+
+# Use Git protocol for a custom domain
+julia> Pkg.setprotocol!(domain = "gitlab.mycompany.com", protocol = "git")
```
"""
const setprotocol! = API.setprotocol!
@@ -758,14 +917,6 @@ Below is a comparison between the REPL mode and the functional API::
"""
const RegistrySpec = Registry.RegistrySpec
-"""
- upgrade_manifest()
- upgrade_manifest(manifest_path::String)
-
-Upgrades the format of the current or specified manifest file from v1.0 to v2.0 without re-resolving.
-"""
-const upgrade_manifest = API.upgrade_manifest
-
"""
is_manifest_current(path::AbstractString)
@@ -776,19 +927,37 @@ If the manifest doesn't have the project hash recorded, or if there is no manife
This function can be used in tests to verify that the manifest is synchronized with the project file:
- using Pkg, Test, Package
- @test Pkg.is_manifest_current(pkgdir(Package))
+```julia
+using Pkg, Test
+@test Pkg.is_manifest_current(pwd()) # Check current project
+@test Pkg.is_manifest_current("/path/to/project") # Check specific project
+```
"""
const is_manifest_current = API.is_manifest_current
+"""
+ readonly([state::Bool], [ctx::Context])
+
+Get or set the readonly state of the current environment.
+
+# Examples
+```julia-repl
+julia> Pkg.readonly() # check current readonly state
+false
+
+julia> Pkg.readonly(true) # enable readonly mode
+false # returns previous state
+
+julia> Pkg.readonly()
+true
+
+julia> Pkg.readonly(false) # disable readonly mode
+true
+```
+"""
+const readonly = API.readonly
+
function __init__()
- DEFAULT_IO[] = nothing
- Pkg.UPDATED_REGISTRY_THIS_SESSION[] = false
- if !isassigned(Base.PKG_PRECOMPILE_HOOK)
- # allows Base to use Pkg.precompile during loading
- # disable via `Base.PKG_PRECOMPILE_HOOK[] = Returns(nothing)`
- Base.PKG_PRECOMPILE_HOOK[] = precompile
- end
OFFLINE_MODE[] = Base.get_bool_env("JULIA_PKG_OFFLINE", false)
_auto_gc_enabled[] = Base.get_bool_env("JULIA_PKG_GC_AUTO", true)
return nothing
@@ -799,7 +968,7 @@ end
################
function installed()
- @warn "Pkg.installed() is deprecated"
+ @warn "`Pkg.installed()` is deprecated. Use `Pkg.dependencies()` instead." maxlog = 1
deps = dependencies()
installs = Dict{String, VersionNumber}()
for (uuid, dep) in deps
@@ -811,7 +980,7 @@ function installed()
end
function dir(pkg::String, paths::AbstractString...)
- @warn "`Pkg.dir(pkgname, paths...)` is deprecated; instead, do `import $pkg; joinpath(dirname(pathof($pkg)), \"..\", paths...)`." maxlog=1
+ @warn "`Pkg.dir(pkgname, paths...)` is deprecated; instead, do `import $pkg; joinpath(dirname(pathof($pkg)), \"..\", paths...)`." maxlog = 1
pkgid = Base.identify_package(pkg)
pkgid === nothing && return nothing
path = Base.locate_package(pkgid)
@@ -823,9 +992,9 @@ end
# AUTO GC #
###########
-const DEPOT_ORPHANAGE_TIMESTAMPS = Dict{String,Float64}()
+const DEPOT_ORPHANAGE_TIMESTAMPS = Dict{String, Float64}()
const _auto_gc_enabled = Ref{Bool}(true)
-function _auto_gc(ctx::Types.Context; collect_delay::Period = Day(7))
+function _auto_gc(ctx::Types.Context)
if !_auto_gc_enabled[]
return
end
@@ -836,19 +1005,19 @@ function _auto_gc(ctx::Types.Context; collect_delay::Period = Day(7))
# `orphaned.toml` file, which should tell us how long since the last time
# we GC'ed.
orphanage_path = joinpath(logdir(depots1()), "orphaned.toml")
- delay_secs = Second(collect_delay).value
+ delay_secs = Second(Day(7)).value # Run auto-GC at most once every 7 days
curr_time = time()
if curr_time - get(DEPOT_ORPHANAGE_TIMESTAMPS, depots1(), 0.0) >= delay_secs
DEPOT_ORPHANAGE_TIMESTAMPS[depots1()] = mtime(orphanage_path)
end
- if curr_time - DEPOT_ORPHANAGE_TIMESTAMPS[depots1()] > delay_secs
+ return if curr_time - DEPOT_ORPHANAGE_TIMESTAMPS[depots1()] > delay_secs
printpkgstyle(ctx.io, :Info, "We haven't cleaned this depot up for a bit, running Pkg.gc()...", color = Base.info_color())
try
- Pkg.gc(ctx; collect_delay)
+ Pkg.gc(ctx)
DEPOT_ORPHANAGE_TIMESTAMPS[depots1()] = curr_time
catch ex
- @error("GC failed", exception=ex)
+ @error("GC failed", exception = ex)
end
end
end
@@ -858,12 +1027,19 @@ end
# Precompilation #
##################
-function _auto_precompile(ctx::Types.Context, pkgs::Vector{PackageSpec}=PackageSpec[]; warn_loaded = true, already_instantiated = false)
- if should_autoprecompile()
- Pkg.precompile(ctx, pkgs; internal_call=true, warn_loaded = warn_loaded, already_instantiated = already_instantiated)
+function _auto_precompile(ctx::Types.Context, pkgs::Vector{PackageSpec} = PackageSpec[]; warn_loaded = true, already_instantiated = false)
+ return if should_autoprecompile()
+ # Auto precompile runs in foreground with detachable support
+ Pkg.precompile(ctx, pkgs; internal_call = true, warn_loaded = warn_loaded, already_instantiated = already_instantiated)
end
end
include("precompile.jl")
+# Reset globals that might have been mutated during precompilation.
+Pkg.UPDATED_REGISTRY_THIS_SESSION[] = false
+PREV_ENV_PATH[] = ""
+Types.STDLIB[] = nothing
+empty!(Registry.REGISTRY_CACHE)
+
end # module
diff --git a/src/PlatformEngines.jl b/src/PlatformEngines.jl
index 98f1934559..917138fd9c 100644
--- a/src/PlatformEngines.jl
+++ b/src/PlatformEngines.jl
@@ -4,15 +4,17 @@
module PlatformEngines
-using SHA, Downloads, Tar
-import ...Pkg: Pkg, TOML, pkg_server, depots1, can_fancyprint, stderr_f
+using SHA, Downloads, Tar, Dates, Printf
+import ...Pkg: Pkg, TOML, pkg_server, depots1, can_fancyprint, stderr_f, atomic_toml_write
using ..MiniProgressBars
-using Base.BinaryPlatforms, p7zip_jll
+using Base.BinaryPlatforms, p7zip_jll, Zstd_jll
-export verify, unpack, package, download_verify_unpack
+export verify, unpack, package, download_verify_unpack, get_extract_cmd, detect_archive_format
const EXE7Z_LOCK = ReentrantLock()
const EXE7Z = Ref{String}()
+const EXEZSTD_LOCK = ReentrantLock()
+const EXEZSTD = Ref{String}()
function exe7z()
# If the JLL is available, use the wrapper function defined in there
@@ -20,7 +22,7 @@ function exe7z()
return p7zip_jll.p7zip()
end
- lock(EXE7Z_LOCK) do
+ return lock(EXE7Z_LOCK) do
if !isassigned(EXE7Z)
EXE7Z[] = find7z()
end
@@ -28,6 +30,20 @@ function exe7z()
end
end
+function exezstd()
+ # If the JLL is available, use the wrapper function defined in there
+ if Zstd_jll.is_available()
+ return Zstd_jll.zstd()
+ end
+
+ return lock(EXEZSTD_LOCK) do
+ if !isassigned(EXEZSTD)
+ EXEZSTD[] = findzstd()
+ end
+ return Cmd([EXEZSTD[]])
+ end
+end
+
function find7z()
name = "7z"
Sys.iswindows() && (name = "$name.exe")
@@ -40,16 +56,28 @@ function find7z()
error("7z binary not found")
end
+function findzstd()
+ name = "zstd"
+ Sys.iswindows() && (name = "$name.exe")
+ for dir in (joinpath("..", "libexec"), ".")
+ path = normpath(Sys.BINDIR::String, dir, name)
+ isfile(path) && return path
+ end
+ path = Sys.which(name)
+ path !== nothing && return path
+ error("zstd binary not found")
+end
+
is_secure_url(url::AbstractString) =
occursin(r"^(https://|\w+://(127\.0\.0\.1|localhost)(:\d+)?($|/))"i, url)
function get_server_dir(
- url :: AbstractString,
- server :: Union{AbstractString, Nothing} = pkg_server(),
-)
+ url::AbstractString,
+ server::Union{AbstractString, Nothing} = pkg_server(),
+ )
server === nothing && return
url == server || startswith(url, "$server/") || return
- m = match(r"^\w+://([^\\/]+)(?:$|/)", server)
+ m = match(r"^\w+:///?([^\\/]+)(?:$|/)", server)
if m === nothing
@warn "malformed Pkg server value" server
return
@@ -60,7 +88,7 @@ function get_server_dir(
return joinpath(depots1(), "servers", dir)
end
-const AUTH_ERROR_HANDLERS = Pair{Union{String, Regex},Any}[]
+const AUTH_ERROR_HANDLERS = Pair{Union{String, Regex}, Any}[]
function handle_auth_error(url, err; verbose::Bool = false)
handled, should_retry = false, false
@@ -111,23 +139,23 @@ function get_auth_header(url::AbstractString; verbose::Bool = false)
server_dir = get_server_dir(url)
server_dir === nothing && return
auth_file = joinpath(server_dir, "auth.toml")
- isfile(auth_file) || return handle_auth_error(url, "no-auth-file"; verbose=verbose)
+ isfile(auth_file) || return handle_auth_error(url, "no-auth-file"; verbose = verbose)
# TODO: check for insecure auth file permissions
if !is_secure_url(url)
- @warn "refusing to send auth info over insecure connection" url=url
- return handle_auth_error(url, "insecure-connection"; verbose=verbose)
+ @warn "refusing to send auth info over insecure connection" url = url
+ return handle_auth_error(url, "insecure-connection"; verbose = verbose)
end
# parse the auth file
auth_info = try
TOML.parsefile(auth_file)
catch err
- @error "malformed auth file" file=auth_file err=err
- return handle_auth_error(url, "malformed-file"; verbose=verbose)
+ @error "malformed auth file" file = auth_file err = err
+ return handle_auth_error(url, "malformed-file"; verbose = verbose)
end
# check for an auth token
if !haskey(auth_info, "access_token")
- @warn "auth file without access_token field" file=auth_file
- return handle_auth_error(url, "no-access-token"; verbose=verbose)
+ @warn "auth file without access_token field" file = auth_file
+ return handle_auth_error(url, "no-access-token"; verbose = verbose)
end
auth_token = auth_info["access_token"]::String
auth_header = "Authorization" => "Bearer $auth_token"
@@ -141,44 +169,46 @@ function get_auth_header(url::AbstractString; verbose::Bool = false)
end
# if token is good until ten minutes from now, use it
time_now = time()
- if expires_at ≥ time_now + 10*60 # ten minutes
+ if expires_at ≥ time_now + 10 * 60 # ten minutes
return auth_header
end
if !haskey(auth_info, "refresh_url") || !haskey(auth_info, "refresh_token")
- if expires_at ≤ time_now
- @warn "expired auth without refresh keys" file=auth_file
+ if expires_at ≤ time_now
+ @warn "expired auth without refresh keys" file = auth_file
end
# try it anyway since we can't refresh
- return something(handle_auth_error(url, "no-refresh-key"; verbose=verbose), auth_header)
+ return something(handle_auth_error(url, "no-refresh-key"; verbose = verbose), auth_header)
end
refresh_url = auth_info["refresh_url"]::String
if !is_secure_url(refresh_url)
- @warn "ignoring insecure auth refresh URL" url=refresh_url
- return something(handle_auth_error(url, "insecure-refresh-url"; verbose=verbose), auth_header)
+ @warn "ignoring insecure auth refresh URL" url = refresh_url
+ return something(handle_auth_error(url, "insecure-refresh-url"; verbose = verbose), auth_header)
end
- verbose && @info "Refreshing expired auth token..." file=auth_file
+ verbose && @info "Refreshing expired auth token..." file = auth_file
tmp = tempname()
refresh_token = auth_info["refresh_token"]::String
refresh_auth = "Authorization" => "Bearer $refresh_token"
- try download(refresh_url, tmp, auth_header=refresh_auth, verbose=verbose)
+ try
+ download(refresh_url, tmp, auth_header = refresh_auth, verbose = verbose)
catch err
- @warn "token refresh failure" file=auth_file url=refresh_url err=err
- rm(tmp, force=true)
- return handle_auth_error(url, "token-refresh-failed"; verbose=verbose)
+ @warn "token refresh failure" file = auth_file url = refresh_url err = err
+ rm(tmp, force = true)
+ return handle_auth_error(url, "token-refresh-failed"; verbose = verbose)
end
- auth_info = try TOML.parsefile(tmp)
+ auth_info = try
+ TOML.parsefile(tmp)
catch err
- @warn "discarding malformed auth file" url=refresh_url err=err
- rm(tmp, force=true)
- return something(handle_auth_error(url, "malformed-file"; verbose=verbose), auth_header)
+ @warn "discarding malformed auth file" url = refresh_url err = err
+ rm(tmp, force = true)
+ return something(handle_auth_error(url, "malformed-file"; verbose = verbose), auth_header)
end
if !haskey(auth_info, "access_token")
if haskey(auth_info, "refresh_token")
auth_info["refresh_token"] = "*"^64
end
- @warn "discarding auth file without access token" auth=auth_info
- rm(tmp, force=true)
- return something(handle_auth_error(url, "no-access-token"; verbose=verbose), auth_header)
+ @warn "discarding auth file without access token" auth = auth_info
+ rm(tmp, force = true)
+ return something(handle_auth_error(url, "no-access-token"; verbose = verbose), auth_header)
end
if haskey(auth_info, "expires_in")
expires_in = auth_info["expires_in"]
@@ -188,12 +218,7 @@ function get_auth_header(url::AbstractString; verbose::Bool = false)
auth_info["expires_at"] = expires_at
end
end
- let auth_info = auth_info
- open(tmp, write=true) do io
- TOML.print(io, auth_info, sorted=true)
- end
- end
- mv(tmp, auth_file, force=true)
+ atomic_toml_write(auth_file, auth_info, sorted = true)
access_token = auth_info["access_token"]::String
return "Authorization" => "Bearer $access_token"
end
@@ -216,7 +241,7 @@ const CI_VARIABLES = [
]
function get_metadata_headers(url::AbstractString)
- headers = Pair{String,String}[]
+ headers = Pair{String, String}[]
server = pkg_server()
server_dir = get_server_dir(url, server)
server_dir === nothing && return headers
@@ -235,12 +260,25 @@ function get_metadata_headers(url::AbstractString)
end
push!(headers, "Julia-CI-Variables" => join(ci_info, ';'))
push!(headers, "Julia-Interactive" => string(isinteractive()))
+
+ # Add Accept-Encoding header only for compressed archive resources
+ # (registries, packages, artifacts - not for metadata endpoints like /registries or /meta)
+ # Don't use zstd for registries on Windows due to backwards compatibility with older Julia versions
+ # (7z can't decompress zstd until v17.6, older Julia versions on Windows only have 7z available)
+ if occursin(r"/(registry|package|artifact)/", url)
+ if Sys.iswindows() && occursin(r"/registry/", url)
+ # Skip zstd for registries on Windows
+ else
+ push!(headers, "Accept-Encoding" => "zstd, gzip")
+ end
+ end
+
for (key, val) in ENV
m = match(r"^JULIA_PKG_SERVER_([A-Z0-9_]+)$"i, key)
m === nothing && continue
val = strip(val)
isempty(val) && continue
- words = split(m.captures[1], '_', keepempty=false)
+ words = split(m.captures[1], '_', keepempty = false)
isempty(words) && continue
hdr = "Julia-" * join(map(titlecase, words), '-')
any(hdr == k for (k, v) in headers) && continue
@@ -250,16 +288,16 @@ function get_metadata_headers(url::AbstractString)
end
function download(
- url::AbstractString,
- dest::AbstractString;
- verbose::Bool = false,
- headers::Vector{Pair{String,String}} = Pair{String,String}[],
- auth_header::Union{Pair{String,String}, Nothing} = nothing,
- io::IO=stderr_f(),
- progress::Union{Nothing,Function} = nothing, # (total, now) -> nothing
-)
+ url::AbstractString,
+ dest::AbstractString;
+ verbose::Bool = false,
+ headers::Vector{Pair{String, String}} = Pair{String, String}[],
+ auth_header::Union{Pair{String, String}, Nothing} = nothing,
+ io::IO = stderr_f(),
+ progress::Union{Nothing, Function} = nothing, # (total, now) -> nothing
+ )
if auth_header === nothing
- auth_header = get_auth_header(url, verbose=verbose)
+ auth_header = get_auth_header(url, verbose = verbose)
end
if auth_header !== nothing
push!(headers, auth_header)
@@ -272,9 +310,9 @@ function download(
progress = if !isnothing(progress)
progress
elseif do_fancy
- bar = MiniProgressBar(header="Downloading", color=Base.info_color())
+ bar = MiniProgressBar(header = "Downloading", color = Base.info_color())
start_progress(io, bar)
- let bar=bar
+ let bar = bar
(total, now) -> begin
bar.max = total
bar.current = now
@@ -288,7 +326,7 @@ function download(
else
nothing
end
- try
+ return try
Downloads.download(url, dest; headers, progress)
finally
do_fancy && end_progress(io, bar)
@@ -323,14 +361,14 @@ set to `false`) the downloading process will be completely silent. If
printed in addition to messages regarding downloading.
"""
function download_verify(
- url::AbstractString,
- hash::Union{AbstractString, Nothing},
- dest::AbstractString;
- verbose::Bool = false,
- force::Bool = false,
- quiet_download::Bool = false,
- progress::Union{Nothing,Function} = nothing, # (total, now) -> nothing
-)
+ url::AbstractString,
+ hash::Union{AbstractString, Nothing},
+ dest::AbstractString;
+ verbose::Bool = false,
+ force::Bool = false,
+ quiet_download::Bool = false,
+ progress::Union{Nothing, Function} = nothing, # (total, now) -> nothing
+ )
# Whether the file existed in the first place
file_existed = false
@@ -342,7 +380,7 @@ function download_verify(
# verify download, if it passes, return happy. If it fails, (and
# `force` is `true`, re-download!)
- if hash !== nothing && verify(dest, hash; verbose=verbose)
+ if hash !== nothing && verify(dest, hash; verbose = verbose)
return true
elseif !force
error("Verification failed, not overwriting $(dest)")
@@ -356,7 +394,7 @@ function download_verify(
attempts = 3
for i in 1:attempts
try
- download(url, dest; verbose=verbose || !quiet_download, progress)
+ download(url, dest; verbose = verbose || !quiet_download, progress)
break
catch err
@debug "download and verify failed on attempt $i/$attempts" url dest err
@@ -377,10 +415,10 @@ function download_verify(
if verbose
@info("Continued download didn't work, restarting from scratch")
end
- Base.rm(dest; force=true)
+ Base.rm(dest; force = true)
# Download and verify from scratch
- download(url, dest; verbose=verbose || !quiet_download)
+ download(url, dest; verbose = verbose || !quiet_download)
if hash !== nothing && !verify(dest, hash; verbose, details)
@goto verification_failed
end
@@ -402,27 +440,97 @@ end
# TODO: can probably delete this, only affects tests
function copy_symlinks()
var = get(ENV, "BINARYPROVIDER_COPYDEREF", "")
- lowercase(var) in ("true", "t", "yes", "y", "1") ? true :
- lowercase(var) in ("false", "f", "no", "n", "0") ? false : nothing
+ return lowercase(var) in ("true", "t", "yes", "y", "1") ? true :
+ lowercase(var) in ("false", "f", "no", "n", "0") ? false : nothing
+end
+
+"""
+ detect_archive_format(tarball_path::AbstractString)
+
+Detect compression format by reading file magic bytes.
+Returns one of: "zstd", "gzip", "bzip2", "xz", "lz4", or "unknown".
+
+Note: This is used both for determining file extensions after download
+and for selecting the appropriate decompression tool.
+"""
+function detect_archive_format(tarball_path::AbstractString)
+ file_size = filesize(tarball_path)
+
+ if file_size == 0
+ error("cannot detect compression format: $tarball_path is empty")
+ end
+
+ magic = open(tarball_path, "r") do io
+ read(io, min(6, file_size))
+ end
+
+ # Check magic bytes for various formats
+ # Zstd: 0x28 0xB5 0x2F 0xFD (4 bytes)
+ if length(magic) >= 4 && magic[1:4] == [0x28, 0xB5, 0x2F, 0xFD]
+ return "zstd"
+ end
+ # Gzip: 0x1F 0x8B (2 bytes)
+ if length(magic) >= 2 && magic[1:2] == [0x1F, 0x8B]
+ return "gzip"
+ end
+ # Bzip2: 0x42 0x5A 0x68 (BZh) (3 bytes)
+ if length(magic) >= 3 && magic[1:3] == [0x42, 0x5A, 0x68]
+ return "bzip2"
+ end
+ # XZ: 0xFD 0x37 0x7A 0x58 0x5A 0x00 (6 bytes)
+ if length(magic) >= 6 && magic[1:6] == [0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00]
+ return "xz"
+ end
+ # LZ4: 0x04 0x22 0x4D 0x18 (4 bytes)
+ if length(magic) >= 4 && magic[1:4] == [0x04, 0x22, 0x4D, 0x18]
+ return "lz4"
+ end
+ return "unknown"
+end
+
+"""
+ get_extract_cmd(tarball_path::AbstractString)
+
+Get the decompression command for a tarball by detecting format via magic bytes.
+"""
+function get_extract_cmd(tarball_path::AbstractString)
+ format = detect_archive_format(tarball_path)
+ # 7z appears to normalize paths internally, which can cause mis-resolution
+ # if symbolic links are present
+ tarball_path = realpath(tarball_path)
+ if format == "zstd"
+ return `$(exezstd()) -d -c $tarball_path`
+ else
+ return `$(exe7z()) x $tarball_path -so`
+ end
end
function unpack(
- tarball_path::AbstractString,
- dest::AbstractString;
- verbose::Bool = false,
-)
- Tar.extract(`$(exe7z()) x $tarball_path -so`, dest, copy_symlinks = copy_symlinks())
+ tarball_path::AbstractString,
+ dest::AbstractString;
+ verbose::Bool = false,
+ )
+ return Tar.extract(get_extract_cmd(tarball_path), dest, copy_symlinks = copy_symlinks())
end
"""
package(src_dir::AbstractString, tarball_path::AbstractString)
Compress `src_dir` into a tarball located at `tarball_path`.
+Supports both gzip and zstd compression based on file extension.
"""
-function package(src_dir::AbstractString, tarball_path::AbstractString; io=stderr_f())
- rm(tarball_path, force=true)
- cmd = `$(exe7z()) a -si -tgzip -mx9 $tarball_path`
- open(pipeline(cmd, stdout=devnull, stderr=io), write=true) do io
+function package(src_dir::AbstractString, tarball_path::AbstractString; io = stderr_f())
+ rm(tarball_path, force = true)
+ # Choose compression based on file extension (case-insensitive)
+ tarball_lower = lowercase(tarball_path)
+ if endswith(tarball_lower, ".zst") || endswith(tarball_lower, ".tar.zst")
+ # Use zstd compression (level 19 for good compression)
+ cmd = `$(exezstd()) -19 -c -T -o $tarball_path`
+ else
+ # Use gzip compression (default)
+ cmd = `$(exe7z()) a -si -tgzip -mx9 $tarball_path`
+ end
+ return open(pipeline(cmd, stdout = devnull, stderr = io), write = true) do io
Tar.create(src_dir, io)
end
end
@@ -464,17 +572,17 @@ Returns `true` if a tarball was actually unpacked, `false` if nothing was
changed in the destination prefix.
"""
function download_verify_unpack(
- url::AbstractString,
- hash::Union{AbstractString, Nothing},
- dest::AbstractString;
- tarball_path = nothing,
- ignore_existence::Bool = false,
- force::Bool = false,
- verbose::Bool = false,
- quiet_download::Bool = false,
- io::IO=stderr_f(),
- progress::Union{Nothing,Function} = nothing, # (total, now) -> nothing
-)
+ url::AbstractString,
+ hash::Union{AbstractString, Nothing},
+ dest::AbstractString;
+ tarball_path = nothing,
+ ignore_existence::Bool = false,
+ force::Bool = false,
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO = stderr_f(),
+ progress::Union{Nothing, Function} = nothing, # (total, now) -> nothing
+ )
# First, determine whether we should keep this tarball around
remove_tarball = false
if tarball_path === nothing
@@ -495,12 +603,12 @@ function download_verify_unpack(
return nothing
end
- return url[dot_idx+1:end]
+ return url[(dot_idx + 1):end]
end
# If extension of url contains a recognized extension, use it, otherwise use ".gz"
ext = url_ext(url)
- if !(ext in ["tar", "gz", "tgz", "bz2", "xz"])
+ if !(ext in ["tar", "gz", "tgz", "bz2", "xz", "zst"])
ext = "gz"
end
@@ -523,7 +631,7 @@ function download_verify_unpack(
if verbose
@info("Removing dest directory $(dest) as source tarball changed")
end
- Base.rm(dest; recursive=true, force=true)
+ Base.rm(dest; recursive = true, force = true)
end
# If the destination path already exists, don't bother to unpack
@@ -540,15 +648,15 @@ function download_verify_unpack(
if verbose
@info("Unpacking $(tarball_path) into $(dest)...")
end
- isnothing(progress) || progress(10000, 10000; status="unpacking")
- open(`$(exe7z()) x $tarball_path -so`) do io
+ isnothing(progress) || progress(10000, 10000; status = "unpacking")
+ open(get_extract_cmd(tarball_path)) do io
Tar.extract(io, dest, copy_symlinks = copy_symlinks())
end
finally
if remove_tarball
Base.rm(tarball_path)
# Remove cached tarball hash, if it exists.
- Base.rm(string(tarball_path, ".sha256"); force=true)
+ Base.rm(string(tarball_path, ".sha256"); force = true)
end
end
@@ -579,9 +687,11 @@ successfully.
If `details` is provided, any pertinent detail will be pushed to it rather than logged.
"""
-function verify(path::AbstractString, hash::AbstractString; verbose::Bool = false,
- report_cache_status::Bool = false, hash_path::AbstractString="$(path).sha256",
- details::Union{Vector{String},Nothing} = nothing)
+function verify(
+ path::AbstractString, hash::AbstractString; verbose::Bool = false,
+ report_cache_status::Bool = false, hash_path::AbstractString = "$(path).sha256",
+ details::Union{Vector{String}, Nothing} = nothing
+ )
# Check hash string format
if !occursin(r"^[0-9a-f]{64}$"i, hash)
@@ -648,7 +758,7 @@ function verify(path::AbstractString, hash::AbstractString; verbose::Bool = fals
end
if calc_hash != hash
- msg = "Hash Mismatch!\n"
+ msg = "Hash Mismatch!\n"
msg *= " Expected sha256: $hash\n"
msg *= " Calculated sha256: $calc_hash"
if isnothing(details)
@@ -686,18 +796,18 @@ function verify(path::AbstractString, hash::AbstractString; verbose::Bool = fals
end
# Verify the git-tree-sha1 hash of a compressed archive.
-function verify_archive_tree_hash(tar_gz::AbstractString, expected_hash::Base.SHA1)
+function verify_archive_tree_hash(compressed_tar::AbstractString, expected_hash::Base.SHA1)
# This can fail because unlike sha256 verification of the downloaded
# tarball, tree hash verification requires that the file can i) be
# decompressed and ii) is a proper archive.
calc_hash = try
- Base.SHA1(open(Tar.tree_hash, `$(exe7z()) x $tar_gz -so`))
+ Base.SHA1(open(Tar.tree_hash, get_extract_cmd(compressed_tar)))
catch err
- @warn "unable to decompress and read archive" exception=err
+ @warn "unable to decompress and read archive" exception = err
return false
end
if calc_hash != expected_hash
- @warn "tarball content does not match expected git-tree-sha1"
+ @warn "Tarball content does not match expected hash (git-tree-sha1)" tarball = compressed_tar expected = expected_hash computed = calc_hash
return false
end
return true
diff --git a/src/REPLMode/REPLMode.jl b/src/REPLMode/REPLMode.jl
index aba9ef4dd8..2de3f12ba1 100644
--- a/src/REPLMode/REPLMode.jl
+++ b/src/REPLMode/REPLMode.jl
@@ -6,8 +6,8 @@ module REPLMode
using Markdown, UUIDs, Dates
-import ..casesensitive_isdir, ..OFFLINE_MODE, ..linewrap, ..pathrepr
-using ..Types, ..Operations, ..API, ..Registry, ..Resolve
+import ..OFFLINE_MODE, ..linewrap, ..pathrepr, ..IN_REPL_MODE
+using ..Types, ..Operations, ..API, ..Registry, ..Resolve, ..Apps
import ..stdout_f, ..stderr_f
@@ -21,10 +21,10 @@ const PRINTED_REPL_WARNING = Ref{Bool}(false)
#---------#
# Options #
#---------#
-const OptionDeclaration = Vector{Pair{Symbol,Any}}
+const OptionDeclaration = Vector{Pair{Symbol, Any}}
struct OptionSpec
name::String
- short_name::Union{Nothing,String}
+ short_name::Union{Nothing, String}
api::Pair{Symbol, Any}
takes_arg::Bool
end
@@ -32,10 +32,12 @@ end
# TODO assert names matching lex regex
# assert now so that you don't fail at user time
# see function `REPLMode.api_options`
-function OptionSpec(;name::String,
- short_name::Union{Nothing,String}=nothing,
- takes_arg::Bool=false,
- api::Pair{Symbol,<:Any})::OptionSpec
+function OptionSpec(;
+ name::String,
+ short_name::Union{Nothing, String} = nothing,
+ takes_arg::Bool = false,
+ api::Pair{Symbol, <:Any}
+ )::OptionSpec
takes_arg && @assert hasmethod(api.second, Tuple{String})
return OptionSpec(name, short_name, api, takes_arg)
end
@@ -43,7 +45,7 @@ end
function OptionSpecs(decs::Vector{OptionDeclaration})
specs = Dict{String, OptionSpec}()
for x in decs
- opt_spec = OptionSpec(;x...)
+ opt_spec = OptionSpec(; x...)
@assert !haskey(specs, opt_spec.name) # don't overwrite
specs[opt_spec.name] = opt_spec
if opt_spec.short_name !== nothing
@@ -65,43 +67,46 @@ end
#----------#
# Commands #
#----------#
-const CommandDeclaration = Vector{Pair{Symbol,Any}}
+const CommandDeclaration = Vector{Pair{Symbol, Any}}
mutable struct CommandSpec
const canonical_name::String
- const short_name::Union{Nothing,String}
+ const short_name::Union{Nothing, String}
const api::Function
const should_splat::Bool
const argument_spec::ArgSpec
- const option_specs::Dict{String,OptionSpec}
- completions::Union{Nothing,Symbol,Function} # Symbol is used as a marker for REPLExt to assign the function of that name
+ const option_specs::Dict{String, OptionSpec}
+ completions::Union{Nothing, Symbol, Function} # Symbol is used as a marker for REPLExt to assign the function of that name
const description::String
- const help::Union{Nothing,Markdown.MD}
+ const help::Union{Nothing, Markdown.MD}
end
default_parser(xs, options) = unwrap(xs)
-function CommandSpec(;name::Union{Nothing,String} = nothing,
- short_name::Union{Nothing,String} = nothing,
- api::Union{Nothing,Function} = nothing,
- should_splat::Bool = true,
- option_spec::Vector{OptionDeclaration} = OptionDeclaration[],
- help::Union{Nothing,Markdown.MD} = nothing,
- description::Union{Nothing,String} = nothing,
- completions::Union{Nothing,Symbol,Function} = nothing,
- arg_count::Pair = (0=>0),
- arg_parser::Function = default_parser,
- )::CommandSpec
+function CommandSpec(;
+ name::Union{Nothing, String} = nothing,
+ short_name::Union{Nothing, String} = nothing,
+ api::Union{Nothing, Function} = nothing,
+ should_splat::Bool = true,
+ option_spec::Vector{OptionDeclaration} = OptionDeclaration[],
+ help::Union{Nothing, Markdown.MD} = nothing,
+ description::Union{Nothing, String} = nothing,
+ completions::Union{Nothing, Symbol, Function} = nothing,
+ arg_count::Pair = (0 => 0),
+ arg_parser::Function = default_parser,
+ )::CommandSpec
name === nothing && error("Supply a canonical name")
description === nothing && error("Supply a description")
api === nothing && error("Supply API dispatch function for `$(name)`")
# TODO assert isapplicable completions dict, string
- return CommandSpec(name, short_name, api, should_splat, ArgSpec(arg_count, arg_parser),
- OptionSpecs(option_spec), completions, description, help)
+ return CommandSpec(
+ name, short_name, api, should_splat, ArgSpec(arg_count, arg_parser),
+ OptionSpecs(option_spec), completions, description, help
+ )
end
function CommandSpecs(declarations::Vector{CommandDeclaration})
- specs = Dict{String,CommandSpec}()
+ specs = Dict{String, CommandSpec}()
for dec in declarations
- spec = CommandSpec(;dec...)
+ spec = CommandSpec(; dec...)
@assert !haskey(specs, spec.canonical_name) "duplicate spec entry"
specs[spec.canonical_name] = spec
if spec.short_name !== nothing
@@ -113,7 +118,7 @@ function CommandSpecs(declarations::Vector{CommandDeclaration})
end
function CompoundSpecs(compound_declarations)
- compound_specs = Dict{String,Dict{String,CommandSpec}}()
+ compound_specs = Dict{String, Dict{String, CommandSpec}}()
for (name, command_declarations) in compound_declarations
specs = CommandSpecs(command_declarations)
@assert !haskey(compound_specs, name) "duplicate super spec entry"
@@ -138,19 +143,19 @@ unwrap(xs::Vector{QString}) = map(x -> x.raw, xs)
#---------#
struct Option
val::String
- argument::Union{Nothing,String}
+ argument::Union{Nothing, String}
Option(val::AbstractString) = new(val, nothing)
- Option(val::AbstractString, arg::Union{Nothing,String}) = new(val, arg)
+ Option(val::AbstractString, arg::Union{Nothing, String}) = new(val, arg)
end
Base.show(io::IO, opt::Option) = print(io, "--$(opt.val)", opt.argument === nothing ? "" : "=$(opt.argument)")
-wrap_option(option::String) = length(option) == 1 ? "-$option" : "--$option"
+wrap_option(option::String) = length(option) == 1 ? "-$option" : "--$option"
is_opt(word::AbstractString) = first(word) == '-' && word != "-"
function parse_option(word::AbstractString)::Option
m = match(r"^(?: -([a-z]) | --((?:[a-z]{1,}-?)*)(?:\s*=\s*(\S*))? )$"ix, word)
m === nothing && pkgerror("malformed option: ", repr(word))
option_name = m.captures[1] !== nothing ? something(m.captures[1]) : something(m.captures[2])
- option_arg = m.captures[3] === nothing ? nothing : String(something(m.captures[3]))
+ option_arg = m.captures[3] === nothing ? nothing : String(something(m.captures[3]))
return Option(option_name, option_arg)
end
@@ -159,14 +164,14 @@ end
#-----------#
# Statement: text-based representation of a command
Base.@kwdef mutable struct Statement
- super::Union{Nothing,String} = nothing
- spec::Union{Nothing,CommandSpec} = nothing
- options::Union{Vector{Option},Vector{String}} = String[]
- arguments::Vector{QString} = QString[]
+ super::Union{Nothing, String} = nothing
+ spec::Union{Nothing, CommandSpec} = nothing
+ options::Union{Vector{Option}, Vector{String}} = String[]
+ arguments::Vector{QString} = QString[]
end
function lex(cmd::String)::Vector{QString}
- replace_comma = (nothing!=match(r"^(add|dev|develop|rm|remove|status|precompile)+\s", cmd))
+ replace_comma = (nothing != match(r"^(add|dev|develop|rm|remove|status|precompile)+\s", cmd))
in_doublequote = false
in_singlequote = false
qstrings = QString[]
@@ -220,13 +225,16 @@ function lex(cmd::String)::Vector{QString}
(in_doublequote || in_singlequote) ? pkgerror("unterminated quote") : push_token!(false)
# to avoid complexity in the main loop, empty tokens are allowed above and
# filtered out before returning
- return filter(x->!isempty(x.raw), qstrings)
+ return filter(x -> !isempty(x.raw), qstrings)
end
-function tokenize(cmd::String)
+function tokenize(cmd::AbstractString; rm_leading_bracket::Bool = true)
cmd = replace(replace(cmd, "\r\n" => "; "), "\n" => "; ") # for multiline commands
+ if rm_leading_bracket && startswith(cmd, ']')
+ cmd = string(lstrip(cmd, ']'))
+ end
qstrings = lex(cmd)
- statements = foldl(qstrings; init=[QString[]]) do collection, next
+ statements = foldl(qstrings; init = [QString[]]) do collection, next
(next.raw == ";" && !next.isquoted) ?
push!(collection, QString[]) :
push!(collection[end], next)
@@ -235,28 +243,25 @@ function tokenize(cmd::String)
return statements
end
-function core_parse(words::Vector{QString}; only_cmd=false)
+function core_parse(words::Vector{QString}; only_cmd = false)
statement = Statement()
- word::Union{Nothing,QString} = nothing
- function next_word!()
- isempty(words) && return false
- word = popfirst!(words)
- return true
- end
+ word::Union{Nothing, QString} = nothing
# begin parsing
- next_word!() || return statement, ((word === nothing) ? nothing : word.raw)
+ isempty(words) && return statement, ((word === nothing) ? nothing : word.raw)
+ word = popfirst!(words)
# handle `?` alias for help
# It is special in that it requires no space between command and args
- if word.raw[1]=='?' && !word.isquoted
- length(word.raw) > 1 && pushfirst!(words, QString(word.raw[2:end],false))
+ if word.raw[1] == '?' && !word.isquoted
+ length(word.raw) > 1 && pushfirst!(words, QString(word.raw[2:end], false))
word = QString("?", false)
end
# determine command
super = get(SPECS, word.raw, nothing)
if super !== nothing # explicit
statement.super = word.raw
- next_word!() || return statement, word.raw
+ isempty(words) && return statement, word.raw
+ word = popfirst!(words)
command = get(super, word.raw, nothing)
command !== nothing || return statement, word.raw
else # try implicit package
@@ -268,12 +273,14 @@ function core_parse(words::Vector{QString}; only_cmd=false)
only_cmd && return statement, word.raw # hack to hook in `help` command
- next_word!() || return statement, word.raw
+ isempty(words) && return statement, word.raw
+ word = popfirst!(words)
# full option parsing is delayed so that the completions parser can use the raw string
while is_opt(word.raw)
push!(statement.options, word.raw)
- next_word!() || return statement, word.raw
+ isempty(words) && return statement, word.raw
+ word = popfirst!(words)
end
pushfirst!(words, word)
@@ -282,12 +289,12 @@ function core_parse(words::Vector{QString}; only_cmd=false)
end
parse(input::String) =
- map(Base.Iterators.filter(!isempty, tokenize(input))) do words
- statement, input_word = core_parse(words)
- statement.spec === nothing && pkgerror("`$input_word` is not a recognized command. Type ? for help with available commands")
- statement.options = map(parse_option, statement.options)
- statement
- end
+ map(Base.Iterators.filter(!isempty, tokenize(strip(input)))) do words
+ statement, input_word = core_parse(words)
+ statement.spec === nothing && pkgerror("`$input_word` is not a recognized command. Type ? for help with available commands")
+ statement.options = map(parse_option, statement.options)
+ statement
+end
#------------#
# APIOptions #
@@ -296,8 +303,10 @@ parse(input::String) =
# Do NOT introduce a constructor for APIOptions
# as long as it's an alias for Dict
const APIOptions = Dict{Symbol, Any}
-function api_options(options::Vector{Option},
- specs::Dict{String, OptionSpec})
+function api_options(
+ options::Vector{Option},
+ specs::Dict{String, OptionSpec}
+ )
api_opts = APIOptions()
enforce_option(options, specs)
for option in options
@@ -315,15 +324,15 @@ Context!(ctx::APIOptions)::Context = Types.Context!(collect(ctx))
# Command #
#---------#
Base.@kwdef struct Command
- spec::Union{Nothing,CommandSpec} = nothing
- options::APIOptions = APIOptions()
- arguments::Vector = []
+ spec::Union{Nothing, CommandSpec} = nothing
+ options::APIOptions = APIOptions()
+ arguments::Vector = []
end
-function enforce_option(option::Option, specs::Dict{String,OptionSpec})
+function enforce_option(option::Option, specs::Dict{String, OptionSpec})
spec = get(specs, option.val, nothing)
spec !== nothing || pkgerror("option '$(option.val)' is not a valid option")
- if spec.takes_arg
+ return if spec.takes_arg
option.argument !== nothing ||
pkgerror("option '$(option.val)' expects an argument, but no argument given")
else # option is a switch
@@ -339,22 +348,23 @@ checks:
- options which take an argument are given arguments
- options which do not take arguments are not given arguments
"""
-function enforce_option(options::Vector{Option}, specs::Dict{String,OptionSpec})
+function enforce_option(options::Vector{Option}, specs::Dict{String, OptionSpec})
unique_keys = Symbol[]
get_key(opt::Option) = specs[opt.val].api.first
# per option checking
- foreach(x->enforce_option(x,specs), options)
+ foreach(x -> enforce_option(x, specs), options)
# checking for compatible options
for opt in options
key = get_key(opt)
if key in unique_keys
- conflicting = filter(opt->get_key(opt) == key, options)
+ conflicting = filter(opt -> get_key(opt) == key, options)
pkgerror("Conflicting options: $conflicting")
else
push!(unique_keys, key)
end
end
+ return
end
"""
@@ -382,7 +392,7 @@ function prepare_cmd(input)
return commands
end
-do_cmds(input::String, io=stdout_f()) = do_cmds(prepare_cmd(input), io)
+do_cmds(input::String, io = stdout_f()) = do_cmds(prepare_cmd(input), io)
function do_cmds(commands::Vector{Command}, io)
@@ -398,24 +408,27 @@ function do_cmds(commands::Vector{Command}, io)
end
function do_cmd(command::Command, io)
- # REPL specific commands
- command.spec === SPECS["package"]["help"] && return Base.invokelatest(do_help!, command, io)
- # API commands
- if command.spec.should_splat
- TEST_MODE[] && return command.spec.api, command.arguments..., command.options
- command.spec.api(command.arguments...; collect(command.options)...) # TODO is invokelatest still needed?
- else
- TEST_MODE[] && return command.spec.api, command.arguments, command.options
- command.spec.api(command.arguments; collect(command.options)...)
+ # Set the scoped value to indicate we're in REPL mode
+ return Base.ScopedValues.@with IN_REPL_MODE => true begin
+ # REPL specific commands
+ command.spec === SPECS["package"]["help"] && return Base.invokelatest(do_help!, command, io)
+ # API commands
+ if command.spec.should_splat
+ TEST_MODE[] && return command.spec.api, command.arguments..., command.options
+ command.spec.api(command.arguments...; collect(command.options)...) # TODO is invokelatest still needed?
+ else
+ TEST_MODE[] && return command.spec.api, command.arguments, command.options
+ command.spec.api(command.arguments; collect(command.options)...)
+ end
end
end
function parse_command(words::Vector{QString})
- statement, word = core_parse(words; only_cmd=true)
+ statement, word = core_parse(words; only_cmd = true)
if statement.super === nothing && statement.spec === nothing
pkgerror("invalid input: `$word` is not a command")
end
- return statement.spec === nothing ? statement.super : statement.spec
+ return statement.spec === nothing ? statement.super : statement.spec
end
function do_help!(command::Command, io)
@@ -428,8 +441,10 @@ function do_help!(command::Command, io)
cmd = parse_command(command.arguments)
if cmd isa String
# gather all helps for super spec `cmd`
- all_specs = sort!(unique(values(SPECS[cmd]));
- by=(spec->spec.canonical_name))
+ all_specs = sort!(
+ unique(values(SPECS[cmd]));
+ by = (spec -> spec.canonical_name)
+ )
for spec in all_specs
isempty(help_md.content) || push!(help_md.content, md"---")
push!(help_md.content, spec.help)
@@ -438,14 +453,14 @@ function do_help!(command::Command, io)
push!(help_md.content, cmd.help)
end
!isempty(command.arguments) && @warn "More than one command specified, only rendering help for first"
- show(io, MIME("text/plain"), help_md)
+ return show(io, MIME("text/plain"), help_md)
end
# Provide a string macro pkg"cmd" that can be used in the same way
# as the REPLMode `pkg> cmd`. Useful for testing and in environments
# where we do not have a REPL, e.g. IJulia.
macro pkg_str(str::String)
- :(pkgstr($str))
+ return :(pkgstr($str))
end
function pkgstr(str::String)
@@ -465,32 +480,32 @@ const SPECS = CompoundSpecs(compound_declarations)
function canonical_names()
# add "package" commands
xs = [(spec.canonical_name => spec) for spec in unique(values(SPECS["package"]))]
- sort!(xs, by=first)
+ sort!(xs, by = first)
# add other super commands, e.g. "registry"
for (super, specs) in SPECS
super != "package" || continue # skip "package"
temp = [(join([super, spec.canonical_name], " ") => spec) for spec in unique(values(specs))]
- append!(xs, sort!(temp, by=first))
+ append!(xs, sort!(temp, by = first))
end
return xs
end
function gen_help()
help = md"""
-**Welcome to the Pkg REPL-mode**. To return to the `julia>` prompt, either press
-backspace when the input line is empty or press Ctrl+C.
+ **Welcome to the Pkg REPL-mode**. To return to the `julia>` prompt, either press
+ backspace when the input line is empty or press Ctrl+C.
-Full documentation available at https://pkgdocs.julialang.org/
+ Full documentation available at https://pkgdocs.julialang.org/
-**Synopsis**
+ **Synopsis**
- pkg> cmd [opts] [args]
+ pkg> cmd [opts] [args]
-Multiple commands can be given on the same line by interleaving a `;` between the commands.
-Some commands have an alias, indicated below.
+ Multiple commands can be given on the same line by interleaving a `;` between the commands.
+ Some commands have an alias, indicated below.
-**Commands**
-"""
+ **Commands**
+ """
for (command, spec) in canonical_names()
short_name = spec.short_name === nothing ? "" : ", `" * spec.short_name::String * '`'
push!(help.content, Markdown.parse("`$command`$short_name: $(spec.description)"))
diff --git a/src/REPLMode/argument_parsers.jl b/src/REPLMode/argument_parsers.jl
index c0f284a4b0..90f5058a42 100644
--- a/src/REPLMode/argument_parsers.jl
+++ b/src/REPLMode/argument_parsers.jl
@@ -1,4 +1,5 @@
import ..isdir_nothrow, ..Registry.RegistrySpec, ..isurl
+using UUIDs
struct PackageIdentifier
val::String
@@ -16,112 +17,432 @@ struct Subdir
dir::String
end
-const PackageToken = Union{PackageIdentifier,
- VersionToken,
- Rev,
- Subdir}
-
-packagetoken(word::String)::PackageToken =
- first(word) == '@' ? VersionToken(word[2:end]) :
- first(word) == '#' ? Rev(word[2:end]) :
- first(word) == ':' ? Subdir(word[2:end]) :
- PackageIdentifier(word)
-
-###############
-# PackageSpec #
-###############
-"""
-Parser for PackageSpec objects.
-"""
-function parse_package(args::Vector{QString}, options; add_or_dev=false)::Vector{PackageSpec}
- words′ = package_lex(args)
- words = String[]
- for word in words′
- if (m = match(r"https://github.com/(.*?)/(.*?)/(?:tree|commit)/(.*?)$", word)) !== nothing
- push!(words, "https://github.com/$(m.captures[1])/$(m.captures[2])")
- push!(words, "#$(m.captures[3])")
- else
- push!(words, word)
+const PackageToken = Union{
+ PackageIdentifier,
+ VersionToken,
+ Rev,
+ Subdir,
+}
+
+# Check if a string is a valid UUID
+function is_valid_uuid(str::String)
+ try
+ UUID(str)
+ return true
+ catch
+ return false
+ end
+end
+
+# Simple URL detection
+function looks_like_url(str::String)
+ if startswith(str, "http://") || startswith(str, "https://") ||
+ startswith(str, "git@") || startswith(str, "ssh://") ||
+ contains(str, ".git")
+ return true
+ end
+
+ # Check for user@host:path pattern (SSH URL with user)
+ # This handles cases like: user@10.20.30.40:PackageName.jl
+ # The host part should not contain / or @ characters, and should come before the :
+ at_pos = findfirst('@', str)
+ if at_pos !== nothing
+ colon_pos = findnext(':', str, nextind(str, at_pos))
+ if colon_pos !== nothing
+ host_part = str[nextind(str, at_pos):(prevind(str, colon_pos))]
+ # Host should not contain / (which would suggest this is package@version:subdir syntax)
+ # and should look like a hostname or IP address (no spaces, etc.)
+ # Additionally, exclude things that look like version numbers (e.g., "1.0", "1.0.0")
+ # by checking if the host contains only digits and dots (which would be a version or IP)
+ # If it's all digits and dots, it must have at least 3 dots to be an IP (X.X.X.X)
+ if !contains(host_part, '/') && !contains(host_part, ' ') && !isempty(host_part)
+ # Check if this looks like a version number (e.g., "1.0", "1.0.0")
+ # vs an IP address (e.g., "10.20.30.40") or hostname (e.g., "server.com")
+ if all(c -> isdigit(c) || c == '.', host_part)
+ # All digits and dots - could be version or IP
+ # Count dots: version has 1-2 dots, IP has 3 dots
+ dot_count = count(==('.'), host_part)
+ if dot_count >= 3
+ # Likely an IP address (X.X.X.X)
+ return true
+ end
+ # else: likely a version number, not a URL
+ else
+ # Contains letters or other chars - likely a hostname
+ return true
+ end
+ end
+ end
+ end
+
+ return false
+end
+
+# Simple path detection
+function looks_like_path(str::String)
+ return contains(str, '/') || contains(str, '\\') || str == "." || str == ".." || is_windows_drive_colon(str)
+end
+
+# Check if a string looks like a complete URL
+function looks_like_complete_url(str::String)
+ return (
+ startswith(str, "http://") || startswith(str, "https://") ||
+ startswith(str, "git@") || startswith(str, "ssh://")
+ ) &&
+ (contains(str, '.') || contains(str, '/'))
+end
+
+is_windows_drive_colon(str::String) = occursin(r"^[a-zA-Z]:", str)
+
+# Check if a colon at given position is part of a Windows drive letter
+function is_windows_drive_colon(input::String, colon_pos::Int)
+ # Windows drive letters are single letters followed by colon at beginning
+ # Examples: "C:", "D:", etc.
+ if colon_pos == 2 && length(input) >= 2
+ return is_windows_drive_colon(input)
+ end
+ return false
+end
+
+# Extract subdir specifier from the end of input (rightmost : that's not a Windows drive letter)
+function extract_subdir(input::String)
+ colon_pos = findlast(':', input)
+ if colon_pos === nothing
+ return input, nothing
+ end
+
+ # Skip Windows drive letters (e.g., C:, D:)
+ if is_windows_drive_colon(input, colon_pos)
+ return input, nothing
+ end
+
+ subdir_part = input[nextind(input, colon_pos):end]
+ remaining = input[1:prevind(input, colon_pos)]
+ return remaining, subdir_part
+end
+
+# Extract revision specifier from input (first # that separates base from revision)
+function extract_revision(input::String)
+ hash_pos = findfirst('#', input)
+ if hash_pos === nothing
+ return input, nothing
+ end
+
+ rev_part = input[nextind(input, hash_pos):end]
+ remaining = input[1:prevind(input, hash_pos)]
+ return remaining, rev_part
+end
+
+# Extract version specifier from the end of input (rightmost @)
+function extract_version(input::String)
+ at_pos = findlast('@', input)
+ if at_pos === nothing
+ return input, nothing
+ end
+
+ version_part = input[nextind(input, at_pos):end]
+ remaining = input[1:prevind(input, at_pos)]
+ return remaining, version_part
+end
+
+function preprocess_github_url(input::String)
+ # Handle GitHub tree/commit URLs
+ if (m = match(r"https://github.com/(.*?)/(.*?)/(?:tree|commit)/(.*?)$", input)) !== nothing
+ return [PackageIdentifier("https://github.com/$(m.captures[1])/$(m.captures[2])"), Rev(m.captures[3])]
+ # Handle GitHub pull request URLs
+ elseif (m = match(r"https://github.com/(.*?)/(.*?)/pull/(\d+)$", input)) !== nothing
+ return [PackageIdentifier("https://github.com/$(m.captures[1])/$(m.captures[2])"), Rev("pull/$(m.captures[3])/head")]
+ else
+ return nothing
+ end
+end
+
+# Check if a colon in a URL string is part of URL structure (not a subdir separator)
+function is_url_structure_colon(input::String, colon_pos::Int)
+ after_colon = input[nextind(input, colon_pos):end]
+
+ # Check for user@host:path syntax (including git@host:path)
+ at_pos = findfirst('@', input)
+ if at_pos !== nothing && at_pos < colon_pos
+ between_at_colon = input[nextind(input, at_pos):prevind(input, colon_pos)]
+ # If there's no '/' between @ and :, this colon is part of the SSH URL structure
+ if !contains(between_at_colon, '/')
+ return true
+ end
+ end
+
+ # Check for protocol:// syntax
+ if colon_pos <= lastindex(input) - 2
+ next_pos = nextind(input, colon_pos)
+ if next_pos <= lastindex(input) - 1 &&
+ input[colon_pos:nextind(input, nextind(input, colon_pos))] == "://"
+ return true
+ end
+ end
+
+ # Check for user:password@ syntax (: followed by text then @)
+ if contains(after_colon, '@')
+ at_in_after = findfirst('@', after_colon)
+ if at_in_after !== nothing
+ text_before_at = after_colon[1:prevind(after_colon, at_in_after)]
+ if !contains(text_before_at, '/')
+ return true
+ end
end
end
- args = PackageToken[packagetoken(pkgword) for pkgword in words]
- return parse_package_args(args; add_or_dev=add_or_dev)
+ # Check for port numbers (: followed by digits then /)
+ if occursin(r"^\d+(/|$)", after_colon)
+ return true
+ end
+
+ return false
+end
+
+# Extract subdir from URL, being careful about URL structure
+function extract_url_subdir(input::String)
+ colon_pos = findlast(':', input)
+ if colon_pos === nothing
+ return input, nothing
+ end
+
+ # Check if this colon is part of URL structure
+ if is_url_structure_colon(input, colon_pos)
+ return input, nothing
+ end
+
+ after_colon = input[nextind(input, colon_pos):end]
+ before_colon = input[1:prevind(input, colon_pos)]
+
+ # Only treat as subdir if it looks like one and the part before looks like a URL
+ if (contains(after_colon, '/') || (!contains(after_colon, '@') && !contains(after_colon, '#'))) &&
+ (contains(before_colon, "://") || contains(before_colon, ".git") || contains(before_colon, '@'))
+ return before_colon, after_colon
+ end
+
+ return input, nothing
+end
+
+# Extract revision from URL, only after a complete URL
+function extract_url_revision(input::String)
+ hash_pos = findfirst('#', input)
+ if hash_pos === nothing
+ return input, nothing
+ end
+
+ before_hash = input[1:prevind(input, hash_pos)]
+ after_hash = input[nextind(input, hash_pos):end]
+
+ if looks_like_complete_url(before_hash)
+ return before_hash, after_hash
+ end
+
+ return input, nothing
+end
+
+# Parse URLs with specifiers
+# URLs can only have revisions (#) and subdirs (:), NOT versions (@)
+function parse_url_with_specifiers(input::String)
+ tokens = PackageToken[]
+ remaining = input
+
+ # Extract subdir if present (rightmost : that looks like a subdir)
+ remaining, subdir_part = extract_url_subdir(remaining)
+
+ # Extract revision (first # that comes after a complete URL)
+ remaining, rev_part = extract_url_revision(remaining)
+
+ # What's left is the base URL
+ push!(tokens, PackageIdentifier(remaining))
+
+ # Add the specifiers in the correct order
+ if rev_part !== nothing
+ push!(tokens, Rev(rev_part))
+ end
+ if subdir_part !== nothing
+ push!(tokens, Subdir(subdir_part))
+ end
+
+ return tokens
end
- # Match a git repository URL. This includes uses of `@` and `:` but
- # requires that it has `.git` at the end.
-let url = raw"((git|ssh|http(s)?)|(git@[\w\-\.]+))(:(//)?)([\w\.@\:/\-~]+)(\.git$)(/)?",
+function parse_path_with_specifiers(input::String)
+ tokens = PackageToken[]
+ remaining = input
- # Match a `NAME=UUID` package specifier.
- name_uuid = raw"[^@\#\s:]+\s*=\s*[^@\#\s:]+",
+ # Extract subdir if present (rightmost :)
+ remaining, subdir_part = extract_subdir(remaining)
- # Match a `#BRANCH` branch or tag specifier.
- branch = raw"\#\s*[^@\#\s]*",
+ # Extract revision if present (rightmost #)
+ remaining, rev_part = extract_revision(remaining)
- # Match an `@VERSION` version specifier.
- version = raw"@\s*[^@\#\s]*",
+ # What's left is the base path
+ push!(tokens, PackageIdentifier(remaining))
- # Match a `:SUBDIR` subdir specifier.
- subdir = raw":[^@\#\s]+",
+ # Add specifiers in correct order
+ if rev_part !== nothing
+ push!(tokens, Rev(rev_part))
+ end
+ if subdir_part !== nothing
+ push!(tokens, Subdir(subdir_part))
+ end
+
+ return tokens
+end
+
+# Parse package names with specifiers
+function parse_name_with_specifiers(input::String)
+ tokens = PackageToken[]
+ remaining = input
+
+ # Extract subdir if present (rightmost :)
+ remaining, subdir_part = extract_subdir(remaining)
+
+ # Extract version if present (rightmost @)
+ remaining, version_part = extract_version(remaining)
+
+ # Extract revision if present (rightmost #)
+ remaining, rev_part = extract_revision(remaining)
+
+ # What's left is the base name
+ push!(tokens, PackageIdentifier(remaining))
+
+ # Add specifiers in correct order
+ if rev_part !== nothing
+ push!(tokens, Rev(rev_part))
+ end
+ if version_part !== nothing
+ push!(tokens, VersionToken(version_part))
+ end
+ if subdir_part !== nothing
+ push!(tokens, Subdir(subdir_part))
+ end
+
+ return tokens
+end
+
+# Parse a single package specification
+function parse_package_spec_new(input::String)
+ # Handle quoted strings
+ if (startswith(input, '"') && endswith(input, '"')) ||
+ (startswith(input, '\'') && endswith(input, '\''))
+ input = input[2:(end - 1)]
+ end
+
+ # Handle GitHub tree/commit URLs first (special case)
+ github_result = preprocess_github_url(input)
+ if github_result !== nothing
+ return github_result
+ end
- # Match any other way to specify a package. This includes package
- # names, local paths, and URLs that don't match the `url` part. In
- # order not to clash with the branch, version, and subdir
- # specifiers, these cannot include `@` or `#`, and `:` is only
- # allowed if followed by `/` or `\`. For URLs matching this part
- # of the regex, that means that `@` (e.g. user names) and `:`
- # (e.g. port) cannot be used but it doesn't have to end with
- # `.git`.
- other = raw"([^@\#\s:] | :(/|\\))+"
+ # Handle name=uuid format
+ if contains(input, '=')
+ parts = split(input, '=', limit = 2)
+ if length(parts) == 2
+ name = String(strip(parts[1]))
+ uuid_str = String(strip(parts[2]))
+ if is_valid_uuid(uuid_str)
+ return [PackageIdentifier("$name=$uuid_str")]
+ end
+ end
+ end
- # Combine all of the above.
- global const package_id_re = Regex(
- "$url | $name_uuid | $branch | $version | $subdir | $other", "x")
+ # Check what type of input this is and parse accordingly
+ if looks_like_url(input)
+ return parse_url_with_specifiers(input)
+ elseif looks_like_path(input)
+ return parse_path_with_specifiers(input)
+ else
+ return parse_name_with_specifiers(input)
+ end
end
-function package_lex(qwords::Vector{QString})::Vector{String}
- words = String[]
- for qword in qwords
- qword.isquoted ?
- push!(words, qword.raw) :
- append!(words, map(m->m.match, eachmatch(package_id_re, qword.raw)))
+function parse_package(args::Vector{QString}, options; add_or_dev = false)::Vector{PackageSpec}
+ tokens = PackageToken[]
+
+ i = 1
+ while i <= length(args)
+ arg = args[i]
+ input = arg.isquoted ? arg.raw : arg.raw
+
+ # Check if this argument is a standalone modifier (like #dev, @v1.0, :subdir)
+ if !arg.isquoted && (startswith(input, '#') || startswith(input, '@') || startswith(input, ':'))
+ # This is a standalone modifier - it should be treated as a token
+ if startswith(input, '#')
+ push!(tokens, Rev(input[2:end]))
+ elseif startswith(input, '@')
+ push!(tokens, VersionToken(input[2:end]))
+ elseif startswith(input, ':')
+ push!(tokens, Subdir(input[2:end]))
+ end
+ else
+ # Parse this argument normally
+ if arg.isquoted
+ # For quoted arguments, treat as literal without specifier extraction
+ arg_tokens = [PackageIdentifier(input)]
+ else
+ arg_tokens = parse_package_spec_new(input)
+ end
+ append!(tokens, arg_tokens)
+ end
+
+ i += 1
end
- return words
+
+ return parse_package_args(tokens; add_or_dev = add_or_dev)
end
-function parse_package_args(args::Vector{PackageToken}; add_or_dev=false)::Vector{PackageSpec}
+
+function parse_package_args(args::Vector{PackageToken}; add_or_dev = false)::Vector{PackageSpec}
# check for and apply PackageSpec modifier (e.g. `#foo` or `@v1.0.2`)
function apply_modifier!(pkg::PackageSpec, args::Vector{PackageToken})
(isempty(args) || args[1] isa PackageIdentifier) && return
- modifier = popfirst!(args)
- if modifier isa Subdir
- pkg.subdir = modifier.dir
- (isempty(args) || args[1] isa PackageIdentifier) && return
+ parsed_subdir = false
+ parsed_version = false
+ parsed_rev = false
+ while !isempty(args)
modifier = popfirst!(args)
+ if modifier isa PackageIdentifier
+ pushfirst!(args, modifier)
+ return
+ elseif modifier isa Subdir
+ if parsed_subdir
+ pkgerror("Multiple subdir specifiers `$args` found.")
+ end
+ pkg.subdir = modifier.dir
+ parsed_subdir = true
+ (isempty(args) || args[1] isa PackageIdentifier) && return
+ elseif modifier isa VersionToken
+ if parsed_version
+ pkgerror("Multiple version specifiers `$args` found.")
+ end
+ pkg.version = modifier.version
+ parsed_version = true
+ elseif modifier isa Rev
+ if parsed_rev
+ pkgerror("Multiple revision specifiers `$args` found.")
+ end
+ pkg.rev = modifier.rev
+ parsed_rev = true
+ else
+ pkgerror("Package name/uuid must precede subdir specifier `$args`.")
+ end
end
-
- if modifier isa VersionToken
- pkg.version = modifier.version
- elseif modifier isa Rev
- pkg.rev = modifier.rev
- else
- pkgerror("Package name/uuid must precede subdir specifier `$args`.")
- end
+ return
end
pkgs = PackageSpec[]
while !isempty(args)
arg = popfirst!(args)
if arg isa PackageIdentifier
- pkg = parse_package_identifier(arg; add_or_develop=add_or_dev)
+ pkg = parse_package_identifier(arg; add_or_develop = add_or_dev)
apply_modifier!(pkg, args)
push!(pkgs, pkg)
- # Modifiers without a corresponding package identifier -- this is a user error
+ # Modifiers without a corresponding package identifier -- this is a user error
else
arg isa VersionToken ?
pkgerror("Package name/uuid must precede version specifier `@$arg`.") :
- arg isa Rev ?
+ arg isa Rev ?
pkgerror("Package name/uuid must precede revision specifier `#$(arg.rev)`.") :
pkgerror("Package name/uuid must precede subdir specifier `[$arg]`.")
end
@@ -130,31 +451,27 @@ function parse_package_args(args::Vector{PackageToken}; add_or_dev=false)::Vecto
end
let uuid = raw"(?i)[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}(?-i)",
- name = raw"(\w+)(?:\.jl)?"
+ name = raw"(\w+)(?:\.jl)?"
global const name_re = Regex("^$name\$")
global const uuid_re = Regex("^$uuid\$")
global const name_uuid_re = Regex("^$name\\s*=\\s*($uuid)\$")
end
# packages can be identified through: uuid, name, or name+uuid
# additionally valid for add/develop are: local path, url
-function parse_package_identifier(pkg_id::PackageIdentifier; add_or_develop=false)::PackageSpec
+function parse_package_identifier(pkg_id::PackageIdentifier; add_or_develop = false)::PackageSpec
word = pkg_id.val
if add_or_develop
- if isurl(word)
- return PackageSpec(; url=word)
- elseif any(occursin.(['\\','/'], word)) || word == "." || word == ".."
- if casesensitive_isdir(expanduser(word))
- return PackageSpec(; path=normpath(expanduser(word)))
- else
- pkgerror("`$word` appears to be a local path, but directory does not exist")
- end
- end
- if occursin(name_re, word) && casesensitive_isdir(expanduser(word))
+ if occursin(name_re, word) && isdir(expanduser(word))
@info "Use `./$word` to add or develop the local directory at `$(Base.contractuser(abspath(word)))`."
end
+ if isurl(word)
+ return PackageSpec(; url = word)
+ elseif any(occursin.(['\\', '/'], word)) || word == "." || word == ".."
+ return PackageSpec(; path = normpath(expanduser(word)))
+ end
end
if occursin(uuid_re, word)
- return PackageSpec(;uuid=UUID(word))
+ return PackageSpec(; uuid = UUID(word))
elseif occursin(name_re, word)
m = match(name_re, word)
return PackageSpec(String(something(m.captures[1])))
@@ -169,18 +486,18 @@ end
################
# RegistrySpec #
################
-function parse_registry(raw_args::Vector{QString}, options; add=false)
+function parse_registry(raw_args::Vector{QString}, options; add = false)
regs = RegistrySpec[]
- foreach(x -> push!(regs, parse_registry(x; add=add)), unwrap(raw_args))
+ foreach(x -> push!(regs, parse_registry(x; add = add)), unwrap(raw_args))
return regs
end
# Registries can be identified through: uuid, name, or name+uuid
# when updating/removing. When adding we can accept a local path or url.
-function parse_registry(word::AbstractString; add=false)::RegistrySpec
+function parse_registry(word::AbstractString; add = false)::RegistrySpec
word = expanduser(word)
registry = RegistrySpec()
- if add && isdir_nothrow(word) # TODO: Should be casesensitive_isdir
+ if add && isdir_nothrow(word)
if isdir(joinpath(word, ".git")) # add path as url and clone it from there
registry.url = abspath(word)
else # put the path
@@ -204,6 +521,14 @@ function parse_registry(word::AbstractString; add=false)::RegistrySpec
return registry
end
+#
+# # Apps
+#
+function parse_app_add(raw_args::Vector{QString}, options)
+ return parse_package(raw_args, options; add_or_dev = true)
+end
+
+
#
# # Other
#
diff --git a/src/REPLMode/command_declarations.jl b/src/REPLMode/command_declarations.jl
index cb00dfb260..adb131dc1a 100644
--- a/src/REPLMode/command_declarations.jl
+++ b/src/REPLMode/command_declarations.jl
@@ -1,585 +1,760 @@
-const PSA = Pair{Symbol,Any}
+const PSA = Pair{Symbol, Any}
compound_declarations = [
-"package" => CommandDeclaration[
-PSA[:name => "test",
- :api => API.test,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "coverage", :api => :coverage => true],
+ "package" => CommandDeclaration[
+ PSA[
+ :name => "test",
+ :api => API.test,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "coverage", :api => :coverage => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "run tests for packages",
+ :help => md"""
+ test [--coverage] [pkg[=uuid]] ...
+
+ Run the tests for package `pkg`, or for the current project (which thus needs to be
+ a package) if `pkg` is omitted. This is done by running the file `test/runtests.jl`
+ in the package directory. The option `--coverage` can be used to run the tests with
+ coverage enabled. The `startup.jl` file is disabled during testing unless
+ julia is started with `--startup-file=yes`.
+ """,
+ ],
+ PSA[
+ :name => "help",
+ :short_name => "?",
+ :api => identity, # dummy API function
+ :arg_count => 0 => Inf,
+ :arg_parser => ((x, y) -> x),
+ :completions => :complete_help,
+ :description => "show this message",
+ :help => md"""
+ [?|help]
+
+ List available commands along with short descriptions.
+
+ [?|help] cmd
+
+ If `cmd` is a partial command, display help for all subcommands.
+ If `cmd` is a full command, display help for `cmd`.
+ """,
+ ],
+ PSA[
+ :name => "instantiate",
+ :api => API.instantiate,
+ :option_spec => [
+ PSA[:name => "project", :short_name => "p", :api => :manifest => false],
+ PSA[:name => "manifest", :short_name => "m", :api => :manifest => true],
+ PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
+ PSA[:name => "workspace", :api => :workspace => true],
+ PSA[:name => "julia_version_strict", :api => :julia_version_strict => false],
+ ],
+ :description => "downloads all the dependencies for the project",
+ :help => md"""
+ instantiate [-v|--verbose] [--workspace] [--julia_version_strict]
+ instantiate [-v|--verbose] [--workspace] [--julia_version_strict] [-m|--manifest]
+ instantiate [-v|--verbose] [--workspace] [--julia_version_strict] [-p|--project]
+
+ Download all the dependencies for the current project at the version given by the project's manifest.
+ If no manifest exists or the `--project` option is given, resolve and download the dependencies compatible with the project.
+ If `--workspace` is given, all dependencies in the workspace will be downloaded.
+ If `--julia_version_strict` is given, manifest version check failures will error instead of log warnings.
+
+ After packages have been installed the project will be precompiled. For more information see `pkg> ?precompile`.
+ """,
+ ],
+ PSA[
+ :name => "remove",
+ :short_name => "rm",
+ :api => API.rm,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
+ PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
+ PSA[:name => "all", :api => :all_pkgs => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "remove packages from project or manifest",
+ :help => md"""
+ [rm|remove] [-p|--project] pkg[=uuid] ...
+ [rm|remove] [-p|--project] [--all]
+
+ Remove package `pkg` from the project file. Since the name `pkg` can only
+ refer to one package in a project this is unambiguous, but you can specify
+ a `uuid` anyway, and the command is ignored, with a warning, if package name
+ and UUID do not match. When a package is removed from the project file, it
+ may still remain in the manifest if it is required by some other package in
+ the project. Project mode operation is the default, so passing `-p` or
+ `--project` is optional unless it is preceded by the `-m` or `--manifest`
+ options at some earlier point. All packages can be removed by passing `--all`.
+
+ [rm|remove] [-m|--manifest] pkg[=uuid] ...
+ [rm|remove] [-m|--manifest] [--all]
+
+ Remove package `pkg` from the manifest file. If the name `pkg` refers to
+ multiple packages in the manifest, `uuid` disambiguates it. Removing a package
+ from the manifest forces the removal of all packages that depend on it, as well
+ as any no-longer-necessary manifest packages due to project package removals.
+ All packages can be removed by passing `--all`.
+ """,
+ ],
+ PSA[
+ :name => "add",
+ :api => API.add,
+ :should_splat => false,
+ :arg_count => 1 => Inf,
+ :arg_parser => ((x, y) -> parse_package(x, y; add_or_dev = true)),
+ :option_spec => [
+ PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
+ PSA[:name => "weak", :short_name => "w", :api => :target => :weakdeps],
+ PSA[:name => "extra", :short_name => "e", :api => :target => :extras],
+ ],
+ :completions => :complete_add_dev,
+ :description => "add packages to project",
+ :help => md"""
+ add [--preserve=] [-w|--weak] [-e|--extra] pkg[=uuid] [@version] [#rev] ...
+
+ Add package `pkg` to the current project file. If `pkg` could refer to
+ multiple different packages, specifying `uuid` allows you to disambiguate.
+ `@version` optionally allows specifying which versions of packages to add. Version specifications
+ are of the form `@1`, `@1.2` or `@1.2.3`, allowing any version with a prefix
+ that matches, or ranges thereof, such as `@1.2-3.4.5`. A git revision can be
+ specified by `#branch` or `#commit`.
+
+ If the active environment is a package (the Project has both `name` and `uuid` fields) compat entries will be
+ added automatically with a lower bound of the added version.
+
+ If a local path is used as an argument to `add`, the path needs to be a git repository.
+ The project will then track that git repository just like it would track a remote repository online.
+ If the package is not located at the top of the git repository, a subdirectory can be specified with
+ `path:subdir/path`.
+
+ `Pkg` resolves the set of packages in your environment using a tiered approach.
+ The `--preserve` command line option allows you to key into a specific tier in the resolve algorithm.
+ The following table describes the command line arguments to `--preserve` (in order of strictness).
+
+ | Argument | Description |
+ |:-------------------|:-----------------------------------------------------------------------------------|
+ | `installed` | Like `all` except also only add versions that are already installed |
+ | `all` | Preserve the state of all existing dependencies (including recursive dependencies) |
+ | `direct` | Preserve the state of all existing direct dependencies |
+ | `semver` | Preserve semver-compatible versions of direct dependencies |
+ | `none` | Do not attempt to preserve any version information |
+ | `tiered_installed` | Like `tiered` except first try to add only installed versions |
+ | **`tiered`** | Use the tier that will preserve the most version information while |
+ | | allowing version resolution to succeed (this is the default) |
+
+ Note: To make the default strategy `tiered_installed` set the env var `JULIA_PKG_PRESERVE_TIERED_INSTALLED` to
+ true.
+
+ After the installation of new packages the project will be precompiled. For more information see `pkg> ?precompile`.
+
+ With the `installed` strategy the newly added packages will likely already be precompiled, but if not this may be
+ because either the combination of package versions resolved in this environment has not been resolved and
+ precompiled before, or the precompile cache has been deleted by the LRU cache storage
+ (see `JULIA_MAX_NUM_PRECOMPILE_FILES`).
+
+ **Examples**
+ ```
+ pkg> add Example
+ pkg> add --preserve=all Example
+ pkg> add --weak Example
+ pkg> add --extra Example
+ pkg> add Example@0.5
+ pkg> add Example#master
+ pkg> add Example#c37b675
+ pkg> add https://github.com/JuliaLang/Example.jl#master
+ pkg> add git@github.com:JuliaLang/Example.jl.git
+ pkg> add "git@github.com:JuliaLang/Example.jl.git"#master
+ pkg> add https://github.com/Company/MonoRepo:juliapkgs/Package.jl
+ pkg> add Example=7876af07-990d-54b4-ab0e-23690620f79a
+ ```
+ """,
+ ],
+ PSA[
+ :name => "develop",
+ :short_name => "dev",
+ :api => API.develop,
+ :should_splat => false,
+ :arg_count => 1 => Inf,
+ :arg_parser => ((x, y) -> parse_package(x, y; add_or_dev = true)),
+ :option_spec => [
+ PSA[:name => "strict", :api => :strict => true],
+ PSA[:name => "local", :api => :shared => false],
+ PSA[:name => "shared", :api => :shared => true],
+ PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
+ ],
+ :completions => :complete_add_dev,
+ :description => "clone the full package repo locally for development",
+ :help => md"""
+ [dev|develop] [--preserve=] [--shared|--local] pkg[=uuid] ...
+ [dev|develop] [--preserve=] path
+
+ Make a package available for development. If `pkg` is an existing local path, that path will be recorded in
+ the manifest and used. Otherwise, a full git clone of `pkg` is made. The location of the clone is
+ controlled by the `--shared` (default) and `--local` arguments. The `--shared` location defaults to
+ `~/.julia/dev`, but can be controlled with the `JULIA_PKG_DEVDIR` environment variable.
+
+ When `--local` is given, the clone is placed in a `dev` folder in the current project. This
+ is not supported for paths, only registered packages.
+
+ This operation is undone by `free`.
+
+ The preserve strategies offered by `add` are also available via the `preserve` argument.
+ See `add` for more information.
+
+ **Examples**
+ ```jl
+ pkg> develop Example
+ pkg> develop https://github.com/JuliaLang/Example.jl
+ pkg> develop ~/mypackages/Example
+ pkg> develop --local Example
+ ```
+ """,
+ ],
+ PSA[
+ :name => "free",
+ :api => API.free,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :option_spec => [
+ PSA[:name => "all", :api => :all_pkgs => true],
+ PSA[:name => "workspace", :api => :workspace => true],
+ ],
+ :arg_parser => parse_package,
+ :completions => :complete_fixed_packages,
+ :description => "undoes a `pin`, `develop`, or stops tracking a repo",
+ :help => md"""
+ free [--workspace] pkg[=uuid] ...
+ free [--workspace] [--all]
+
+            Free pinned packages, which allows them to be upgraded or downgraded again. If a package is checked out (see `help develop`) then this command
+            makes the package no longer checked out. Specifying `--all` will free all dependencies (direct and indirect).
+ The `--workspace` option includes packages from all projects in the workspace when used with `--all`.
+ """,
+ ],
+ PSA[
+ :name => "why",
+ :api => API.why,
+ :should_splat => false,
+ :arg_count => 1 => 1,
+ :option_spec => [
+ PSA[:name => "workspace", :api => :workspace => true],
+ ],
+ :arg_parser => parse_package,
+ :completions => :complete_all_installed_packages,
+ :description => "shows why a package is in the manifest",
+ :help => md"""
+ why [--workspace] pkg[=uuid] ...
+
+ Show the reason why packages are in the manifest, printed as a path through the
+ dependency graph starting at the direct dependencies.
+ The `workspace` option can be used to show the path from any dependency of a project in
+ the workspace.
+
+ !!! compat "Julia 1.9"
+ The `why` function is added in Julia 1.9
+ """,
+ ],
+ PSA[
+ :name => "pin",
+ :api => API.pin,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :option_spec => [
+ PSA[:name => "all", :api => :all_pkgs => true],
+ PSA[:name => "workspace", :api => :workspace => true],
+ ],
+ :arg_parser => parse_package,
+ :completions => :complete_installed_packages,
+ :description => "pins the version of packages",
+ :help => md"""
+ pin [--workspace] pkg[=uuid] ...
+ pin [--workspace] [--all]
+
+ Pin packages to given versions, or the current version if no version is specified. A pinned package has its version fixed and will not be upgraded or downgraded.
+ A pinned package has the symbol `⚲` next to its version in the status list. Specifying `--all` will pin all dependencies (direct and indirect).
+ The `--workspace` option includes packages from all projects in the workspace when used with `--all`.
+
+ **Examples**
+ ```
+ pkg> pin Example
+ pkg> pin Example@0.5.0
+ pkg> pin Example=7876af07-990d-54b4-ab0e-23690620f79a@0.5.0
+ pkg> pin --all
+ ```
+ """,
+ ],
+ PSA[
+ :name => "build",
+ :api => API.build,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "run the build script for packages",
+ :help => md"""
+ build [-v|--verbose] pkg[=uuid] ...
+
+ Run the build script in `deps/build.jl` for `pkg` and all of its dependencies in depth-first recursive order.
+ If no packages are given, run the build scripts for all packages in the manifest.
+ The `-v`/`--verbose` option redirects build output to `stdout`/`stderr` instead of the `build.log` file.
+ The `startup.jl` file is disabled during building unless julia is started with `--startup-file=yes`.
+ """,
+ ],
+ PSA[
+ :name => "resolve",
+ :api => API.resolve,
+ :description => "resolves to update the manifest from changes in dependencies of developed packages",
+ :help => md"""
+ resolve
+
+ Resolve the project i.e. run package resolution and update the Manifest. This is useful in case the dependencies of developed
+ packages have changed causing the current Manifest to be out of sync.
+ """,
+ ],
+ PSA[
+ :name => "activate",
+ :api => API.activate,
+ :arg_count => 0 => 1,
+ :arg_parser => parse_activate,
+ :option_spec => [
+ PSA[:name => "shared", :api => :shared => true],
+ PSA[:name => "temp", :api => :temp => true],
+ ],
+ :completions => :complete_activate,
+ :description => "set the primary environment the package manager manipulates",
+ :help => md"""
+ activate
+ activate [--shared] path
+ activate --temp
+ activate - (activates the previously active environment)
+
+ Activate the environment at the given `path`, or return to the default environment if no
+ `path` is specified. When called with no arguments, this returns you to the default shared
+ environment (typically `@v#.#` in `~/.julia/environments/v#.#/`), which is the standard way
+ to "deactivate" a project environment.
+
+ The active environment is the environment that is modified by executing package commands.
+ Activating an environment only affects the current Julia session and does not persist when
+ you restart Julia (unless you use the `--project` startup flag).
+
+ When the option `--shared` is given, `path` will be assumed to be a directory name and searched for in the
+ `environments` folders of the depots in the depot stack. In case no such environment exists in any of the depots,
+ it will be placed in the first depot of the stack.
+
+ Use the `--temp` option to create temporary environments which are removed when the julia
+ process is exited.
+
+ Use a single `-` to activate the previously active environment.
+ """,
+ ],
+ PSA[
+ :name => "update",
+ :short_name => "up",
+ :api => API.up,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
+ PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
+ PSA[:name => "major", :api => :level => UPLEVEL_MAJOR],
+ PSA[:name => "minor", :api => :level => UPLEVEL_MINOR],
+ PSA[:name => "patch", :api => :level => UPLEVEL_PATCH],
+ PSA[:name => "fixed", :api => :level => UPLEVEL_FIXED],
+ PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
+ PSA[:name => "workspace", :api => :workspace => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "update packages in manifest",
+ :help => md"""
+ [up|update] [-p|--project] [--workspace] [opts] pkg[=uuid] [@version] ...
+ [up|update] [-m|--manifest] [--workspace] [opts] pkg[=uuid] [@version] ...
+
+ opts: --major | --minor | --patch | --fixed
+ --preserve=
+
+ Update `pkg` within the constraints of the indicated version
+ specifications. These specifications are of the form `@1`, `@1.2` or `@1.2.3`, allowing
+ any version with a prefix that matches, or ranges thereof, such as `@1.2-3.4.5`.
+ In `--project` mode, package specifications only match project packages, while
+ in `--manifest` mode they match any manifest package. Bound level options force
+ the following packages to be upgraded only within the current major, minor,
+ patch version; if the `--fixed` upgrade level is given, then the following
+ packages will not be upgraded at all.
+ The `--workspace` option includes packages from all projects in the workspace
+ when no packages are specified.
+
+ After any package updates the project will be precompiled. For more information see `pkg> ?precompile`.
+ """,
+ ],
+ PSA[
+ :name => "generate",
+ :api => API.generate,
+ :arg_count => 1 => 1,
+ :arg_parser => ((x, y) -> map(expanduser, unwrap(x))),
+ :description => "generate files for a new project",
+ :help => md"""
+ generate pkgname
+
+ Create a minimal project called `pkgname` in the current folder. For more featureful package creation, please see `PkgTemplates.jl`.
+ """,
+ ],
+ PSA[
+ :name => "precompile",
+ :api => API.precompile,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :completions => :complete_installed_packages,
+ :description => "precompile all the project dependencies",
+ :option_spec => [
+ PSA[:name => "workspace", :api => :workspace => true],
+ PSA[:name => "monitor", :api => :monitor => true],
+ PSA[:name => "stop", :api => :stop => true],
+ PSA[:name => "cancel", :api => :cancel => true],
+ ],
+ :help => md"""
+ precompile [--workspace]
+ precompile [--monitor | --stop | --cancel]
+ precompile [--workspace] pkgs...
+
+ Precompile all or specified dependencies of the project in parallel.
+ The `startup.jl` file is disabled during precompilation unless julia is started with `--startup-file=yes`.
+ The `workspace` option will precompile all packages in the workspace and not only the active project.
+
+ Errors will only throw when precompiling the top-level dependencies, given that
+ not all manifest dependencies may be loaded by the top-level dependencies on the given system.
+
+ This method is called automatically after any Pkg action that changes the manifest.
+ Any packages that have previously errored during precompilation won't be retried in auto mode
+ until they have changed. To disable automatic precompilation set the environment variable `JULIA_PKG_PRECOMPILE_AUTO=0`.
+ To manually control the number of tasks used set the environment variable `JULIA_NUM_PRECOMPILE_TASKS`.
+
+ Background precompilation control (after detaching with `d` during precompilation):
+ - `--monitor`: Reattach to the running background precompilation
+ - `--stop`: Gracefully stop background precompilation (waits for active jobs to finish)
+ - `--cancel`: Cancel background precompilation immediately (interrupts active jobs)
+
+ Keyboard controls during precompilation:
+ - `d`/`q`/`]`: Detach (return to REPL, precompilation continues in background)
+ - `c`: Cancel precompilation (kills subprocesses, prompts for confirmation)
+ - `i`: Send a profiling signal to subprocesses for a profile peek
+ - `v`: Toggle verbose mode (timing, worker PID, CPU%, memory)
+ - `?`/`h`: Show keyboard shortcut help
+ - `Ctrl-C`: Interrupt (sends SIGINT to subprocesses, shows output)
+ """,
+ ],
+ PSA[
+ :name => "status",
+ :short_name => "st",
+ :api => API.status,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
+ PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
+ PSA[:name => "diff", :short_name => "d", :api => :diff => true],
+ PSA[:name => "outdated", :short_name => "o", :api => :outdated => true],
+ PSA[:name => "deprecated", :api => :deprecated => true],
+ PSA[:name => "compat", :short_name => "c", :api => :compat => true],
+ PSA[:name => "extensions", :short_name => "e", :api => :extensions => true],
+ PSA[:name => "workspace", :api => :workspace => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "summarize contents of and changes to environment",
+ :help => md"""
+ [st|status] [-d|--diff] [--workspace] [-o|--outdated] [--deprecated] [pkgs...]
+ [st|status] [-d|--diff] [--workspace] [-o|--outdated] [--deprecated] [-p|--project] [pkgs...]
+ [st|status] [-d|--diff] [--workspace] [-o|--outdated] [--deprecated] [-m|--manifest] [pkgs...]
+ [st|status] [-d|--diff] [--workspace] [-e|--extensions] [-p|--project] [pkgs...]
+ [st|status] [-d|--diff] [--workspace] [-e|--extensions] [-m|--manifest] [pkgs...]
+ [st|status] [-c|--compat] [pkgs...]
+
+ Show the status of the current environment. Packages marked with `⌃` have new
+ versions that may be installed, e.g. via `pkg> up`. Those marked with `⌅` have
+ new versions available, but cannot be installed due to compatibility
+ constraints. To see why use `pkg> status --outdated` which shows any packages
+ that are not at their latest version and if any packages are holding them back.
+ Packages marked with `[yanked]` have been yanked from the registry and should be
+ updated or removed. Packages marked with `[deprecated]` are no longer maintained.
+
+ Use `pkg> status --deprecated` to show only deprecated packages along with deprecation
+ information such as the reason and alternative packages (if provided by the registry).
+
+            Use `pkg> status --extensions` to show dependencies with extensions and which of their
+            extension dependencies are currently loaded.
+
+ In `--project` mode (default), the status of the project file is summarized. In `--manifest`
+ mode the output also includes the recursive dependencies of added packages given in the manifest.
+ If there are any packages listed as arguments the output will be limited to those packages.
+ The `--diff` option will, if the environment is in a git repository, limit
+ the output to the difference as compared to the last git commit.
+ The `--compat` option alone shows project compat entries.
+ The `--workspace` option shows the (merged) status of packages in the workspace.
+
+ !!! compat "Julia 1.8"
+ The `⌃` and `⌅` indicators were added in Julia 1.8.
+ The `--outdated` and `--compat` options require at least Julia 1.8.
+ """,
+ ],
+ PSA[
+ :name => "compat",
+ :api => API.compat,
+ :arg_count => 0 => 2,
+ :completions => :complete_installed_packages_and_compat,
+ :option_spec => [
+ PSA[:name => "current", :api => :current => true],
+ ],
+ :description => "edit compat entries in the current Project and re-resolve",
+ :help => md"""
+ compat [pkg] [compat_string]
+ compat
+ compat --current
+ compat --current [pkg]
+
+ Edit project [compat] entries directly, or via an interactive menu by not specifying any arguments.
+ Use the `--current` flag to automatically populate missing compat entries with currently resolved versions.
+ When used alone, applies to all packages missing compat entries.
+ When combined with a package name, applies only to that package.
+ When directly editing use tab to complete the package name and any existing compat entry.
+ Specifying a package with a blank compat entry will remove the entry.
+ After changing compat entries a `resolve` will be attempted to check whether the current
+ environment is compliant with the new compat rules.
+ """,
+ ],
+ PSA[
+ :name => "gc",
+ :api => API.gc,
+ :option_spec => [
+ PSA[:name => "all", :api => :collect_delay => nothing],
+ PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
+ ],
+ :description => "garbage collect packages not used for a significant time",
+ :help => md"""
+ gc [-v|--verbose] [--all]
+
+ Free disk space by garbage collecting packages not used for a significant time.
+ The `--all` option will garbage collect all packages which can not be immediately
+ reached from any existing project.
+ Use verbose mode for detailed output.
+ """,
+ ],
+ PSA[
+ :name => "undo",
+ :api => API.undo,
+ :description => "undo the latest change to the active project",
+ :help => md"""
+ undo
+
+ Undoes the latest change to the active project.
+ """,
+ ],
+ PSA[
+ :name => "redo",
+ :api => API.redo,
+ :description => "redo the latest change to the active project",
+ :help => md"""
+ redo
+
+ Redoes the changes from the latest [`undo`](@ref).
+ """,
+ ],
+ ], #package
+ "registry" => CommandDeclaration[
+ PSA[
+ :name => "add",
+ :api => Registry.add,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => ((x, y) -> parse_registry(x, y; add = true)),
+ :description => "add package registries",
+ :help => md"""
+ registry add reg...
+
+ Add package registries `reg...` to the user depot. Without arguments
+ it adds known registries, i.e. the General registry and registries
+ served by the configured package server.
+
+ **Examples**
+ ```
+ pkg> registry add General
+ pkg> registry add https://www.my-custom-registry.com
+ pkg> registry add
+ ```
+ """,
+ ],
+ PSA[
+ :name => "remove",
+ :short_name => "rm",
+ :api => Registry.rm,
+ :should_splat => false,
+ :arg_count => 1 => Inf,
+ :arg_parser => parse_registry,
+ :description => "remove package registries",
+ :help => md"""
+ registry [rm|remove] reg...
+
+ Remove package registries `reg...`.
+
+ **Examples**
+ ```
+ pkg> registry [rm|remove] General
+ ```
+ """,
+ ],
+ PSA[
+ :name => "update",
+ :short_name => "up",
+ :api => Registry.update,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_registry,
+ :description => "update package registries",
+ :help => md"""
+ registry [up|update]
+ registry [up|update] reg...
+
+ Update package registries `reg...`. If no registries are specified
+ all registries will be updated.
+
+ **Examples**
+ ```
+ pkg> registry up
+ pkg> registry up General
+ ```
+ """,
+ ],
+ PSA[
+ :name => "status",
+ :short_name => "st",
+ :api => Registry.status,
+ :description => "information about installed registries",
+ :help => md"""
+ registry [st|status]
+
+ Display information about installed registries.
+
+ **Examples**
+ ```
+ pkg> registry status
+ ```
+ """,
+ ],
+ ], #registry
+ "app" => CommandDeclaration[
+ PSA[
+ :name => "status",
+ :short_name => "st",
+ :api => Apps.status,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :completions => :complete_installed_apps,
+ :description => "show status of apps",
+ :help => md"""
+ app status [pkg[=uuid]] ...
+
+ Show the status of installed apps. If packages are specified, only show
+ apps for those packages.
+ """,
+ ],
+ PSA[
+ :name => "add",
+ :api => Apps.add,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_app_add,
+ :completions => :complete_add_dev,
+ :description => "add app",
+ :help => md"""
+ app add pkg[=uuid] ...
+
+ Add apps provided by packages `pkg...`. This will make the apps available
+ as executables in `~/.julia/bin` (which should be added to PATH).
+
+ **Examples**
+ ```
+ pkg> app add Example
+ pkg> app add Example@0.5.0
+ ```
+ """,
+ ],
+ PSA[
+ :name => "remove",
+ :short_name => "rm",
+ :api => Apps.rm,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :completions => :complete_installed_apps,
+ :description => "remove apps",
+ :help => md"""
+ app [rm|remove] pkg[=uuid] ...
+
+ Remove apps provided by packages `pkg...`. This will remove the executables
+ from `~/.julia/bin`.
+
+ **Examples**
+ ```
+ pkg> app rm Example
+ ```
+ """,
+ ],
+ PSA[
+ :name => "develop",
+ :short_name => "dev",
+ :api => Apps.develop,
+ :should_splat => false,
+ :arg_count => 1 => Inf,
+ :arg_parser => (x, y) -> parse_package(x, y; add_or_dev = true),
+ :completions => :complete_add_dev,
+ :description => "develop a package and install all the apps in it",
+ :help => md"""
+ app [dev|develop] pkg[=uuid] ...
+ app [dev|develop] path
+
+ Same as `develop` but also installs all the apps in the package.
+ This allows one to edit their app and have the changes immediately be reflected in the app.
+
+ **Examples**
+ ```jl
+ pkg> app develop Example
+ pkg> app develop https://github.com/JuliaLang/Example.jl
+ pkg> app develop ~/mypackages/Example
+ pkg> app develop --local Example
+ ```
+ """,
+ ],
+ PSA[
+ :name => "update",
+ :short_name => "up",
+ :api => Apps.update,
+ :completions => :complete_installed_apps,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :description => "update app",
+ :help => md"""
+ app [up|update] [pkg[=uuid]] ...
+
+ Update apps for packages `pkg...`. If no packages are specified, all apps will be updated.
+
+ **Examples**
+ ```
+ pkg> app update
+ pkg> app update Example
+ ```
+ """,
+ ], # app
],
- :completions => :complete_installed_packages,
- :description => "run tests for packages",
- :help => md"""
- test [--coverage] [pkg[=uuid]] ...
-
-Run the tests for package `pkg`, or for the current project (which thus needs to be
-a package) if `pkg` is ommitted. This is done by running the file `test/runtests.jl`
-in the package directory. The option `--coverage` can be used to run the tests with
-coverage enabled. The `startup.jl` file is disabled during testing unless
-julia is started with `--startup-file=yes`.
-""",
-],
-PSA[:name => "help",
- :short_name => "?",
- :api => identity, # dummy API function
- :arg_count => 0 => Inf,
- :arg_parser => ((x,y) -> x),
- :completions => :complete_help,
- :description => "show this message",
- :help => md"""
- [?|help]
-
-List available commands along with short descriptions.
-
- [?|help] cmd
-
-If `cmd` is a partial command, display help for all subcommands.
-If `cmd` is a full command, display help for `cmd`.
-""",
-],
-PSA[:name => "instantiate",
- :api => API.instantiate,
- :option_spec => [
- PSA[:name => "project", :short_name => "p", :api => :manifest => false],
- PSA[:name => "manifest", :short_name => "m", :api => :manifest => true],
- PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
- PSA[:name => "workspace", :api => :workspace => true],
- PSA[:name => "julia_version_strict", :api => :julia_version_strict => false],
- ],
- :description => "downloads all the dependencies for the project",
- :help => md"""
- instantiate [-v|--verbose] [--workspace] [--julia_version_strict]
- instantiate [-v|--verbose] [--workspace] [--julia_version_strict] [-m|--manifest]
- instantiate [-v|--verbose] [--workspace] [--julia_version_strict] [-p|--project]
-
-Download all the dependencies for the current project at the version given by the project's manifest.
-If no manifest exists or the `--project` option is given, resolve and download the dependencies compatible with the project.
-If `--workspace` is given, all dependencies in the workspace will be downloaded.
-If `--julia_version_strict` is given, manifest version check failures will error instead of log warnings.
-
-After packages have been installed the project will be precompiled. For more information see `pkg> ?precompile`.
-""",
-],
-PSA[:name => "remove",
- :short_name => "rm",
- :api => API.rm,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
- PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
- PSA[:name => "all", :api => :all_pkgs => true],
- ],
- :completions => :complete_installed_packages,
- :description => "remove packages from project or manifest",
- :help => md"""
- [rm|remove] [-p|--project] pkg[=uuid] ...
- [rm|remove] [-p|--project] [--all]
-
-Remove package `pkg` from the project file. Since the name `pkg` can only
-refer to one package in a project this is unambiguous, but you can specify
-a `uuid` anyway, and the command is ignored, with a warning, if package name
-and UUID do not match. When a package is removed from the project file, it
-may still remain in the manifest if it is required by some other package in
-the project. Project mode operation is the default, so passing `-p` or
-`--project` is optional unless it is preceded by the `-m` or `--manifest`
-options at some earlier point. All packages can be removed by passing `--all`.
-
- [rm|remove] [-m|--manifest] pkg[=uuid] ...
- [rm|remove] [-m|--manifest] [--all]
-
-Remove package `pkg` from the manifest file. If the name `pkg` refers to
-multiple packages in the manifest, `uuid` disambiguates it. Removing a package
-from the manifest forces the removal of all packages that depend on it, as well
-as any no-longer-necessary manifest packages due to project package removals.
-All packages can be removed by passing `--all`.
-""",
-],
-PSA[:name => "add",
- :api => API.add,
- :should_splat => false,
- :arg_count => 1 => Inf,
- :arg_parser => ((x,y) -> parse_package(x,y; add_or_dev=true)),
- :option_spec => [
- PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
- PSA[:name => "weak", :short_name => "w", :api => :target => :weakdeps],
- PSA[:name => "extra", :short_name => "e", :api => :target => :extras],
- ],
- :completions => :complete_add_dev,
- :description => "add packages to project",
- :help => md"""
- add [--preserve=] [-w|--weak] [-e|--extra] pkg[=uuid] [@version] [#rev] ...
-
-Add package `pkg` to the current project file. If `pkg` could refer to
-multiple different packages, specifying `uuid` allows you to disambiguate.
-`@version` optionally allows specifying which versions of packages to add. Version specifications
-are of the form `@1`, `@1.2` or `@1.2.3`, allowing any version with a prefix
-that matches, or ranges thereof, such as `@1.2-3.4.5`. A git revision can be
-specified by `#branch` or `#commit`.
-
-If the active environment is a package (the Project has both `name` and `uuid` fields) compat entries will be
-added automatically with a lower bound of the added version.
-
-If a local path is used as an argument to `add`, the path needs to be a git repository.
-The project will then track that git repository just like it would track a remote repository online.
-If the package is not located at the top of the git repository, a subdirectory can be specified with
-`path:subdir/path`.
-
-`Pkg` resolves the set of packages in your environment using a tiered approach.
-The `--preserve` command line option allows you to key into a specific tier in the resolve algorithm.
-The following table describes the command line arguments to `--preserve` (in order of strictness).
-
-| Argument | Description |
-|:-------------------|:-----------------------------------------------------------------------------------|
-| `installed` | Like `all` except also only add versions that are already installed |
-| `all` | Preserve the state of all existing dependencies (including recursive dependencies) |
-| `direct` | Preserve the state of all existing direct dependencies |
-| `semver` | Preserve semver-compatible versions of direct dependencies |
-| `none` | Do not attempt to preserve any version information |
-| `tiered_installed` | Like `tiered` except first try to add only installed versions |
-| **`tiered`** | Use the tier that will preserve the most version information while |
-| | allowing version resolution to succeed (this is the default) |
-
-Note: To make the default strategy `tiered_installed` set the env var `JULIA_PKG_PRESERVE_TIERED_INSTALLED` to
-true.
-
-After the installation of new packages the project will be precompiled. For more information see `pkg> ?precompile`.
-
-With the `installed` strategy the newly added packages will likely already be precompiled, but if not this may be
-because either the combination of package versions resolved in this environment has not been resolved and
-precompiled before, or the precompile cache has been deleted by the LRU cache storage
-(see `JULIA_MAX_NUM_PRECOMPILE_FILES`).
-
-**Examples**
-```
-pkg> add Example
-pkg> add --preserve=all Example
-pkg> add --weak Example
-pkg> add --extra Example
-pkg> add Example@0.5
-pkg> add Example#master
-pkg> add Example#c37b675
-pkg> add https://github.com/JuliaLang/Example.jl#master
-pkg> add git@github.com:JuliaLang/Example.jl.git
-pkg> add "git@github.com:JuliaLang/Example.jl.git"#master
-pkg> add https://github.com/Company/MonoRepo:juliapkgs/Package.jl
-pkg> add Example=7876af07-990d-54b4-ab0e-23690620f79a
-```
-""",
-],
-PSA[:name => "develop",
- :short_name => "dev",
- :api => API.develop,
- :should_splat => false,
- :arg_count => 1 => Inf,
- :arg_parser => ((x,y) -> parse_package(x,y; add_or_dev=true)),
- :option_spec => [
- PSA[:name => "strict", :api => :strict => true],
- PSA[:name => "local", :api => :shared => false],
- PSA[:name => "shared", :api => :shared => true],
- PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
- ],
- :completions => :complete_add_dev,
- :description => "clone the full package repo locally for development",
- :help => md"""
- [dev|develop] [--preserve=] [--shared|--local] pkg[=uuid] ...
- [dev|develop] [--preserve=] path
-
-Make a package available for development. If `pkg` is an existing local path, that path will be recorded in
-the manifest and used. Otherwise, a full git clone of `pkg` is made. The location of the clone is
-controlled by the `--shared` (default) and `--local` arguments. The `--shared` location defaults to
-`~/.julia/dev`, but can be controlled with the `JULIA_PKG_DEVDIR` environment variable.
-
-When `--local` is given, the clone is placed in a `dev` folder in the current project. This
-is not supported for paths, only registered packages.
-
-This operation is undone by `free`.
-
-The preserve strategies offered by `add` are also available via the `preserve` argument.
-See `add` for more information.
-
-**Examples**
-```jl
-pkg> develop Example
-pkg> develop https://github.com/JuliaLang/Example.jl
-pkg> develop ~/mypackages/Example
-pkg> develop --local Example
-```
-""",
-],
-PSA[:name => "free",
- :api => API.free,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :option_spec => [
- PSA[:name => "all", :api => :all_pkgs => true],
- ],
- :arg_parser => parse_package,
- :completions => :complete_fixed_packages,
- :description => "undoes a `pin`, `develop`, or stops tracking a repo",
- :help => md"""
- free pkg[=uuid] ...
- free [--all]
-
-Free pinned packages, which allows it to be upgraded or downgraded again. If the package is checked out (see `help develop`) then this command
-makes the package no longer being checked out. Specifying `--all` will free all dependencies (direct and indirect).
-""",
-],
-PSA[:name => "why",
- :api => API.why,
- :should_splat => false,
- :arg_count => 1 => 1,
- :option_spec => [
- PSA[:name => "workspace", :api => :workspace => true],
- ],
- :arg_parser => parse_package,
- :completions => :complete_all_installed_packages,
- :description => "shows why a package is in the manifest",
- :help => md"""
- why [--workspace] pkg[=uuid] ...
-
-Show the reason why packages are in the manifest, printed as a path through the
-dependency graph starting at the direct dependencies.
-The `workspace` option can be used to show the path from any dependency of a project in
-the workspace.
-
-!!! compat "Julia 1.9"
- The `why` function is added in Julia 1.9
-""",
-],
-PSA[:name => "pin",
- :api => API.pin,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :option_spec => [
- PSA[:name => "all", :api => :all_pkgs => true],
- ],
- :arg_parser => parse_package,
- :completions => :complete_installed_packages,
- :description => "pins the version of packages",
- :help => md"""
- pin pkg[=uuid] ...
- pin [--all]
-
-Pin packages to given versions, or the current version if no version is specified. A pinned package has its version fixed and will not be upgraded or downgraded.
-A pinned package has the symbol `⚲` next to its version in the status list.. Specifying `--all` will pin all dependencies (direct and indirect).
-
-**Examples**
-```
-pkg> pin Example
-pkg> pin Example@0.5.0
-pkg> pin Example=7876af07-990d-54b4-ab0e-23690620f79a@0.5.0
-pkg> pin --all
-```
-""",
-],
-PSA[:name => "build",
- :api => API.build,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
- ],
- :completions => :complete_installed_packages,
- :description => "run the build script for packages",
- :help => md"""
- build [-v|--verbose] pkg[=uuid] ...
-
-Run the build script in `deps/build.jl` for `pkg` and all of its dependencies in depth-first recursive order.
-If no packages are given, run the build scripts for all packages in the manifest.
-The `-v`/`--verbose` option redirects build output to `stdout`/`stderr` instead of the `build.log` file.
-The `startup.jl` file is disabled during building unless julia is started with `--startup-file=yes`.
-""",
-],
-PSA[:name => "resolve",
- :api => API.resolve,
- :description => "resolves to update the manifest from changes in dependencies of developed packages",
- :help => md"""
- resolve
-
-Resolve the project i.e. run package resolution and update the Manifest. This is useful in case the dependencies of developed
-packages have changed causing the current Manifest to be out of sync.
-""",
-],
-PSA[:name => "activate",
- :api => API.activate,
- :arg_count => 0 => 1,
- :arg_parser => parse_activate,
- :option_spec => [
- PSA[:name => "shared", :api => :shared => true],
- PSA[:name => "temp", :api => :temp => true],
- ],
- :completions => :complete_activate,
- :description => "set the primary environment the package manager manipulates",
- :help => md"""
- activate
- activate [--shared] path
- activate --temp
- activate - (activates the previously active environment)
-
-Activate the environment at the given `path`, or use the first project found in
-`LOAD_PATH` (ignoring `"@"`) if no `path` is specified.
-In the latter case, for the default value of `LOAD_PATH`, the result is to activate the
-`@v#.#` environment.
-The active environment is the environment that is modified by executing package commands.
-When the option `--shared` is given, `path` will be assumed to be a directory name and searched for in the
-`environments` folders of the depots in the depot stack. In case no such environment exists in any of the depots,
-it will be placed in the first depot of the stack.
-Use the `--temp` option to create temporary environments which are removed when the julia
-process is exited.
-Use a single `-` to activate the previously active environment.
-""" ,
-],
-PSA[:name => "update",
- :short_name => "up",
- :api => API.up,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
- PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
- PSA[:name => "major", :api => :level => UPLEVEL_MAJOR],
- PSA[:name => "minor", :api => :level => UPLEVEL_MINOR],
- PSA[:name => "patch", :api => :level => UPLEVEL_PATCH],
- PSA[:name => "fixed", :api => :level => UPLEVEL_FIXED],
- PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
- ],
- :completions => :complete_installed_packages,
- :description => "update packages in manifest",
- :help => md"""
- [up|update] [-p|--project] [opts] pkg[=uuid] [@version] ...
- [up|update] [-m|--manifest] [opts] pkg[=uuid] [@version] ...
-
- opts: --major | --minor | --patch | --fixed
- --preserve=
-
-Update `pkg` within the constraints of the indicated version
-specifications. These specifications are of the form `@1`, `@1.2` or `@1.2.3`, allowing
-any version with a prefix that matches, or ranges thereof, such as `@1.2-3.4.5`.
-In `--project` mode, package specifications only match project packages, while
-in `--manifest` mode they match any manifest package. Bound level options force
-the following packages to be upgraded only within the current major, minor,
-patch version; if the `--fixed` upgrade level is given, then the following
-packages will not be upgraded at all.
-
-After any package updates the project will be precompiled. For more information see `pkg> ?precompile`.
-""",
-],
-PSA[:name => "generate",
- :api => API.generate,
- :arg_count => 1 => 1,
- :arg_parser => ((x,y) -> map(expanduser, unwrap(x))),
- :description => "generate files for a new project",
- :help => md"""
- generate pkgname
-
-Create a minimal project called `pkgname` in the current folder. For more featureful package creation, please see `PkgTemplates.jl`.
-""",
-],
-PSA[:name => "precompile",
- :api => API.precompile,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :completions => :complete_installed_packages,
- :description => "precompile all the project dependencies",
- :option_spec => [
- PSA[:name => "workspace", :api => :workspace => true],
- ],
- :help => md"""
- precompile [--workspace]
- precompile [--workspace] pkgs...
-
-Precompile all or specified dependencies of the project in parallel.
-The `startup.jl` file is disabled during precompilation unless julia is started with `--startup-file=yes`.
-The `workspace` option will precompile all packages in the workspace and not only the active project.
-
-Errors will only throw when precompiling the top-level dependencies, given that
-not all manifest dependencies may be loaded by the top-level dependencies on the given system.
-
-This method is called automatically after any Pkg action that changes the manifest.
-Any packages that have previously errored during precompilation won't be retried in auto mode
-until they have changed. To disable automatic precompilation set the environment variable `JULIA_PKG_PRECOMPILE_AUTO=0`.
-To manually control the number of tasks used set the environment variable `JULIA_NUM_PRECOMPILE_TASKS`.
-""",
-],
-PSA[:name => "status",
- :short_name => "st",
- :api => API.status,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
- PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
- PSA[:name => "diff", :short_name => "d", :api => :diff => true],
- PSA[:name => "outdated", :short_name => "o", :api => :outdated => true],
- PSA[:name => "compat", :short_name => "c", :api => :compat => true],
- PSA[:name => "extensions", :short_name => "e", :api => :extensions => true],
- PSA[:name => "workspace", :api => :workspace => true],
- ],
- :completions => :complete_installed_packages,
- :description => "summarize contents of and changes to environment",
- :help => md"""
- [st|status] [-d|--diff] [--workspace] [-o|--outdated] [pkgs...]
- [st|status] [-d|--diff] [--workspace] [-o|--outdated] [-p|--project] [pkgs...]
- [st|status] [-d|--diff] [--workspace] [-o|--outdated] [-m|--manifest] [pkgs...]
- [st|status] [-d|--diff] [--workspace] [-e|--extensions] [-p|--project] [pkgs...]
- [st|status] [-d|--diff] [--workspace] [-e|--extensions] [-m|--manifest] [pkgs...]
- [st|status] [-c|--compat] [pkgs...]
-
-Show the status of the current environment. Packages marked with `⌃` have new
-versions that may be installed, e.g. via `pkg> up`. Those marked with `⌅` have
-new versions available, but cannot be installed due to compatibility
-constraints. To see why use `pkg> status --outdated` which shows any packages
-that are not at their latest version and if any packages are holding them back.
-
-Use `pkg> status --extensions` to show dependencies with extensions and what extension dependencies
-of those that are currently loaded.
-
-In `--project` mode (default), the status of the project file is summarized. In `--manifest`
-mode the output also includes the recursive dependencies of added packages given in the manifest.
-If there are any packages listed as arguments the output will be limited to those packages.
-The `--diff` option will, if the environment is in a git repository, limit
-the output to the difference as compared to the last git commit.
-The `--compat` option alone shows project compat entries.
-The `--workspace` option shows the (merged) status of packages in the workspace.
-
-!!! compat "Julia 1.8"
- The `⌃` and `⌅` indicators were added in Julia 1.8.
- The `--outdated` and `--compat` options require at least Julia 1.8.
-""",
-],
-PSA[:name => "compat",
- :api => API.compat,
- :arg_count => 0 => 2,
- :completions => :complete_installed_packages_and_compat,
- :description => "edit compat entries in the current Project and re-resolve",
- :help => md"""
- compat [pkg] [compat_string]
-
-Edit project [compat] entries directly, or via an interactive menu by not specifying any arguments.
-When directly editing use tab to complete the package name and any existing compat entry.
-Specifying a package with a blank compat entry will remove the entry.
-After changing compat entries a `resolve` will be attempted to check whether the current
-environment is compliant with the new compat rules.
-""",
-],
-PSA[:name => "gc",
- :api => API.gc,
- :option_spec => [
- PSA[:name => "all", :api => :collect_delay => Hour(0)],
- PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
- ],
- :description => "garbage collect packages not used for a significant time",
- :help => md"""
- gc [-v|--verbose] [--all]
-
-Free disk space by garbage collecting packages not used for a significant time.
-The `--all` option will garbage collect all packages which can not be immediately
-reached from any existing project.
-Use verbose mode for detailed output.
-""",
-],
-PSA[:name => "undo",
- :api => API.undo,
- :description => "undo the latest change to the active project",
- :help => md"""
- undo
-
-Undoes the latest change to the active project.
-""",
-],
-PSA[:name => "redo",
- :api => API.redo,
- :description => "redo the latest change to the active project",
- :help => md"""
- redo
-
-Redoes the changes from the latest [`undo`](@ref).
-""",
-],
-], #package
-"registry" => CommandDeclaration[
-PSA[:name => "add",
- :api => Registry.add,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => ((x,y) -> parse_registry(x,y; add = true)),
- :description => "add package registries",
- :help => md"""
- registry add reg...
-
-Add package registries `reg...` to the user depot. Without arguments
-it adds known registries, i.e. the General registry and registries
-served by the configured package server.
-
-**Examples**
-```
-pkg> registry add General
-pkg> registry add https://www.my-custom-registry.com
-pkg> registry add
-```
-""",
-],
-PSA[:name => "remove",
- :short_name => "rm",
- :api => Registry.rm,
- :should_splat => false,
- :arg_count => 1 => Inf,
- :arg_parser => parse_registry,
- :description => "remove package registries",
- :help => md"""
- registry [rm|remove] reg...
-
-Remove package registries `reg...`.
-
-**Examples**
-```
-pkg> registry [rm|remove] General
-```
-""",
-],
-PSA[:name => "update",
- :short_name => "up",
- :api => Registry.update,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_registry,
- :description => "update package registries",
- :help => md"""
- registry [up|update]
- registry [up|update] reg...
-
-Update package registries `reg...`. If no registries are specified
-all registries will be updated.
-
-**Examples**
-```
-pkg> registry up
-pkg> registry up General
-```
-""",
-],
-PSA[:name => "status",
- :short_name => "st",
- :api => Registry.status,
- :description => "information about installed registries",
- :help => md"""
- registry [st|status]
-
-Display information about installed registries.
-
-**Examples**
-```
-pkg> registry status
-```
-""",
-]
-], #registry
] #command_declarations
diff --git a/src/Registry/Registry.jl b/src/Registry/Registry.jl
index d5e938baa1..36131248e3 100644
--- a/src/Registry/Registry.jl
+++ b/src/Registry/Registry.jl
@@ -1,9 +1,47 @@
+"""
+ Pkg.Registry
+
+A module for managing Julia package registries.
+
+Registries are repositories that contain metadata about available packages, including
+their versions, dependencies, and locations. The most common registry is the General
+registry, which hosts publicly available Julia packages.
+
+# Main Functions
+
+- [`Pkg.Registry.add`](@ref): Add new package registries
+- [`Pkg.Registry.rm`](@ref): Remove installed registries
+- [`Pkg.Registry.update`](@ref): Update installed registries
+- [`Pkg.Registry.status`](@ref): Display information about available registries
+
+# Examples
+
+```julia
+# Add the default registries (typically the General registry)
+Pkg.Registry.add()
+
+# Add a specific registry by name, UUID, or URL
+Pkg.Registry.add("General")
+Pkg.Registry.add(url = "https://github.com/JuliaRegistries/General.git")
+
+# Update all registries
+Pkg.Registry.update()
+
+# Check registry status
+Pkg.Registry.status()
+
+# Remove a registry
+Pkg.Registry.rm("General")
+```
+
+See also: [`RegistrySpec`](@ref)
+"""
module Registry
import ..Pkg
-using ..Pkg: depots1, printpkgstyle, stderr_f, isdir_nothrow, pathrepr, pkg_server,
- GitTools
-using ..Pkg.PlatformEngines: download_verify_unpack, download, download_verify, exe7z, verify_archive_tree_hash
+using ..Pkg: depots, depots1, printpkgstyle, stderr_f, isdir_nothrow, pathrepr, pkg_server,
+ GitTools, atomic_toml_write, create_cachedir_tag
+using ..Pkg.PlatformEngines: download_verify_unpack, download, download_verify, verify_archive_tree_hash, get_extract_cmd, detect_archive_format
using UUIDs, LibGit2, TOML, Dates
import FileWatching
@@ -12,18 +50,20 @@ public add, rm, status, update
include("registry_instance.jl")
mutable struct RegistrySpec
- name::Union{String,Nothing}
- uuid::Union{UUID,Nothing}
- url::Union{String,Nothing}
+ name::Union{String, Nothing}
+ uuid::Union{UUID, Nothing}
+ url::Union{String, Nothing}
# the path field can be a local source when adding a registry
# otherwise it is the path where the registry is installed
- path::Union{String,Nothing}
- linked::Union{Bool,Nothing}
+ path::Union{String, Nothing}
+ linked::Union{Bool, Nothing}
end
-RegistrySpec(name::String) = RegistrySpec(name = name)
-RegistrySpec(;name::Union{String,Nothing}=nothing, uuid::Union{String,UUID,Nothing}=nothing,
-url::Union{String,Nothing}=nothing, path::Union{String,Nothing}=nothing, linked::Union{Bool,Nothing}=nothing) =
- RegistrySpec(name, isa(uuid, String) ? UUID(uuid) : uuid, url, path, linked)
+RegistrySpec(name::AbstractString) = RegistrySpec(name = name)
+RegistrySpec(;
+ name::Union{AbstractString, Nothing} = nothing, uuid::Union{AbstractString, UUID, Nothing} = nothing,
+ url::Union{AbstractString, Nothing} = nothing, path::Union{AbstractString, Nothing} = nothing, linked::Union{Bool, Nothing} = nothing
+) =
+ RegistrySpec(name, isa(uuid, AbstractString) ? UUID(string(uuid)) : uuid, url, path, linked)
"""
Pkg.Registry.add(registry::RegistrySpec)
@@ -39,27 +79,31 @@ Pkg.Registry.add(uuid = "23338594-aafe-5451-b93e-139f81909106")
Pkg.Registry.add(url = "https://github.com/JuliaRegistries/General.git")
```
"""
-add(reg::Union{String,RegistrySpec}; kwargs...) = add([reg]; kwargs...)
+add(reg::Union{String, RegistrySpec}; kwargs...) = add([reg]; kwargs...)
add(regs::Vector{String}; kwargs...) = add(RegistrySpec[RegistrySpec(name = name) for name in regs]; kwargs...)
-function add(; name=nothing, uuid=nothing, url=nothing, path=nothing, linked=nothing, kwargs...)
- if all(isnothing, (name, uuid, url, path, linked))
+function add(; name = nothing, uuid = nothing, url = nothing, path = nothing, linked = nothing, kwargs...)
+ return if all(isnothing, (name, uuid, url, path, linked))
add(RegistrySpec[]; kwargs...)
else
add([RegistrySpec(; name, uuid, url, path, linked)]; kwargs...)
end
end
-function add(regs::Vector{RegistrySpec}; io::IO=stderr_f(), depot=depots1())
- if isempty(regs)
- download_default_registries(io, only_if_empty = false; depot)
+function add(regs::Vector{RegistrySpec}; io::IO = stderr_f(), depots::Union{String, Vector{String}} = depots())
+ return if isempty(regs)
+ download_default_registries(io, only_if_empty = false; depots = depots)
else
- download_registries(io, regs, depot)
+ download_registries(io, regs, depots)
end
end
const DEFAULT_REGISTRIES =
- RegistrySpec[RegistrySpec(name = "General",
- uuid = UUID("23338594-aafe-5451-b93e-139f81909106"),
- url = "https://github.com/JuliaRegistries/General.git")]
+ RegistrySpec[
+ RegistrySpec(
+ name = "General",
+ uuid = UUID("23338594-aafe-5451-b93e-139f81909106"),
+ url = "https://github.com/JuliaRegistries/General.git"
+ ),
+]
function pkg_server_registry_info()
registry_info = Dict{UUID, Base.SHA1}()
@@ -69,12 +113,12 @@ function pkg_server_registry_info()
download_ok = false
try
f = retry(delays = fill(1.0, 3)) do
- download("$server/registries", tmp_path, verbose=false)
+ download("$server/registries", tmp_path, verbose = false)
end
f()
download_ok = true
catch err
- @warn "could not download $server/registries" exception=err
+ @warn "could not download $server/registries" exception = err
end
download_ok || return nothing
open(tmp_path) do io
@@ -86,7 +130,7 @@ function pkg_server_registry_info()
end
end
end
- Base.rm(tmp_path, force=true)
+ Base.rm(tmp_path, force = true)
return server, registry_info
end
@@ -103,12 +147,36 @@ end
pkg_server_url_hash(url::String) = Base.SHA1(split(url, '/')[end])
-function download_default_registries(io::IO; only_if_empty::Bool = true, depot=depots1())
- installed_registries = reachable_registries()
+"""
+ is_pkg_in_pkgserver_registry(pkg_uuid::Base.UUID, server_registry_info, registries)
+
+Check if a package UUID is tracked by the PkgServer by verifying it exists in
+a registry that is known to the PkgServer.
+"""
+function is_pkg_in_pkgserver_registry(pkg_uuid::Base.UUID, server_registry_info, registries)
+ server_registry_info === nothing && return false
+ registries === nothing && return false
+
+ server, registry_info = server_registry_info
+ for reg in registries
+ if reg.uuid in keys(registry_info)
+ if haskey(reg, pkg_uuid)
+ return true
+ end
+ end
+ end
+ return false
+end
+
+function download_default_registries(io::IO; only_if_empty::Bool = true, depots::Union{String, Vector{String}} = depots())
+ # Check the specified depots for installed registries
+ installed_registries = reachable_registries(; depots)
# Only clone if there are no installed registries, unless called
# with false keyword argument.
if isempty(installed_registries) || !only_if_empty
- printpkgstyle(io, :Installing, "known registries into $(pathrepr(depot))")
+ # Install to the first depot in the list
+ target_depot = depots1(depots)
+ printpkgstyle(io, :Installing, "known registries into $(pathrepr(target_depot))")
registries = copy(DEFAULT_REGISTRIES)
for uuid in keys(pkg_server_registry_urls())
if !(uuid in (reg.uuid for reg in registries))
@@ -116,7 +184,7 @@ function download_default_registries(io::IO; only_if_empty::Bool = true, depot=d
end
end
filter!(reg -> !(reg.uuid in installed_registries), registries)
- download_registries(io, registries, depot)
+ download_registries(io, registries, depots)
return true
end
return false
@@ -135,7 +203,9 @@ function populate_known_registries_with_urls!(registries::Vector{RegistrySpec})
elseif reg.name !== nothing
if reg.name == known.name
named_regs = filter(r -> r.name == reg.name, known_registries)
- if !all(r -> r.uuid == first(named_regs).uuid, named_regs)
+ if isempty(named_regs)
+ Pkg.Types.pkgerror("registry with name `$(reg.name)` not found in known registries.")
+ elseif !all(r -> r.uuid == first(named_regs).uuid, named_regs)
Pkg.Types.pkgerror("multiple registries with name `$(reg.name)`, please specify with uuid.")
end
reg.uuid = known.uuid
@@ -145,10 +215,11 @@ function populate_known_registries_with_urls!(registries::Vector{RegistrySpec})
end
end
end
+ return
end
function registry_use_pkg_server()
- get(ENV, "JULIA_PKG_SERVER", nothing) !== ""
+ return get(ENV, "JULIA_PKG_SERVER", nothing) !== ""
end
registry_read_from_tarball() =
@@ -158,125 +229,170 @@ function check_registry_state(reg)
reg_currently_uses_pkg_server = reg.tree_info !== nothing
reg_should_use_pkg_server = registry_use_pkg_server()
if reg_currently_uses_pkg_server && !reg_should_use_pkg_server
+ pkg_cmd = Pkg.in_repl_mode() ? "pkg> registry rm $(reg.name); registry add $(reg.name)" : "using Pkg; Pkg.Registry.rm(\"$(reg.name)\"); Pkg.Registry.add(\"$(reg.name)\")"
msg = string(
"Your registry may be outdated. We recommend that you run the ",
"following command: ",
- "using Pkg; Pkg.Registry.rm(\"$(reg.name)\"); Pkg.Registry.add(\"$(reg.name)\")",
+ pkg_cmd,
)
@warn(msg)
end
return nothing
end
-function download_registries(io::IO, regs::Vector{RegistrySpec}, depot::String=depots1())
+function archive_format_to_extension(filepath::AbstractString)::String
+ format = detect_archive_format(filepath)
+ # Map detected format to file extension
+ if format == "zstd"
+ return ".tar.zst"
+ elseif format == "gzip"
+ return ".tar.gz"
+ elseif format == "bzip2"
+ return ".tar.bz2"
+ elseif format == "xz"
+ return ".tar.xz"
+ elseif format == "lz4"
+ return ".tar.lz4"
+ else
+ # Default to .tar.gz for tar or unknown formats
+ return ".tar.gz"
+ end
+end
+
+function download_registries(io::IO, regs::Vector{RegistrySpec}, depots::Union{String, Vector{String}} = depots())
+ # Use the first depot as the target
+ target_depot = depots1(depots)
populate_known_registries_with_urls!(regs)
- regdir = joinpath(depot, "registries")
+ registry_update_log = get_registry_update_log()
+ regdir = joinpath(target_depot, "registries")
isdir(regdir) || mkpath(regdir)
+ create_cachedir_tag(regdir)
# only allow one julia process to download and install registries at a time
FileWatching.mkpidlock(joinpath(regdir, ".pid"), stale_age = 10) do
- registry_urls = pkg_server_registry_urls()
- for reg in regs
- if reg.path !== nothing && reg.url !== nothing
- Pkg.Types.pkgerror("""
- ambiguous registry specification; both `url` and `path` are set:
- url=\"$(reg.url)\"
- path=\"$(reg.path)\"
- """
- )
- end
- url = get(registry_urls, reg.uuid, nothing)
- if url !== nothing && registry_read_from_tarball()
- tmp = tempname()
- try
- download_verify(url, nothing, tmp)
- catch err
- Pkg.Types.pkgerror("could not download $url \nException: $(sprint(showerror, err))")
- end
- _hash = pkg_server_url_hash(url)
- if !verify_archive_tree_hash(tmp, _hash)
- Pkg.Types.pkgerror("unable to verify download from $url")
- end
- if reg.name === nothing
- # Need to look up the registry name here
- reg_unc = uncompress_registry(tmp)
- reg.name = TOML.parse(reg_unc["Registry.toml"])["name"]::String
- end
- mv(tmp, joinpath(regdir, reg.name * ".tar.gz"); force=true)
- reg_info = Dict("uuid" => string(reg.uuid), "git-tree-sha1" => string(_hash), "path" => reg.name * ".tar.gz")
- open(joinpath(regdir, reg.name * ".toml"), "w") do io
- TOML.print(io, reg_info)
+ # once we're pidlocked check if another process has installed any of the registries
+ reachable_uuids = map(r -> r.uuid, reachable_registries(; depots))
+ filter!(r -> !in(r.uuid, reachable_uuids), regs)
+
+ registry_urls = pkg_server_registry_urls()
+ for reg in regs
+ if reg.path !== nothing && reg.url !== nothing
+ Pkg.Types.pkgerror(
+ """
+ ambiguous registry specification; both `url` and `path` are set:
+ url=\"$(reg.url)\"
+ path=\"$(reg.path)\"
+ """
+ )
end
- printpkgstyle(io, :Added, "`$(reg.name)` registry to $(Base.contractuser(regdir))")
- else
- mktempdir() do tmp
- if reg.path !== nothing && reg.linked == true # symlink to local source
- registry = Registry.RegistryInstance(reg.path)
- regpath = joinpath(regdir, registry.name)
- printpkgstyle(io, :Symlinking, "registry from `$(Base.contractuser(reg.path))`")
- isdir(dirname(regpath)) || mkpath(dirname(regpath))
- symlink(reg.path, regpath)
- isfile(joinpath(regpath, "Registry.toml")) || Pkg.Types.pkgerror("no `Registry.toml` file in linked registry.")
- registry = Registry.RegistryInstance(regpath)
- printpkgstyle(io, :Symlinked, "registry `$(Base.contractuser(registry.name))` to `$(Base.contractuser(regpath))`")
- return
- elseif reg.url !== nothing && reg.linked == true
- Pkg.Types.pkgerror("""
- A symlinked registry was requested but `path` was not set and `url` was set to `$url`.
- Set only `path` and `linked = true` to use registry symlinking.
- """)
- elseif url !== nothing && registry_use_pkg_server()
- # download from Pkg server
- try
- download_verify_unpack(url, nothing, tmp, ignore_existence = true, io = io)
- catch err
- Pkg.Types.pkgerror("could not download $url \nException: $(sprint(showerror, err))")
- end
- tree_info_file = joinpath(tmp, ".tree_info.toml")
- hash = pkg_server_url_hash(url)
- write(tree_info_file, "git-tree-sha1 = " * repr(string(hash)))
- elseif reg.path !== nothing # copy from local source
- printpkgstyle(io, :Copying, "registry from `$(Base.contractuser(reg.path))`")
- isfile(joinpath(reg.path, "Registry.toml")) || Pkg.Types.pkgerror("no `Registry.toml` file in source directory.")
- registry = Registry.RegistryInstance(reg.path)
- regpath = joinpath(regdir, registry.name)
- cp(reg.path, regpath; force=true) # has to be cp given we're copying
- printpkgstyle(io, :Copied, "registry `$(Base.contractuser(registry.name))` to `$(Base.contractuser(regpath))`")
- return
- elseif reg.url !== nothing # clone from url
- # retry to help spurious connection issues, particularly on CI
- repo = retry(GitTools.clone, delays = fill(1.0, 5), check=(s,e)->isa(e, LibGit2.GitError))(io, reg.url, tmp; header = "registry from $(repr(reg.url))")
- LibGit2.close(repo)
- else
- Pkg.Types.pkgerror("no path or url specified for registry")
+ url = get(registry_urls, reg.uuid, nothing)
+ if url !== nothing && registry_read_from_tarball()
+ tmp = tempname()
+ try
+ download_verify(url, nothing, tmp)
+ catch err
+ Pkg.Types.pkgerror("could not download $url \nException: $(sprint(showerror, err))")
end
- # verify that the clone looks like a registry
- if !isfile(joinpath(tmp, "Registry.toml"))
- Pkg.Types.pkgerror("no `Registry.toml` file in cloned registry.")
+ _hash = pkg_server_url_hash(url)
+ if !verify_archive_tree_hash(tmp, _hash)
+ Pkg.Types.pkgerror("unable to verify download from $url")
end
- registry = Registry.RegistryInstance(tmp)
- regpath = joinpath(regdir, registry.name)
- # copy to `depot`
- ispath(dirname(regpath)) || mkpath(dirname(regpath))
- if isfile(joinpath(regpath, "Registry.toml"))
- existing_registry = Registry.RegistryInstance(regpath)
- if registry.uuid == existing_registry.uuid
- println(io,
- "Registry `$(registry.name)` already exists in `$(Base.contractuser(regpath))`.")
+ if reg.name === nothing
+ # Need to look up the registry name here
+ reg_unc = uncompress_registry(tmp)
+ reg.name = TOML.parse(reg_unc["Registry.toml"])["name"]::String
+ end
+ # Detect what we actually got from the server (defensive against servers that don't support zstd yet)
+ ext = archive_format_to_extension(tmp)
+ mv(tmp, joinpath(regdir, reg.name * ext); force = true)
+ reg_info = Dict("uuid" => string(reg.uuid), "git-tree-sha1" => string(_hash), "path" => reg.name * ext)
+ atomic_toml_write(joinpath(regdir, reg.name * ".toml"), reg_info)
+ registry_update_log[string(reg.uuid)] = now()
+ printpkgstyle(io, :Added, "`$(reg.name)` registry to $(Base.contractuser(regdir))")
+ else
+ mktempdir() do tmp
+ if reg.path !== nothing && reg.linked == true # symlink to local source
+ registry = Registry.RegistryInstance(reg.path)
+ regpath = joinpath(regdir, registry.name)
+ printpkgstyle(io, :Symlinking, "registry from `$(Base.contractuser(reg.path))`")
+ isdir(dirname(regpath)) || mkpath(dirname(regpath))
+ symlink(reg.path, regpath)
+ isfile(joinpath(regpath, "Registry.toml")) || Pkg.Types.pkgerror("no `Registry.toml` file in linked registry.")
+ registry = Registry.RegistryInstance(regpath)
+ printpkgstyle(io, :Symlinked, "registry `$(Base.contractuser(registry.name))` to `$(Base.contractuser(regpath))`")
+ registry_update_log[string(reg.uuid)] = now()
+ save_registry_update_log(registry_update_log)
+ return
+ elseif reg.url !== nothing && reg.linked == true
+ Pkg.Types.pkgerror(
+ """
+ A symlinked registry was requested but `path` was not set and `url` was set to `$url`.
+ Set only `path` and `linked = true` to use registry symlinking.
+ """
+ )
+ elseif url !== nothing && registry_use_pkg_server()
+ # download from Pkg server
+ try
+ download_verify_unpack(url, nothing, tmp, ignore_existence = true, io = io)
+ catch err
+ Pkg.Types.pkgerror("could not download $url \nException: $(sprint(showerror, err))")
+ end
+ tree_info_file = joinpath(tmp, ".tree_info.toml")
+ hash = pkg_server_url_hash(url)
+ write(tree_info_file, "git-tree-sha1 = " * repr(string(hash)))
+ elseif reg.path !== nothing # copy from local source
+ printpkgstyle(io, :Copying, "registry from `$(Base.contractuser(reg.path))`")
+ isfile(joinpath(reg.path, "Registry.toml")) || Pkg.Types.pkgerror("no `Registry.toml` file in source directory.")
+ registry = Registry.RegistryInstance(reg.path)
+ regpath = joinpath(regdir, registry.name)
+ cp(reg.path, regpath; force = true) # has to be cp given we're copying
+ printpkgstyle(io, :Copied, "registry `$(Base.contractuser(registry.name))` to `$(Base.contractuser(regpath))`")
+ registry_update_log[string(reg.uuid)] = now()
+ save_registry_update_log(registry_update_log)
+ return
+ elseif reg.url !== nothing # clone from url
+ # retry to help spurious connection issues, particularly on CI
+ # Use shallow clone (depth=1) for registries since we only need the latest state
+ repo = retry(GitTools.clone, delays = fill(1.0, 5), check = (s, e) -> isa(e, LibGit2.GitError))(io, reg.url, tmp; header = "registry from $(repr(reg.url))", depth = 1)
+ LibGit2.close(repo)
else
- throw(Pkg.Types.PkgError("registry `$(registry.name)=\"$(registry.uuid)\"` conflicts with " *
- "existing registry `$(existing_registry.name)=\"$(existing_registry.uuid)\"`. " *
- "To install it you can clone it manually into e.g. " *
- "`$(Base.contractuser(joinpath(regdir, registry.name*"-2")))`."))
+ Pkg.Types.pkgerror("no path or url specified for registry")
+ end
+ # verify that the clone looks like a registry
+ if !isfile(joinpath(tmp, "Registry.toml"))
+ Pkg.Types.pkgerror("no `Registry.toml` file in cloned registry.")
+ end
+ registry = Registry.RegistryInstance(tmp)
+ regpath = joinpath(regdir, registry.name)
+ # copy to `depot`
+ ispath(dirname(regpath)) || mkpath(dirname(regpath))
+ if isfile(joinpath(regpath, "Registry.toml"))
+ existing_registry = Registry.RegistryInstance(regpath)
+ if registry.uuid == existing_registry.uuid
+ println(
+ io,
+ "Registry `$(registry.name)` already exists in `$(Base.contractuser(regpath))`."
+ )
+ else
+ throw(
+ Pkg.Types.PkgError(
+ "registry `$(registry.name)=\"$(registry.uuid)\"` conflicts with " *
+ "existing registry `$(existing_registry.name)=\"$(existing_registry.uuid)\"`. " *
+ "To install it you can clone it manually into e.g. " *
+ "`$(Base.contractuser(joinpath(regdir, registry.name * "-2")))`."
+ )
+ )
+ end
+ elseif (url !== nothing && registry_use_pkg_server()) || reg.linked !== true
+ # if the dir doesn't exist, or exists but doesn't contain a Registry.toml
+ mv(tmp, regpath, force = true)
+ registry_update_log[string(reg.uuid)] = now()
+ printpkgstyle(io, :Added, "registry `$(registry.name)` to `$(Base.contractuser(regpath))`")
end
- elseif (url !== nothing && registry_use_pkg_server()) || reg.linked !== true
- # if the dir doesn't exist, or exists but doesn't contain a Registry.toml
- mv(tmp, regpath, force=true)
- printpkgstyle(io, :Added, "registry `$(registry.name)` to `$(Base.contractuser(regpath))`")
end
end
end
- end
end # mkpidlock
+ save_registry_update_log(registry_update_log)
return nothing
end
@@ -292,29 +408,31 @@ Pkg.Registry.rm("General")
Pkg.Registry.rm(uuid = "23338594-aafe-5451-b93e-139f81909106")
```
"""
-rm(reg::Union{String,RegistrySpec}; kwargs...) = rm([reg]; kwargs...)
+rm(reg::Union{String, RegistrySpec}; kwargs...) = rm([reg]; kwargs...)
rm(regs::Vector{String}; kwargs...) = rm([RegistrySpec(name = name) for name in regs]; kwargs...)
-function rm(; name=nothing, uuid=nothing, url=nothing, path=nothing, linked=nothing, kwargs...)
- rm([RegistrySpec(; name, uuid, url, path, linked)]; kwargs...)
+function rm(; name = nothing, uuid = nothing, url = nothing, path = nothing, linked = nothing, kwargs...)
+ return rm([RegistrySpec(; name, uuid, url, path, linked)]; kwargs...)
end
-function rm(regs::Vector{RegistrySpec}; io::IO=stderr_f())
- for registry in find_installed_registries(io, regs; depots=first(Base.DEPOT_PATH))
+function rm(regs::Vector{RegistrySpec}; io::IO = stderr_f())
+ for registry in find_installed_registries(io, regs; depots = first(Base.DEPOT_PATH))
printpkgstyle(io, :Removing, "registry `$(registry.name)` from $(Base.contractuser(registry.path))")
if isfile(registry.path)
d = TOML.parsefile(registry.path)
if haskey(d, "path")
- Base.rm(joinpath(dirname(registry.path), d["path"]); force=true)
+ Base.rm(joinpath(dirname(registry.path), d["path"]); force = true)
end
end
- Base.rm(registry.path; force=true, recursive=true)
+ Base.rm(registry.path; force = true, recursive = true)
end
return nothing
end
# Search for the input registries among installed ones
-function find_installed_registries(io::IO,
- needles::Union{Vector{Registry.RegistryInstance}, Vector{RegistrySpec}};
- depots=Base.DEPOT_PATH)
+function find_installed_registries(
+ io::IO,
+ needles::Union{Vector{Registry.RegistryInstance}, Vector{RegistrySpec}};
+ depots = Base.DEPOT_PATH
+ )
haystack = reachable_registries(; depots)
output = Registry.RegistryInstance[]
for needle in needles
@@ -331,7 +449,9 @@ function find_installed_registries(io::IO,
elseif needle.name !== nothing
if needle.name == candidate.name
named_regs = filter(r -> r.name == needle.name, haystack)
- if !all(r -> r.uuid == first(named_regs).uuid, named_regs)
+ if isempty(named_regs)
+ Pkg.Types.pkgerror("registry with name `$(needle.name)` not found in reachable registries.")
+ elseif !all(r -> r.uuid == first(named_regs).uuid, named_regs)
Pkg.Types.pkgerror("multiple registries with name `$(needle.name)`, please specify with uuid.")
end
push!(output, candidate)
@@ -340,9 +460,13 @@ function find_installed_registries(io::IO,
end
end
if !found
- println(io, "registry `$(needle.name === nothing ? needle.uuid :
- needle.uuid === nothing ? needle.name :
- "$(needle.name)=$(needle.uuid)")` not found.")
+ println(
+ io, "registry `$(
+ needle.name === nothing ? needle.uuid :
+ needle.uuid === nothing ? needle.name :
+ "$(needle.name)=$(needle.uuid)"
+ )` not found."
+ )
end
end
return output
@@ -358,10 +482,9 @@ end
function save_registry_update_log(d::Dict)
pkg_scratch_space = joinpath(DEPOT_PATH[1], "scratchspaces", "44cfe95a-1eb2-52ea-b672-e2afdf69b78f")
mkpath(pkg_scratch_space)
+ create_cachedir_tag(joinpath(DEPOT_PATH[1], "scratchspaces"))
pkg_reg_updated_file = joinpath(pkg_scratch_space, "registry_updates.toml")
- open(pkg_reg_updated_file, "w") do io
- TOML.print(io, d)
- end
+ return atomic_toml_write(pkg_reg_updated_file, d)
end
"""
@@ -379,172 +502,188 @@ Pkg.Registry.update("General")
Pkg.Registry.update(uuid = "23338594-aafe-5451-b93e-139f81909106")
```
"""
-update(reg::Union{String,RegistrySpec}; kwargs...) = update([reg]; kwargs...)
+update(reg::Union{String, RegistrySpec}; kwargs...) = update([reg]; kwargs...)
update(regs::Vector{String}; kwargs...) = update([RegistrySpec(name = name) for name in regs]; kwargs...)
-function update(; name=nothing, uuid=nothing, url=nothing, path=nothing, linked=nothing, kwargs...)
- if all(isnothing, (name, uuid, url, path, linked))
+function update(; name = nothing, uuid = nothing, url = nothing, path = nothing, linked = nothing, kwargs...)
+ return if all(isnothing, (name, uuid, url, path, linked))
update(RegistrySpec[]; kwargs...)
else
update([RegistrySpec(; name, uuid, url, path, linked)]; kwargs...)
end
end
-function update(regs::Vector{RegistrySpec}; io::IO=stderr_f(), force::Bool=true, depots = [depots1()], update_cooldown = Second(1))
+function update(regs::Vector{RegistrySpec}; io::IO = stderr_f(), force::Bool = true, depots = [depots1()], update_cooldown = Second(1))
registry_update_log = get_registry_update_log()
for depot in depots
- depot_regs = isempty(regs) ? reachable_registries(; depots=depot) : regs
+ depot_regs = isempty(regs) ? reachable_registries(; depots = depot) : regs
regdir = joinpath(depot, "registries")
isdir(regdir) || mkpath(regdir)
+ create_cachedir_tag(regdir)
# only allow one julia process to update registries in this depot at a time
FileWatching.mkpidlock(joinpath(regdir, ".pid"), stale_age = 10) do
- errors = Tuple{String, String}[]
- registry_urls = pkg_server_registry_urls()
- for reg in unique(r -> r.uuid, find_installed_registries(io, depot_regs; depots=[depot]); seen=Set{UUID}())
- prev_update = get(registry_update_log, string(reg.uuid), nothing)::Union{Nothing, DateTime}
- if prev_update !== nothing
- diff = now() - prev_update
- if diff < update_cooldown
- @debug "Skipping updating registry $(reg.name) since it is on cooldown: $(Dates.canonicalize(Millisecond(update_cooldown) - diff)) left"
- continue
+ errors = Tuple{String, String}[]
+ registry_urls = pkg_server_registry_urls()
+ for reg in unique(r -> r.uuid, find_installed_registries(io, depot_regs; depots = [depot]); seen = Set{UUID}())
+ prev_update = get(registry_update_log, string(reg.uuid), nothing)::Union{Nothing, DateTime}
+ if prev_update !== nothing
+ diff = now() - prev_update
+ if diff < update_cooldown
+ @debug "Skipping updating registry $(reg.name) since it is on cooldown: $(Dates.canonicalize(Millisecond(update_cooldown) - diff)) left"
+ continue
+ end
end
- end
- let reg=reg, errors=errors
- regpath = pathrepr(reg.path)
- let regpath=regpath
- if reg.tree_info !== nothing
- printpkgstyle(io, :Updating, "registry at " * regpath)
- old_hash = reg.tree_info
- url = get(registry_urls, reg.uuid, nothing)
- if url !== nothing
- check_registry_state(reg)
+ let reg = reg, errors = errors
+ regpath = pathrepr(reg.path)
+ let regpath = regpath
+ if !iswritable(dirname(reg.path))
+ @warn "Skipping update of registry at $regpath (read-only file system)"
+ continue
end
- if url !== nothing && (new_hash = pkg_server_url_hash(url)) != old_hash
- # TODO: update faster by using a diff, if available
- # TODO: DRY with the code in `download_default_registries`
- let new_hash = new_hash, url = url
- if registry_read_from_tarball()
- tmp = tempname()
- try
- download_verify(url, nothing, tmp)
- catch err
- push!(errors, (reg.path, "failed to download from $(url). Exception: $(sprint(showerror, err))"))
- @goto done_tarball_read
- end
- hash = pkg_server_url_hash(url)
- if !verify_archive_tree_hash(tmp, hash)
- push!(errors, (reg.path, "failed to verify download from $(url)"))
- @goto done_tarball_read
- end
- # If we have an uncompressed Pkg server registry, remove it and get the compressed version
- if isdir(reg.path)
- Base.rm(reg.path; recursive=true, force=true)
- end
- registry_path = dirname(reg.path)
- mv(tmp, joinpath(registry_path, reg.name * ".tar.gz"); force=true)
- reg_info = Dict("uuid" => string(reg.uuid), "git-tree-sha1" => string(hash), "path" => reg.name * ".tar.gz")
- open(joinpath(registry_path, reg.name * ".toml"), "w") do io
- TOML.print(io, reg_info)
- end
- registry_update_log[string(reg.uuid)] = now()
- @label done_tarball_read
- else
- if reg.name == "General" && Base.get_bool_env("JULIA_PKG_GEN_REG_FMT_CHECK", true)
- @info """
+
+ if reg.tree_info !== nothing
+ printpkgstyle(io, :Updating, "registry at " * regpath)
+ old_hash = reg.tree_info
+ url = get(registry_urls, reg.uuid, nothing)
+ if url !== nothing
+ check_registry_state(reg)
+ end
+ if url !== nothing && (new_hash = pkg_server_url_hash(url)) != old_hash
+ # TODO: update faster by using a diff, if available
+ # TODO: DRY with the code in `download_default_registries`
+ let new_hash = new_hash, url = url
+ if registry_read_from_tarball()
+ tmp = tempname()
+ try
+ download_verify(url, nothing, tmp)
+ catch err
+ push!(errors, (reg.path, "failed to download from $(url). Exception: $(sprint(showerror, err))"))
+ @goto done_tarball_read
+ end
+ hash = pkg_server_url_hash(url)
+ if !verify_archive_tree_hash(tmp, hash)
+ push!(errors, (reg.path, "failed to verify download from $(url)"))
+ @goto done_tarball_read
+ end
+ # If we have an uncompressed Pkg server registry, remove it and get the compressed version
+ if isdir(reg.path)
+ Base.rm(reg.path; recursive = true, force = true)
+ end
+ registry_path = dirname(reg.path)
+ # Detect what we actually got from the server (defensive against servers that don't support zstd yet)
+ format = detect_archive_format(tmp)
+ ext = format == "zstd" ? ".tar.zst" : ".tar.gz"
+ mv(tmp, joinpath(registry_path, reg.name * ext); force = true)
+ reg_info = Dict("uuid" => string(reg.uuid), "git-tree-sha1" => string(hash), "path" => reg.name * ext)
+ atomic_toml_write(joinpath(registry_path, reg.name * ".toml"), reg_info)
+ registry_update_log[string(reg.uuid)] = now()
+ @label done_tarball_read
+ else
+ if reg.name == "General" &&
+ Base.get_bool_env("JULIA_PKG_GEN_REG_FMT_CHECK", true) &&
+ get(ENV, "JULIA_PKG_SERVER", nothing) != ""
+ # warn if JULIA_PKG_SERVER is set to a non-empty string or not set
+ @info """
The General registry is installed via unpacked tarball.
Consider reinstalling it via the newer faster direct from
tarball format by running:
pkg> registry rm General; registry add General
- """ maxlog=1
- end
- mktempdir() do tmp
- try
- download_verify_unpack(url, nothing, tmp, ignore_existence = true, io=io)
- registry_update_log[string(reg.uuid)] = now()
- catch err
- push!(errors, (reg.path, "failed to download and unpack from $(url). Exception: $(sprint(showerror, err))"))
- @goto done_tarball_unpack
+ """ maxlog = 1
+ end
+ mktempdir() do tmp
+ try
+ download_verify_unpack(url, nothing, tmp, ignore_existence = true, io = io)
+ registry_update_log[string(reg.uuid)] = now()
+ catch err
+ push!(errors, (reg.path, "failed to download and unpack from $(url). Exception: $(sprint(showerror, err))"))
+ @goto done_tarball_unpack
+ end
+ tree_info_file = joinpath(tmp, ".tree_info.toml")
+ write(tree_info_file, "git-tree-sha1 = " * repr(string(new_hash)))
+ mv(tmp, reg.path, force = true)
+ @label done_tarball_unpack
end
- tree_info_file = joinpath(tmp, ".tree_info.toml")
- write(tree_info_file, "git-tree-sha1 = " * repr(string(new_hash)))
- mv(tmp, reg.path, force=true)
- @label done_tarball_unpack
end
end
end
- end
- elseif isdir(joinpath(reg.path, ".git"))
- printpkgstyle(io, :Updating, "registry at " * regpath)
- if reg.name == "General" && Base.get_bool_env("JULIA_PKG_GEN_REG_FMT_CHECK", true)
- @info """
+ elseif isdir(joinpath(reg.path, ".git"))
+ printpkgstyle(io, :Updating, "registry at " * regpath)
+ if reg.name == "General" &&
+ Base.get_bool_env("JULIA_PKG_GEN_REG_FMT_CHECK", true) &&
+ get(ENV, "JULIA_PKG_SERVER", nothing) != ""
+ # warn if JULIA_PKG_SERVER is set to a non-empty string or not set
+ @info """
The General registry is installed via git. Consider reinstalling it via
the newer faster direct from tarball format by running:
pkg> registry rm General; registry add General
- """ maxlog=1
- end
- LibGit2.with(LibGit2.GitRepo(reg.path)) do repo
- if LibGit2.isdirty(repo)
- push!(errors, (regpath, "registry dirty"))
- @goto done_git
- end
- if !LibGit2.isattached(repo)
- push!(errors, (regpath, "registry detached"))
- @goto done_git
+ """ maxlog = 1
end
- if !("origin" in LibGit2.remotes(repo))
- push!(errors, (regpath, "origin not in the list of remotes"))
- @goto done_git
- end
- branch = LibGit2.headname(repo)
- try
- GitTools.fetch(io, repo; refspecs=["+refs/heads/$branch:refs/remotes/origin/$branch"])
- catch e
- e isa Pkg.Types.PkgError || rethrow()
- push!(errors, (reg.path, "failed to fetch from repo: $(e.msg)"))
- @goto done_git
- end
- attempts = 0
- @label merge
- ff_succeeded = try
- LibGit2.merge!(repo; branch="refs/remotes/origin/$branch", fastforward=true)
- catch e
- attempts += 1
- if e isa LibGit2.GitError && e.code == LibGit2.Error.ELOCKED && attempts <= 3
- @warn "Registry update attempt failed because repository is locked. Resetting and retrying." e
- LibGit2.reset!(repo, LibGit2.head_oid(repo), LibGit2.Consts.RESET_HARD)
- sleep(1)
- @goto merge
- elseif e isa LibGit2.GitError && e.code == LibGit2.Error.ENOTFOUND
- push!(errors, (reg.path, "branch origin/$branch not found"))
+ LibGit2.with(LibGit2.GitRepo(reg.path)) do repo
+ if LibGit2.isdirty(repo)
+ push!(errors, (regpath, "registry dirty"))
@goto done_git
- else
- rethrow()
end
-
- end
-
- if !ff_succeeded
- try LibGit2.rebase!(repo, "origin/$branch")
+ if !LibGit2.isattached(repo)
+ push!(errors, (regpath, "registry detached"))
+ @goto done_git
+ end
+ if !("origin" in LibGit2.remotes(repo))
+ push!(errors, (regpath, "origin not in the list of remotes"))
+ @goto done_git
+ end
+ branch = LibGit2.headname(repo)
+ try
+ # If this is a shallow clone, continue using shallow fetches
+ fetch_depth = GitTools.isshallow(repo) ? 1 : 0
+ GitTools.fetch(io, repo; refspecs = ["+refs/heads/$branch:refs/remotes/origin/$branch"], depth = fetch_depth)
catch e
- e isa LibGit2.GitError || rethrow()
- push!(errors, (reg.path, "registry failed to rebase on origin/$branch"))
+ e isa Pkg.Types.PkgError || rethrow()
+ push!(errors, (reg.path, "failed to fetch from repo: $(e.msg)"))
@goto done_git
end
+ attempts = 0
+ @label merge
+ ff_succeeded = try
+ LibGit2.merge!(repo; branch = "refs/remotes/origin/$branch", fastforward = true)
+ catch e
+ attempts += 1
+ if e isa LibGit2.GitError && e.code == LibGit2.Error.ELOCKED && attempts <= 3
+ @warn "Registry update attempt failed because repository is locked. Resetting and retrying." e
+ LibGit2.reset!(repo, LibGit2.head_oid(repo), LibGit2.Consts.RESET_HARD)
+ sleep(1)
+ @goto merge
+ elseif e isa LibGit2.GitError && e.code == LibGit2.Error.ENOTFOUND
+ push!(errors, (reg.path, "branch origin/$branch not found"))
+ @goto done_git
+ else
+ rethrow()
+ end
+
+ end
+
+ if !ff_succeeded
+ try
+ LibGit2.rebase!(repo, "origin/$branch")
+ catch e
+ e isa LibGit2.GitError || rethrow()
+ push!(errors, (reg.path, "registry failed to rebase on origin/$branch"))
+ @goto done_git
+ end
+ end
+ registry_update_log[string(reg.uuid)] = now()
+ @label done_git
end
- registry_update_log[string(reg.uuid)] = now()
- @label done_git
end
end
end
end
- end
- if !isempty(errors)
- warn_str = "Some registries failed to update:"
- for (reg, err) in errors
- warn_str *= "\n — $reg — $err"
+ if !isempty(errors)
+ warn_str = "Some registries failed to update:"
+ for (reg, err) in errors
+ warn_str *= "\n — $reg — $err"
+ end
+ @error warn_str
end
- @error warn_str
- end
end # mkpidlock
end
save_registry_update_log(registry_update_log)
@@ -562,20 +701,93 @@ Display information about available registries.
Pkg.Registry.status()
```
"""
-function status(io::IO=stderr_f())
+function status(io::IO = stderr_f())
regs = reachable_registries()
- regs = unique(r -> r.uuid, regs; seen=Set{Union{UUID,Nothing}}())
+ regs = unique(r -> r.uuid, regs; seen = Set{Union{UUID, Nothing}}())
printpkgstyle(io, Symbol("Registry Status"), "")
- if isempty(regs)
+ return if isempty(regs)
println(io, " (no registries found)")
else
+ registry_update_log = get_registry_update_log()
+ server_registry_info = Pkg.OFFLINE_MODE[] ? nothing : pkg_server_registry_info()
+ flavor = get(ENV, "JULIA_PKG_SERVER_REGISTRY_PREFERENCE", "")
for reg in regs
printstyled(io, " [$(string(reg.uuid)[1:8])]"; color = :light_black)
print(io, " $(reg.name)")
reg.repo === nothing || print(io, " ($(reg.repo))")
println(io)
+
+ registry_type = get_registry_type(reg)
+ if registry_type == :git
+ print(io, " git registry")
+ elseif registry_type == :unpacked
+ print(io, " unpacked registry with hash $(reg.tree_info)")
+ elseif registry_type == :packed
+ print(io, " packed registry with hash $(reg.tree_info)")
+ elseif registry_type == :bare
+ # We could try to detect a symlink but this is too
+ # rarely used to be worth the complexity.
+ print(io, " bare registry")
+ else
+ print(io, " unknown registry format")
+ end
+ update_time = get(registry_update_log, string(reg.uuid), nothing)
+ if !isnothing(update_time)
+ time_string = Dates.format(update_time, dateformat"yyyy-mm-dd HH:MM:SS")
+ print(io, ", last updated $(time_string)")
+ end
+ println(io)
+
+ if registry_type != :git && !isnothing(server_registry_info)
+ server_url, registries = server_registry_info
+ if haskey(registries, reg.uuid)
+ print(io, " served by $(server_url)")
+ if flavor != ""
+ print(io, " ($flavor flavor)")
+ end
+ if registries[reg.uuid] != reg.tree_info
+ print(io, " - update available")
+ end
+ println(io)
+ end
+ end
end
end
end
+# The registry can be installed in a number of different ways, for
+# evolutionary reasons.
+#
+# 1. A tarball that is not unpacked. In this case Pkg handles the
+# registry in memory. The tarball is distributed by a package server.
+# This is the preferred option, in particular for the General
+# registry.
+#
+# 2. A tarball that is unpacked. This only differs from above by
+# having the files on disk instead of in memory. In both cases Pkg
+# keeps track of the tarball's tree hash to know if it can be updated.
+#
+# 3. A clone of a git repository. This is characterized by the
+# presence of a .git directory. All updating is handled with git.
+# This is not preferred for the General registry but may be the only
+# practical option for private registries.
+#
+# 4. A bare registry with only the registry files and no metadata.
+# This can be installed by adding or symlinking from a local path but
+# there is no way to update it from Pkg.
+#
+# It is also possible for a packed/unpacked registry to coexist on
+# disk with a git/bare registry, in which case a new Julia may use the
+# former and a sufficiently old Julia the latter.
+function get_registry_type(reg)
+ isnothing(reg.in_memory_registry) || return :packed
+ isnothing(reg.tree_info) || return :unpacked
+ isdir(joinpath(reg.path, ".git")) && return :git
+ isfile(joinpath(reg.path, "Registry.toml")) && return :bare
+ # Indicates either that the registry data is corrupt or that it
+ # has been handled by a future Julia version with non-backwards
+ # compatible conventions.
+ return :unknown
+end
+
end # module
diff --git a/src/Registry/registry_instance.jl b/src/Registry/registry_instance.jl
index c5743fed4f..77d804369c 100644
--- a/src/Registry/registry_instance.jl
+++ b/src/Registry/registry_instance.jl
@@ -24,7 +24,7 @@ function parsefile(in_memory_registry::Union{Dict, Nothing}, folder::AbstractStr
return _parsefile(joinpath(folder, file))
else
content = in_memory_registry[to_tar_path_format(file)]
- parser = Base.TOML.Parser{Dates}(content; filepath=file)
+ parser = Base.TOML.Parser{Dates}(content; filepath = file)
return Base.TOML.parse(parser)
end
end
@@ -36,10 +36,6 @@ custom_isfile(in_memory_registry::Union{Dict, Nothing}, folder::AbstractString,
mutable struct VersionInfo
const git_tree_sha1::Base.SHA1
const yanked::Bool
- uncompressed_compat::Dict{UUID, VersionSpec} # lazily initialized
- weak_uncompressed_compat::Dict{UUID, VersionSpec} # lazily initialized
-
- VersionInfo(git_tree_sha1::Base.SHA1, yanked::Bool) = new(git_tree_sha1, yanked)
end
# This is the information that exists in e.g. General/A/ACME
@@ -48,244 +44,487 @@ struct PkgInfo
repo::Union{String, Nothing}
subdir::Union{String, Nothing}
+ # Package.toml [metadata.deprecated]:
+ deprecated::Union{Dict{String, Any}, Nothing}
+
# Versions.toml:
version_info::Dict{VersionNumber, VersionInfo}
- # Compat.toml
- compat::Dict{VersionRange, Dict{String, VersionSpec}}
+ # Deps.toml - which dependencies exist
+ deps::Dict{VersionRange, Set{UUID}}
- # Deps.toml
- deps::Dict{VersionRange, Dict{String, UUID}}
+ # Compat.toml - version constraints on deps
+ compat::Dict{VersionRange, Dict{UUID, VersionSpec}}
- # WeakCompat.toml
- weak_compat::Dict{VersionRange, Dict{String, VersionSpec}}
+ # WeakDeps.toml - which weak dependencies exist
+ weak_deps::Dict{VersionRange, Set{UUID}}
- # WeakDeps.toml
- weak_deps::Dict{VersionRange, Dict{String, UUID}}
+ # WeakCompat.toml - version constraints on weak deps
+ weak_compat::Dict{VersionRange, Dict{UUID, VersionSpec}}
end
isyanked(pkg::PkgInfo, v::VersionNumber) = pkg.version_info[v].yanked
treehash(pkg::PkgInfo, v::VersionNumber) = pkg.version_info[v].git_tree_sha1
+isdeprecated(pkg::PkgInfo) = pkg.deprecated !== nothing
+
+const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e")
+
+
+mutable struct PkgEntry
+ # Registry.toml:
+ const path::String
+ const registry_path::String
+ const name::String
+ const uuid::UUID
+
+ # Version.toml / (Compat.toml / Deps.toml):
+ info::PkgInfo # lazily initialized
+
+ PkgEntry(path, registry_path, name, uuid) = new(path, registry_path, name, uuid #= undef =#)
+end
-function uncompress(compressed::Dict{VersionRange, Dict{String, T}}, vsorted::Vector{VersionNumber}) where {T}
- @assert issorted(vsorted)
- uncompressed = Dict{VersionNumber, Dict{String, T}}()
- for v in vsorted
- uncompressed[v] = Dict{String, T}()
- end
- for (vs, data) in compressed
- first = length(vsorted) + 1
- # We find the first and last version that are in the range
- # and since the versions are sorted, all versions in between are sorted
- for i in eachindex(vsorted)
- v = vsorted[i]
- v in vs && (first = i; break)
+# Helper to load deps data from Deps.toml or WeakDeps.toml
+# Returns Dict{VersionRange, Set{UUID}} - just lists which deps exist
+function load_deps_data(in_memory_registry, registry_path, pkg_path, filename, name_to_uuid)
+ deps_data_toml = custom_isfile(in_memory_registry, registry_path, joinpath(pkg_path, filename)) ?
+ parsefile(in_memory_registry, registry_path, joinpath(pkg_path, filename)) : Dict{String, Any}()
+ deps = Dict{VersionRange, Set{UUID}}()
+ for (v, data) in deps_data_toml
+ data = data::Dict{String, Any}
+ vr = VersionRange(v)
+ d = Set{UUID}()
+ for (dep, uuid_str) in data
+ uuid_val = UUID(uuid_str::String)
+ push!(d, uuid_val)
+ name_to_uuid[dep] = uuid_val
end
- last = 0
- for i in reverse(eachindex(vsorted))
- v = vsorted[i]
- v in vs && (last = i; break)
+ deps[vr] = d
+ end
+ return deps
+end
+
+# Helper to load compat data from Compat.toml or WeakCompat.toml
+function load_compat_data(in_memory_registry, registry_path, pkg_path, filename, name_to_uuid)
+ compat_data_toml = custom_isfile(in_memory_registry, registry_path, joinpath(pkg_path, filename)) ?
+ parsefile(in_memory_registry, registry_path, joinpath(pkg_path, filename)) : Dict{String, Any}()
+ compat = Dict{VersionRange, Dict{UUID, VersionSpec}}()
+ for (v, data) in compat_data_toml
+ data = data::Dict{String, Any}
+ vr = VersionRange(v)
+ d = Dict{UUID, VersionSpec}()
+ for (dep, vr_dep::Union{String, Vector{String}}) in data
+ d[name_to_uuid[dep]] = VersionSpec(vr_dep)
end
- for i in first:last
- v = vsorted[i]
- uv = uncompressed[v]
- for (key, value) in data
- if haskey(uv, key)
- error("Overlapping ranges for $(key) for version $v in registry.")
- else
- uv[key] = value
- end
+ compat[vr] = d
+ end
+ return compat
+end
+
+# Helper function to query just the dependencies (without compat specs) for a version
+# Returns Set{UUID} of all dependencies (both strong and weak) for the given version
+function query_deps_for_version(
+ deps_compressed::Dict{VersionRange, Set{UUID}},
+ weak_deps_compressed::Dict{VersionRange, Set{UUID}},
+ version::VersionNumber
+ )::Set{UUID}
+ result = Set{UUID}()
+ for compressed in (deps_compressed, weak_deps_compressed)
+ for (vrange, deps_set) in compressed
+ if version in vrange
+ union!(result, deps_set)
end
end
end
- return uncompressed
+ return result
end
-const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e")
-function initialize_uncompressed!(pkg::PkgInfo, versions = keys(pkg.version_info))
- # Only valid to call this with existing versions of the package
- # Remove all versions we have already uncompressed
- versions = filter!(v -> !isdefined(pkg.version_info[v], :uncompressed_compat), collect(versions))
+# Helper function to query deps for a specific version from multi-registry maps
+function query_deps_for_version(
+ deps_map::Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}},
+ weak_deps_map::Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}},
+ uuid::UUID,
+ version::VersionNumber
+ )::Set{UUID}
+ result = Set{UUID}()
+ deps_list = get(Vector{Dict{VersionRange, Set{UUID}}}, deps_map, uuid)
+ weak_deps_list = get(Vector{Dict{VersionRange, Set{UUID}}}, weak_deps_map, uuid)
+
+ # Query each registry's data
+ for i in eachindex(deps_list)
+ deps_compressed = deps_list[i]
+ weak_deps_compressed = weak_deps_list[i]
+ union!(result, query_deps_for_version(deps_compressed, weak_deps_compressed, version))
+ end
- sort!(versions)
+ return result
+end
- uncompressed_compat = uncompress(pkg.compat, versions)
- uncompressed_deps = uncompress(pkg.deps, versions)
+# Helper function to query compressed compat data from PkgInfo
+# Convenience wrapper that uses PkgInfo's compressed data directly
+# Returns Dict{UUID, VersionSpec} if target_uuid is nothing
+# Returns Union{VersionSpec, Nothing} if target_uuid is provided
+function query_compat_for_version(
+ pkg_info::PkgInfo,
+ version::VersionNumber,
+ target_uuid::Union{UUID, Nothing} = nothing
+ )
+ return query_compat_for_version(pkg_info.deps, pkg_info.compat, pkg_info.weak_deps, pkg_info.weak_compat, version, target_uuid)
+end
- for v in versions
- vinfo = pkg.version_info[v]
- compat = Dict{UUID, VersionSpec}()
- uncompressed_deps_v = uncompressed_deps[v]
- # Everything depends on Julia
- uncompressed_deps_v["julia"] = JULIA_UUID
- uncompressed_compat_v = uncompressed_compat[v]
- for (pkg, uuid) in uncompressed_deps_v
- vspec = get(uncompressed_compat_v, pkg, nothing)
- compat[uuid] = vspec === nothing ? VersionSpec() : vspec
+# Mutating helper function to query compressed compat data for a specific version
+# Merges deps (which dependencies exist) with compat (version constraints on those deps)
+# Dependencies without explicit compat entries get VersionSpec() (any version)
+# Includes both strong and weak dependencies
+# If target_uuid is provided, only includes that UUID if it exists
+# The result dictionary is emptied before populating
+function query_compat_for_version!(
+ result::Dict{UUID, VersionSpec},
+ deps_compressed::Dict{VersionRange, Set{UUID}},
+ compat_compressed::Dict{VersionRange, Dict{UUID, VersionSpec}},
+ weak_deps_compressed::Dict{VersionRange, Set{UUID}},
+ weak_compat_compressed::Dict{VersionRange, Dict{UUID, VersionSpec}},
+ version::VersionNumber,
+ target_uuid::Union{UUID, Nothing} = nothing
+ )
+ empty!(result)
+
+ for deps_dict in (deps_compressed, weak_deps_compressed)
+ for (vrange, deps_set) in deps_dict
+ if version in vrange
+ for dep_uuid in deps_set
+ if target_uuid === nothing || dep_uuid == target_uuid
+ result[dep_uuid] = VersionSpec() # Default: any version
+ end
+ end
+ end
end
- @assert !isdefined(vinfo, :uncompressed_compat)
- vinfo.uncompressed_compat = compat
end
- return pkg
-end
-function initialize_weak_uncompressed!(pkg::PkgInfo, versions = keys(pkg.version_info))
- # Only valid to call this with existing versions of the package
- # Remove all versions we have already uncompressed
- versions = filter!(v -> !isdefined(pkg.version_info[v], :weak_uncompressed_compat), collect(versions))
+ # Override with explicit compat specs from regular and weak compat
+ for compat_dict in (compat_compressed, weak_compat_compressed)
+ for (vrange, compat_entries) in compat_dict
+ if version in vrange
+ for (dep_uuid, vspec) in compat_entries
+ if target_uuid === nothing || dep_uuid == target_uuid
+ result[dep_uuid] = vspec
+ end
+ end
+ end
+ end
+ end
- sort!(versions)
+ return nothing
+end
- weak_uncompressed_compat = uncompress(pkg.weak_compat, versions)
- weak_uncompressed_deps = uncompress(pkg.weak_deps, versions)
+# Non-mutating wrapper for backwards compatibility
+# If target_uuid is provided, returns VersionSpec or nothing for that specific UUID
+# If target_uuid is nothing, returns Dict{UUID, VersionSpec} for all dependencies
+function query_compat_for_version(
+ deps_compressed::Dict{VersionRange, Set{UUID}},
+ compat_compressed::Dict{VersionRange, Dict{UUID, VersionSpec}},
+ weak_deps_compressed::Dict{VersionRange, Set{UUID}},
+ weak_compat_compressed::Dict{VersionRange, Dict{UUID, VersionSpec}},
+ version::VersionNumber,
+ target_uuid::Union{UUID, Nothing} = nothing
+ )
+ result = Dict{UUID, VersionSpec}()
+ query_compat_for_version!(result, deps_compressed, compat_compressed, weak_deps_compressed, weak_compat_compressed, version, target_uuid)
- for v in versions
- vinfo = pkg.version_info[v]
- weak_compat = Dict{UUID, VersionSpec}()
- weak_uncompressed_deps_v = weak_uncompressed_deps[v]
- weak_uncompressed_compat_v = weak_uncompressed_compat[v]
- for (pkg, uuid) in weak_uncompressed_deps_v
- vspec = get(weak_uncompressed_compat_v, pkg, nothing)
- weak_compat[uuid] = vspec === nothing ? VersionSpec() : vspec
- end
- @assert !isdefined(vinfo, :weak_uncompressed_compat)
- vinfo.weak_uncompressed_compat = weak_compat
+ # If a specific UUID was requested, return just its VersionSpec (or nothing)
+ if target_uuid !== nothing
+ return get(result, target_uuid, nothing)
end
- return pkg
-end
-function compat_info(pkg::PkgInfo)
- initialize_uncompressed!(pkg)
- return Dict(v => info.uncompressed_compat for (v, info) in pkg.version_info)
+ return result
end
-function weak_compat_info(pkg::PkgInfo)
- if isempty(pkg.weak_deps)
- return nothing
+# Helper to check if a UUID is in the weak deps for a specific version
+function is_weak_dep(
+ weak_compressed::Dict{VersionRange, Set{UUID}},
+ version::VersionNumber,
+ dep_uuid::UUID
+ )::Bool
+ for (vrange, weak_set) in weak_compressed
+ if version in vrange && (dep_uuid in weak_set)
+ return true
+ end
end
- initialize_weak_uncompressed!(pkg)
- return Dict(v => info.weak_uncompressed_compat for (v, info) in pkg.version_info)
+ return false
end
-mutable struct PkgEntry
- # Registry.toml:
- const path::String
- const registry_path::String
- const name::String
- const uuid::UUID
+# Helper function to query compat across multiple registries
+# Each registry has its own compressed dictionaries and version set
+# Only queries a registry if the version actually exists in that registry
+function query_compat_for_version_multi_registry!(
+ result::Dict{UUID, VersionSpec},
+ reg_result::Dict{UUID, VersionSpec},
+ deps_list::Vector{Dict{VersionRange, Set{UUID}}},
+ compat_list::Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}},
+ weak_deps_list::Vector{Dict{VersionRange, Set{UUID}}},
+ weak_compat_list::Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}},
+ versions_per_registry::Vector{Set{VersionNumber}},
+ version::VersionNumber
+ )
+ empty!(result)
- const in_memory_registry::Union{Dict{String, String}, Nothing}
- # Version.toml / (Compat.toml / Deps.toml):
- info::PkgInfo # lazily initialized
+ # Query each registry's data separately
+ for i in eachindex(deps_list)
+ # CRITICAL: Only query this registry if the version exists in it!
+ if !(version in versions_per_registry[i])
+ continue
+ end
- PkgEntry(path, registry_path, name, uuid, in_memory_registry) = new(path, registry_path, name, uuid, in_memory_registry, #= undef =#)
-end
+ reg_deps = deps_list[i]
+ reg_compat = compat_list[i]
+ reg_weak_deps = weak_deps_list[i]
+ reg_weak_compat = weak_compat_list[i]
-registry_info(pkg::PkgEntry) = init_package_info!(pkg)
-
-function init_package_info!(pkg::PkgEntry)
- # Already uncompressed the info for this package, return early
- isdefined(pkg, :info) && return pkg.info
- path = pkg.registry_path
-
- d_p = parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Package.toml"))
- name = d_p["name"]::String
- name != pkg.name && error("inconsistent name in Registry.toml ($(name)) and Package.toml ($(pkg.name)) for pkg at $(path)")
- repo = get(d_p, "repo", nothing)::Union{Nothing, String}
- subdir = get(d_p, "subdir", nothing)::Union{Nothing, String}
-
- # Versions.toml
- d_v = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Versions.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Versions.toml")) : Dict{String, Any}()
- version_info = Dict{VersionNumber, VersionInfo}(VersionNumber(k) =>
- VersionInfo(SHA1(v["git-tree-sha1"]::String), get(v, "yanked", false)::Bool) for (k, v) in d_v)
-
- # Compat.toml
- compat_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Compat.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Compat.toml")) : Dict{String, Any}()
- compat = Dict{VersionRange, Dict{String, VersionSpec}}()
- for (v, data) in compat_data_toml
- data = data::Dict{String, Any}
- vr = VersionRange(v)
- d = Dict{String, VersionSpec}(dep => VersionSpec(vr_dep) for (dep, vr_dep::Union{String, Vector{String}}) in data)
- compat[vr] = d
- end
+ # Use the mutating query function to avoid allocation
+ query_compat_for_version!(reg_result, reg_deps, reg_compat, reg_weak_deps, reg_weak_compat, version)
- # Deps.toml
- deps_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Deps.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Deps.toml")) : Dict{String, Any}()
- deps = Dict{VersionRange, Dict{String, UUID}}()
- for (v, data) in deps_data_toml
- data = data::Dict{String, Any}
- vr = VersionRange(v)
- d = Dict{String, UUID}(dep => UUID(uuid) for (dep, uuid::String) in data)
- deps[vr] = d
+ # Merge results, preferring the first registry's compat if there's overlap
+ for (uuid, vspec) in reg_result
+ if !haskey(result, uuid)
+ result[uuid] = vspec
+ end
+ # If uuid already exists, keep the first registry's vspec (first wins)
+ end
end
- # All packages depend on julia
- deps[VersionRange()] = Dict("julia" => JULIA_UUID)
- # WeakCompat.toml
- weak_compat_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakCompat.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakCompat.toml")) : Dict{String, Any}()
- weak_compat = Dict{VersionRange, Dict{String, VersionSpec}}()
- for (v, data) in weak_compat_data_toml
- data = data::Dict{String, Any}
- vr = VersionRange(v)
- d = Dict{String, VersionSpec}(dep => VersionSpec(vr_dep) for (dep, vr_dep::Union{String, Vector{String}}) in data)
- weak_compat[vr] = d
- end
+ return nothing
+end
- # WeakDeps.toml
- weak_deps_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakDeps.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakDeps.toml")) : Dict{String, Any}()
- weak_deps = Dict{VersionRange, Dict{String, UUID}}()
- for (v, data) in weak_deps_data_toml
- data = data::Dict{String, Any}
- vr = VersionRange(v)
- d = Dict{String, UUID}(dep => UUID(uuid) for (dep, uuid::String) in data)
- weak_deps[vr] = d
+# Validate that no version ranges overlap for the same dependency
+# This enforces the registry invariant that each dependency should be specified
+# at most once for any given version
+# Works with any collection type (Set, Dict, etc.) and any key type (UUID, String, etc.)
+function validate_no_overlapping_ranges(
+ compressed::Dict{VersionRange, T},
+ versions::Vector{VersionNumber},
+ pkg_name::String,
+ data_type::String, # "Deps", "WeakDeps", "Compat", or "WeakCompat"
+ name_to_uuid::Dict{String, UUID}
+ ) where {T}
+ # Build inverse mapping for better error messages
+ uuid_to_name = Dict{UUID, String}(uuid => name for (name, uuid) in name_to_uuid)
+
+ # For each version, check that no dependency UUID appears in multiple ranges
+ for v in versions
+ seen_deps = Dict{UUID, VersionRange}()
+ for (vrange, dep_collection) in compressed
+ if v in vrange
+ # Works for both Set{UUID} (iterate directly) and Dict{UUID,...} (iterate keys)
+ for dep_uuid in (dep_collection isa AbstractDict ? keys(dep_collection) : dep_collection)
+ if haskey(seen_deps, dep_uuid)
+ dep_name = get(uuid_to_name, dep_uuid, string(dep_uuid))
+ error(
+ "Overlapping ranges for dependency $(dep_name) in $(pkg_name) $(data_type).toml: " *
+ "version $v is covered by both $(seen_deps[dep_uuid]) and $(vrange)"
+ )
+ end
+ seen_deps[dep_uuid] = vrange
+ end
+ end
+ end
end
-
- @assert !isdefined(pkg, :info)
- pkg.info = PkgInfo(repo, subdir, version_info, compat, deps, weak_compat, weak_deps)
-
- return pkg.info
+ return
end
+# Simplified tarball reader without path tracking overhead
+function read_tarball_simple(
+ callback::Function,
+ predicate::Function,
+ tar::IO;
+ buf::Vector{UInt8} = Vector{UInt8}(undef, Tar.DEFAULT_BUFFER_SIZE),
+ )
+ globals = Dict{String, String}()
+ while !eof(tar)
+ hdr = Tar.read_header(tar, globals = globals, buf = buf)
+ hdr === nothing && break
+ predicate(hdr)::Bool || continue
+ Tar.check_header(hdr)
+ before = applicable(position, tar) ? position(tar) : 0
+ callback(hdr)
+ applicable(position, tar) || continue
+ advanced = position(tar) - before
+ expected = Tar.round_up(hdr.size)
+ advanced == expected ||
+ error("callback read $advanced bytes instead of $expected")
+ end
+ return
+end
-function uncompress_registry(tar_gz::AbstractString)
- if !isfile(tar_gz)
- error("$(repr(tar_gz)): No such file")
+function uncompress_registry(compressed_tar::AbstractString)
+ if !isfile(compressed_tar)
+ error("$(repr(compressed_tar)): No such file")
end
data = Dict{String, String}()
buf = Vector{UInt8}(undef, Tar.DEFAULT_BUFFER_SIZE)
io = IOBuffer()
- open(`$(exe7z()) x $tar_gz -so`) do tar
- Tar.read_tarball(x->true, tar; buf=buf) do hdr, _
- if hdr.type == :file
- Tar.read_data(tar, io; size=hdr.size, buf=buf)
- data[hdr.path] = String(take!(io))
- end
+ open(get_extract_cmd(compressed_tar)) do tar
+ read_tarball_simple(x -> true, tar; buf = buf) do hdr
+ Tar.read_data(tar, io; size = hdr.size, buf = buf)
+ data[hdr.path] = String(take!(io))
end
end
return data
end
-struct RegistryInstance
+mutable struct RegistryInstance
path::String
+ tree_info::Union{Base.SHA1, Nothing}
+ compressed_file::Union{String, Nothing}
+ const load_lock::ReentrantLock # Lock for thread-safe lazy loading
+
+ # Lazily loaded fields
name::String
uuid::UUID
repo::Union{String, Nothing}
description::Union{String, Nothing}
pkgs::Dict{UUID, PkgEntry}
- tree_info::Union{Base.SHA1, Nothing}
in_memory_registry::Union{Nothing, Dict{String, String}}
# various caches
name_to_uuids::Dict{String, Vector{UUID}}
+
+ # Inner constructor for lazy loading - leaves fields undefined
+ function RegistryInstance(path::String, tree_info::Union{Base.SHA1, Nothing}, compressed_file::Union{String, Nothing})
+ return new(path, tree_info, compressed_file, ReentrantLock())
+ end
+
+ # Full constructor for when all fields are known
+ function RegistryInstance(
+ path::String, tree_info::Union{Base.SHA1, Nothing}, compressed_file::Union{String, Nothing},
+ name::String, uuid::UUID, repo::Union{String, Nothing}, description::Union{String, Nothing},
+ pkgs::Dict{UUID, PkgEntry}, in_memory_registry::Union{Nothing, Dict{String, String}},
+ name_to_uuids::Dict{String, Vector{UUID}}
+ )
+ return new(path, tree_info, compressed_file, ReentrantLock(), name, uuid, repo, description, pkgs, in_memory_registry, name_to_uuids)
+ end
end
const REGISTRY_CACHE = Dict{String, Tuple{Base.SHA1, Bool, RegistryInstance}}()
+function init_package_info!(registry::RegistryInstance, pkg::PkgEntry)
+ # Thread-safe lazy loading with double-check pattern
+ # Use the registry's load_lock to protect lazy loading of package info
+ return @lock registry.load_lock begin
+ # Double-check: if another thread loaded while we were waiting for the lock
+ isdefined(pkg, :info) && return pkg.info
+
+ path = pkg.registry_path
+ in_memory_registry = registry.in_memory_registry
+
+ d_p = parsefile(in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Package.toml"))
+ name = d_p["name"]::String
+ name != pkg.name && error("inconsistent name in Registry.toml ($(name)) and Package.toml ($(pkg.name)) for pkg at $(path)")
+ repo = get(d_p, "repo", nothing)::Union{Nothing, String}
+ subdir = get(d_p, "subdir", nothing)::Union{Nothing, String}
+
+ # The presence of a [metadata.deprecated] table indicates the package is deprecated
+ # We store the raw table to allow other tools to use the metadata
+ metadata = get(d_p, "metadata", nothing)::Union{Nothing, Dict{String, Any}}
+ deprecated = metadata !== nothing ? get(metadata, "deprecated", nothing)::Union{Nothing, Dict{String, Any}} : nothing
+
+ # Versions.toml
+ d_v = custom_isfile(in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Versions.toml")) ?
+ parsefile(in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Versions.toml")) : Dict{String, Any}()
+ version_info = Dict{VersionNumber, VersionInfo}(
+ VersionNumber(k) =>
+ VersionInfo(SHA1(v["git-tree-sha1"]::String), get(v, "yanked", false)::Bool) for (k, v) in d_v
+ )
+
+ # Deps.toml (load first to build name -> UUID mapping)
+ name_to_uuid = Dict{String, UUID}()
+ deps = load_deps_data(in_memory_registry, pkg.registry_path, pkg.path, "Deps.toml", name_to_uuid)
+ # All packages depend on julia
+ deps[VersionRange()] = Set([JULIA_UUID])
+ name_to_uuid["julia"] = JULIA_UUID
+
+ # WeakDeps.toml (load to extend name -> UUID mapping)
+ weak_deps = load_deps_data(in_memory_registry, pkg.registry_path, pkg.path, "WeakDeps.toml", name_to_uuid)
+
+ # Compat.toml (convert names to UUIDs using the mapping)
+ compat = load_compat_data(in_memory_registry, pkg.registry_path, pkg.path, "Compat.toml", name_to_uuid)
+
+ # WeakCompat.toml (convert names to UUIDs using the mapping)
+ weak_compat = load_compat_data(in_memory_registry, pkg.registry_path, pkg.path, "WeakCompat.toml", name_to_uuid)
+
+ #=
+ # These validations are a bit too expensive
+ # RegistryTools does this already: https://github.com/JuliaRegistries/RegistryTools.jl/blob/b5ff4d541b0aad2261ac21416113cee9718e28b3/src/Compress.jl#L64
+ # Validate that no ranges overlap for the same dependency (registry invariant)
+ versions_list = sort!(collect(keys(version_info)))
+ if !isempty(deps)
+ validate_no_overlapping_ranges(deps, versions_list, pkg.name, "Deps", name_to_uuid)
+ end
+ if !isempty(weak_deps)
+ validate_no_overlapping_ranges(weak_deps, versions_list, pkg.name, "WeakDeps", name_to_uuid)
+ end
+ if !isempty(compat)
+ validate_no_overlapping_ranges(compat, versions_list, pkg.name, "Compat", name_to_uuid)
+ end
+ if !isempty(weak_compat)
+ validate_no_overlapping_ranges(weak_compat, versions_list, pkg.name, "WeakCompat", name_to_uuid)
+ end
+ =#
+
+ @assert !isdefined(pkg, :info)
+ pkg.info = PkgInfo(repo, subdir, deprecated, version_info, deps, compat, weak_deps, weak_compat)
+
+ # Free memory: delete the package's files from in_memory_registry since we've fully parsed them
+ if in_memory_registry !== nothing
+ for filename in ("Package.toml", "Versions.toml", "Deps.toml", "WeakDeps.toml", "Compat.toml", "WeakCompat.toml")
+ delete!(in_memory_registry, to_tar_path_format(joinpath(pkg.path, filename)))
+ end
+ end
+
+ return pkg.info
+ end
+end
+
+registry_info(registry::RegistryInstance, pkg::PkgEntry) = init_package_info!(registry, pkg)
+
+@noinline function _ensure_registry_loaded_slow!(r::RegistryInstance)
+ return @lock r.load_lock begin
+ # Double-check pattern: if another thread loaded while we were waiting for the lock
+ isdefined(r, :pkgs) && return r
+
+ if getfield(r, :compressed_file) !== nothing
+ r.in_memory_registry = uncompress_registry(joinpath(dirname(getfield(r, :path)), getfield(r, :compressed_file)))
+ else
+ r.in_memory_registry = nothing
+ end
+
+ d = parsefile(r.in_memory_registry, getfield(r, :path), "Registry.toml")
+ r.name = d["name"]::String
+ r.uuid = UUID(d["uuid"]::String)
+ r.repo = get(d, "repo", nothing)::Union{String, Nothing}
+ r.description = get(d, "description", nothing)::Union{String, Nothing}
+
+ r.pkgs = Dict{UUID, PkgEntry}()
+ for (uuid, info) in d["packages"]::Dict{String, Any}
+ uuid = UUID(uuid::String)
+ info::Dict{String, Any}
+ name = info["name"]::String
+ pkgpath = info["path"]::String
+ pkg = PkgEntry(pkgpath, getfield(r, :path), name, uuid)
+ r.pkgs[uuid] = pkg
+ end
+
+ r.name_to_uuids = Dict{String, Vector{UUID}}()
+
+ return r
+ end
+end
+
+# Property accessors that trigger lazy loading
+@inline function Base.getproperty(r::RegistryInstance, f::Symbol)
+ if f === :name || f === :uuid || f === :repo || f === :description || f === :pkgs || f === :name_to_uuids
+ _ensure_registry_loaded_slow!(r) # Takes a lock to ensure thread safety
+ end
+ return getfield(r, f)
+end
+
function get_cached_registry(path, tree_info::Base.SHA1, compressed::Bool)
if !ispath(path)
delete!(REGISTRY_CACHE, path)
@@ -326,33 +565,9 @@ function RegistryInstance(path::AbstractString)
end
end
- in_memory_registry = if compressed_file !== nothing
- uncompress_registry(joinpath(dirname(path), compressed_file))
- else
- nothing
- end
-
- d = parsefile(in_memory_registry, path, "Registry.toml")
- pkgs = Dict{UUID, PkgEntry}()
- for (uuid, info) in d["packages"]::Dict{String, Any}
- uuid = UUID(uuid::String)
- info::Dict{String, Any}
- name = info["name"]::String
- pkgpath = info["path"]::String
- pkg = PkgEntry(pkgpath, path, name, uuid, in_memory_registry)
- pkgs[uuid] = pkg
- end
- reg = RegistryInstance(
- path,
- d["name"]::String,
- UUID(d["uuid"]::String),
- get(d, "repo", nothing)::Union{String, Nothing},
- get(d, "description", nothing)::Union{String, Nothing},
- pkgs,
- tree_info,
- in_memory_registry,
- Dict{String, UUID}(),
- )
+ # Create partially initialized registry - defer expensive operations
+ reg = RegistryInstance(path, tree_info, compressed_file)
+
if tree_info !== nothing
REGISTRY_CACHE[path] = (tree_info, compressed_file !== nothing, reg)
end
@@ -366,8 +581,9 @@ function Base.show(io::IO, ::MIME"text/plain", r::RegistryInstance)
if r.tree_info !== nothing
println(io, " git-tree-sha1: ", r.tree_info)
end
- println(io, " packages: ", length(r.pkgs))
+ return println(io, " packages: ", length(r.pkgs))
end
+Base.show(io::IO, r::RegistryInstance) = Base.show(io, MIME"text/plain"(), r)
function uuids_from_name(r::RegistryInstance, name::String)
create_name_uuid_mapping!(r)
@@ -386,7 +602,7 @@ end
function verify_compressed_registry_toml(path::String)
d = TOML.tryparsefile(path)
if d isa TOML.ParserError
- @warn "Failed to parse registry TOML file at $(repr(path))" exception=d
+ @warn "Failed to parse registry TOML file at $(repr(path))" exception = d
return false
end
for key in ("git-tree-sha1", "uuid", "path")
@@ -403,7 +619,7 @@ function verify_compressed_registry_toml(path::String)
return true
end
-function reachable_registries(; depots::Union{String, Vector{String}}=Base.DEPOT_PATH)
+function reachable_registries(; depots::Union{String, Vector{String}} = Base.DEPOT_PATH)
# collect registries
if depots isa String
depots = [depots]
@@ -413,7 +629,7 @@ function reachable_registries(; depots::Union{String, Vector{String}}=Base.DEPOT
isdir(d) || continue
reg_dir = joinpath(d, "registries")
isdir(reg_dir) || continue
- reg_paths = readdir(reg_dir; join=true)
+ reg_paths = readdir(reg_dir; join = true)
candidate_registries = String[]
# All folders could be registries
append!(candidate_registries, filter(isdir, reg_paths))
diff --git a/src/Resolve/Resolve.jl b/src/Resolve/Resolve.jl
index dc11c7540f..4d2b891a03 100644
--- a/src/Resolve/Resolve.jl
+++ b/src/Resolve/Resolve.jl
@@ -3,6 +3,8 @@
module Resolve
using ..Versions
+using ..Registry
+using ..Types
import ..stdout_f, ..stderr_f
using Printf
@@ -14,7 +16,7 @@ export resolve, sanity_check, Graph, pkgID
####################
# Requires / Fixed #
####################
-const Requires = Dict{UUID,VersionSpec}
+const Requires = Dict{UUID, VersionSpec}
struct Fixed
version::VersionNumber
@@ -35,19 +37,19 @@ Base.show(io::IO, f::Fixed) = isempty(f.requires) ?
struct ResolverError <: Exception
msg::AbstractString
- ex::Union{Exception,Nothing}
+ ex::Union{Exception, Nothing}
end
ResolverError(msg::AbstractString) = ResolverError(msg, nothing)
struct ResolverTimeoutError <: Exception
msg::AbstractString
- ex::Union{Exception,Nothing}
+ ex::Union{Exception, Nothing}
end
ResolverTimeoutError(msg::AbstractString) = ResolverTimeoutError(msg, nothing)
function Base.showerror(io::IO, pkgerr::ResolverError)
print(io, pkgerr.msg)
- if pkgerr.ex !== nothing
+ return if pkgerr.ex !== nothing
pkgex = pkgerr.ex
if isa(pkgex, CompositeException)
for cex in pkgex
@@ -76,16 +78,16 @@ function resolve(graph::Graph)
return compute_output_dict(sol, graph)
end
-function _resolve(graph::Graph, lower_bound::Union{Vector{Int},Nothing}, previous_sol::Union{Vector{Int},Nothing})
+function _resolve(graph::Graph, lower_bound::Union{Vector{Int}, Nothing}, previous_sol::Union{Vector{Int}, Nothing})
np = graph.np
spp = graph.spp
gconstr = graph.gconstr
if lower_bound ≢ nothing
- for p0 = 1:np
+ for p0 in 1:np
v0 = lower_bound[p0]
@assert v0 ≠ spp[p0]
- gconstr[p0][1:(v0-1)] .= false
+ gconstr[p0][1:(v0 - 1)] .= false
end
end
@@ -114,11 +116,15 @@ function _resolve(graph::Graph, lower_bound::Union{Vector{Int},Nothing}, previou
else
@assert maxsum_result == :timedout
log_event_global!(graph, "maxsum solver timed out")
- throw(ResolverTimeoutError("""
- The resolution process timed out. This is likely due to unsatisfiable requirements.
- You can increase the maximum resolution time via the environment variable JULIA_PKG_RESOLVE_MAX_TIME
- (the current value is $(get(ENV, "JULIA_PKG_RESOLVE_MAX_TIME", DEFAULT_MAX_TIME))).
- """))
+ throw(
+ ResolverTimeoutError(
+ """
+ The resolution process timed out. This is likely due to unsatisfiable requirements.
+ You can increase the maximum resolution time via the environment variable JULIA_PKG_RESOLVE_MAX_TIME
+ (the current value is $(get(ENV, "JULIA_PKG_RESOLVE_MAX_TIME", DEFAULT_MAX_TIME))).
+ """
+ )
+ )
end
@@ -153,6 +159,13 @@ function _resolve(graph::Graph, lower_bound::Union{Vector{Int},Nothing}, previou
end
end
+struct SanitySortKey{G, P}
+ gadj::G
+ pdict::P
+end
+
+(key::SanitySortKey)(pv) = -length(key.gadj[key.pdict[pv[1]]])
+
"""
Scan the graph for (explicit or implicit) contradictions. Returns a list of problematic
(package,version) combinations.
@@ -166,7 +179,7 @@ function sanity_check(graph::Graph, sources::Set{UUID} = Set{UUID}(), verbose::B
isempty(req_inds) || @warn("sanity check called on a graph with non-empty requirements")
if !any(is_julia(graph, fp0) for fp0 in fix_inds)
@warn("sanity check called on a graph without julia requirement, adding it")
- add_fixed!(graph, Dict(uuid_julia=>Fixed(VERSION)))
+ add_fixed!(graph, Dict(uuid_julia => Fixed(VERSION)))
end
if length(fix_inds) ≠ 1
@warn("sanity check called on a graph with extra fixed requirements (besides julia)")
@@ -190,22 +203,22 @@ function sanity_check(graph::Graph, sources::Set{UUID} = Set{UUID}(), verbose::B
pvers = data.pvers
eq_classes = data.eq_classes
- problematic = Tuple{String,VersionNumber}[]
+ problematic = Tuple{String, VersionNumber}[]
np == 0 && return problematic
- vers = [(pkgs[p0],pvers[p0][v0]) for p0 = 1:np for v0 = 1:(spp[p0]-1)]
- sort!(vers, by=pv->(-length(gadj[pdict[pv[1]]])))
+ vers = [(pkgs[p0], pvers[p0][v0]) for p0 in 1:np for v0 in 1:(spp[p0] - 1)]
+ sort!(vers, by = SanitySortKey(gadj, pdict))
nv = length(vers)
- svdict = Dict{Tuple{UUID,VersionNumber},Int}(vers[i] => i for i = 1:nv)
+ svdict = Dict{Tuple{UUID, VersionNumber}, Int}(vers[i] => i for i in 1:nv)
checked = falses(nv)
last_str_len = 0
- for (i,(p,vn)) in enumerate(vers)
+ for (i, (p, vn)) in enumerate(vers)
if verbose
frac_compl = i / nv
print("\r", " "^last_str_len, "\r")
@@ -249,8 +262,8 @@ function sanity_check(graph::Graph, sources::Set{UUID} = Set{UUID}(), verbose::B
else
@assert verify_solution(sol, graph)
sol_dict = compute_output_dict(sol, graph)
- for (sp,svn) in sol_dict
- j = svdict[sp,svn]
+ for (sp, svn) in sol_dict
+ j = svdict[sp, svn]
checked[j] = true
end
end
@@ -278,8 +291,8 @@ function compute_output_dict(sol::Vector{Int}, graph::Graph)
pvers = graph.data.pvers
pruned = graph.data.pruned
- want = Dict{UUID,VersionNumber}()
- for p0 = 1:np
+ want = Dict{UUID, VersionNumber}()
+ for p0 in 1:np
p0 ∈ fix_inds && continue
p = pkgs[p0]
s0 = sol[p0]
@@ -287,7 +300,7 @@ function compute_output_dict(sol::Vector{Int}, graph::Graph)
vn = pvers[p0][s0]
want[p] = vn
end
- for (p,vn) in pruned
+ for (p, vn) in pruned
@assert !haskey(want, p)
want[p] = vn
end
@@ -309,12 +322,12 @@ function greedysolver(graph::Graph)
gconstr = graph.gconstr
# initialize solution: all uninstalled
- sol = Int[spp[p0] for p0 = 1:np]
+ sol = Int[spp[p0] for p0 in 1:np]
# packages which are not allowed to be uninstalled
# (NOTE: this is potentially a superset of graph.req_inds,
# since it may include implicit requirements)
- req_inds = Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end])
+ req_inds = Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end])
# set up required packages to their highest allowed versions
for rp0 in req_inds
@@ -347,10 +360,10 @@ function greedysolver(graph::Graph)
@assert s0 < spp[p0]
# scan dependencies
- for (j1,p1) in enumerate(gadj[p0])
+ for (j1, p1) in enumerate(gadj[p0])
msk = gmsk[p0][j1]
# look for the highest version which satisfies the requirements
- v1 = findlast(msk[:,s0] .& gconstr[p1])
+ v1 = findlast(msk[:, s0] .& gconstr[p1])
v1 == spp[p1] && continue # p1 is not required by p0's current version
# if we found a version, and the package was uninstalled
# or the same version was already selected, we're ok;
@@ -374,7 +387,7 @@ function greedysolver(graph::Graph)
pop_snapshot!(graph)
- for p0 = 1:np
+ for p0 in 1:np
log_event_greedysolved!(graph, p0, sol[p0])
end
@@ -396,13 +409,13 @@ function verify_solution(sol::Vector{Int}, graph::Graph)
@assert all(sol .> 0)
# verify constraints and dependencies
- for p0 = 1:np
+ for p0 in 1:np
s0 = sol[p0]
gconstr[p0][s0] || (@warn("gconstr[$p0][$s0] fail"); return false)
- for (j1,p1) in enumerate(gadj[p0])
+ for (j1, p1) in enumerate(gadj[p0])
msk = gmsk[p0][j1]
s1 = sol[p1]
- msk[s1,s0] || (@warn("gmsk[$p0][$p1][$s1,$s0] fail"); return false)
+ msk[s1, s0] || (@warn("gmsk[$p0][$p1][$s1,$s0] fail"); return false)
end
end
return true
@@ -413,7 +426,7 @@ end
Uninstall unreachable packages:
start from the required ones and keep only the packages reachable from them along the graph.
"""
-function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol,Int}}, graph::Graph)
+function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol, Int}}, graph::Graph)
np = graph.np
spp = graph.spp
gadj = graph.gadj
@@ -421,8 +434,8 @@ function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol,Int}
gconstr = graph.gconstr
uninst = trues(np)
- staged = Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end])
- seen = copy(staged) ∪ Set{Int}(p0 for p0 = 1:np if sol[p0] == spp[p0]) # we'll skip uninstalled packages
+ staged = Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end])
+ seen = copy(staged) ∪ Set{Int}(p0 for p0 in 1:np if sol[p0] == spp[p0]) # we'll skip uninstalled packages
while !isempty(staged)
staged_next = Set{Int}()
@@ -430,9 +443,9 @@ function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol,Int}
s0 = sol[p0]
@assert s0 ≠ spp[p0]
uninst[p0] = false
- for (j1,p1) in enumerate(gadj[p0])
+ for (j1, p1) in enumerate(gadj[p0])
p1 ∈ seen && continue # we've already seen the package, or it is uninstalled
- gmsk[p0][j1][end,s0] && continue # the package is not required by p0 at version s0
+ gmsk[p0][j1][end, s0] && continue # the package is not required by p0 at version s0
push!(staged_next, p1)
end
end
@@ -444,6 +457,7 @@ function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol,Int}
sol[p0] = spp[p0]
why[p0] = :uninst
end
+ return
end
"""
@@ -463,7 +477,7 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
pkgs = graph.data.pkgs
# keep a track for the log
- why = Union{Symbol,Int}[0 for p0 = 1:np]
+ why = Union{Symbol, Int}[0 for p0 in 1:np]
# Strategy:
# There's a cycle in which first the unnecessary (unconnected) packages are removed,
@@ -510,10 +524,10 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
move_up .= sol .≠ spp
copy!(upperbound, spp)
let move_up = move_up
- lowerbound .= [move_up[p0] ? sol[p0] : 1 for p0 = 1:np]
+ lowerbound .= [move_up[p0] ? sol[p0] : 1 for p0 in 1:np]
end
- for p0 = 1:np
+ for p0 in 1:np
s0 = sol[p0]
s0 == spp[p0] && (why[p0] = :uninst; continue) # the package is not installed
move_up[p0] || continue # the package is only installed as a result of a previous bump, skip it
@@ -521,9 +535,9 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
@assert upperbound[p0] == spp[p0]
# pick the next version that doesn't violate a constraint (if any)
- bump_range = collect(s0+1:spp[p0])
+ bump_range = collect((s0 + 1):spp[p0])
bump = let gconstr = gconstr
- findfirst(v0->gconstr[p0][v0], bump_range)
+ findfirst(v0 -> gconstr[p0][v0], bump_range)
end
# no such version was found, skip this package
@@ -551,7 +565,7 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
while !isempty(staged)
for f0 in staged
- for (j1,f1) in enumerate(gadj[f0])
+ for (j1, f1) in enumerate(gadj[f0])
s1 = sol[f1]
msk = gmsk[f0][j1]
if f1 == p0 || try_uninstall
@@ -565,13 +579,13 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
s1 > lb1 && @assert s1 == spp[f1]
# the arrangement of the range gives precedence to improving the
# current situation, but allows reinstalling a package if needed
- bump_range = vcat(s1:ub1, s1-1:-1:lb1)
+ bump_range = vcat(s1:ub1, (s1 - 1):-1:lb1)
else
bump_range = collect(ub1:-1:lb1)
end
end
bump = let gconstr = gconstr
- findfirst(v1->(gconstr[f1][v1] && msk[v1, sol[f0]]), bump_range)
+ findfirst(v1 -> (gconstr[f1][v1] && msk[v1, sol[f0]]), bump_range)
end
if bump ≡ nothing
why[p0] = f1 # TODO: improve this? (ideally we might want the path from p0 to f1)
@@ -610,15 +624,16 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
@assert verify_solution(sol, graph)
- for p0 = 1:np
+ for p0 in 1:np
log_event_maxsumsolved!(graph, p0, sol[p0], why[p0])
end
+ return
end
function apply_maxsum_trace!(graph::Graph, sol::Vector{Int})
gconstr = graph.gconstr
- for (p0,s0) in enumerate(sol)
+ for (p0, s0) in enumerate(sol)
s0 == 0 && continue
gconstr0 = gconstr[p0]
old_constr = copy(gconstr0)
@@ -627,9 +642,10 @@ function apply_maxsum_trace!(graph::Graph, sol::Vector{Int})
gconstr0[s0] = true
gconstr0 ≠ old_constr && log_event_maxsumtrace!(graph, p0, s0)
end
+ return
end
-function trigger_failure!(graph::Graph, sol::Vector{Int}, staged::Tuple{Int,Int})
+function trigger_failure!(graph::Graph, sol::Vector{Int}, staged::Tuple{Int, Int})
apply_maxsum_trace!(graph, sol)
simplify_graph_soft!(graph, Set(findall(sol .> 0)), log_events = true) # this may throw an error...
@@ -643,8 +659,8 @@ function trigger_failure!(graph::Graph, sol::Vector{Int}, staged::Tuple{Int,Int}
log_event_maxsumtrace!(graph, p0, v0)
simplify_graph!(graph) # this may throw an error...
outdict = resolve(graph) # ...otherwise, this MUST throw an error
- open(io->showlog(io, graph, view=:chronological), "logchrono.errresolve.txt", "w")
- error("this is not supposed to happen... $(Dict(pkgID(p, graph) => vn for (p,vn) in outdict))")
+ open(io -> showlog(io, graph, view = :chronological), "logchrono.errresolve.txt", "w")
+ error("this is not supposed to happen... $(Dict(pkgID(p, graph) => vn for (p, vn) in outdict))")
end
end # module
diff --git a/src/Resolve/fieldvalues.jl b/src/Resolve/fieldvalues.jl
index 028d6c6036..a32929f14a 100644
--- a/src/Resolve/fieldvalues.jl
+++ b/src/Resolve/fieldvalues.jl
@@ -15,10 +15,12 @@ struct FieldValue
l1::VersionWeight
l2::VersionWeight
l3::Int64
- FieldValue(l0::Integer = 0,
- l1::VersionWeight = zero(VersionWeight),
- l2::VersionWeight = zero(VersionWeight),
- l3::Integer = 0) = new(l0, l1, l2, l3)
+ FieldValue(
+ l0::Integer = 0,
+ l1::VersionWeight = zero(VersionWeight),
+ l2::VersionWeight = zero(VersionWeight),
+ l3::Integer = 0
+ ) = new(l0, l1, l2, l3)
end
# This isn't nice, but it's for debugging only anyway
@@ -37,10 +39,10 @@ const Field = Vector{FieldValue}
Base.zero(::Type{FieldValue}) = FieldValue()
-Base.typemin(::Type{FieldValue}) = (x=typemin(Int64); y=typemin(VersionWeight); FieldValue(x, y, y, x))
+Base.typemin(::Type{FieldValue}) = (x = typemin(Int64); y = typemin(VersionWeight); FieldValue(x, y, y, x))
-Base.:-(a::FieldValue, b::FieldValue) = FieldValue(a.l0-b.l0, a.l1-b.l1, a.l2-b.l2, a.l3-b.l3)
-Base.:+(a::FieldValue, b::FieldValue) = FieldValue(a.l0+b.l0, a.l1+b.l1, a.l2+b.l2, a.l3+b.l3)
+Base.:-(a::FieldValue, b::FieldValue) = FieldValue(a.l0 - b.l0, a.l1 - b.l1, a.l2 - b.l2, a.l3 - b.l3)
+Base.:+(a::FieldValue, b::FieldValue) = FieldValue(a.l0 + b.l0, a.l1 + b.l1, a.l2 + b.l2, a.l3 + b.l3)
function Base.isless(a::FieldValue, b::FieldValue)
a.l0 < b.l0 && return true
@@ -59,7 +61,7 @@ validmax(a::FieldValue) = a.l0 >= 0
function Base.argmax(f::Field)
m = typemin(FieldValue)
mi = 0
- for j = length(f):-1:1
+ for j in length(f):-1:1
if f[j] > m
m = f[j]
mi = j
@@ -74,7 +76,7 @@ end
function secondmax(f::Field, msk::BitVector = trues(length(f)))
m = typemin(FieldValue)
m2 = typemin(FieldValue)
- for i = 1:length(f)
+ for i in 1:length(f)
msk[i] || continue
a = f[i]
if a > m
diff --git a/src/Resolve/graphtype.jl b/src/Resolve/graphtype.jl
index f2cffce50d..496b460dbf 100644
--- a/src/Resolve/graphtype.jl
+++ b/src/Resolve/graphtype.jl
@@ -15,17 +15,17 @@
const UUID0 = UUID(UInt128(0))
-const ResolveJournal = Vector{Tuple{UUID,String}}
+const ResolveJournal = Vector{Tuple{UUID, String}}
mutable struct ResolveLogEntry
journal::ResolveJournal # shared with all other entries
pkg::UUID
header::String
- events::Vector{Tuple{Any,String}} # here Any should ideally be Union{ResolveLogEntry,Nothing}
+ events::Vector{Tuple{Any, String}} # here Any should ideally be Union{ResolveLogEntry,Nothing}
ResolveLogEntry(journal::ResolveJournal, pkg::UUID, header::String = "") = new(journal, pkg, header, [])
end
-function Base.push!(entry::ResolveLogEntry, reason::Tuple{Union{ResolveLogEntry,Nothing},String}, to_journal::Bool = true)
+function Base.push!(entry::ResolveLogEntry, reason::Tuple{Union{ResolveLogEntry, Nothing}, String}, to_journal::Bool = true)
push!(entry.events, reason)
to_journal && entry.pkg ≠ uuid_julia && push!(entry.journal, (entry.pkg, reason[2]))
return entry
@@ -41,7 +41,7 @@ mutable struct ResolveLog
globals::ResolveLogEntry
# pool: records entries associated to each package
- pool::Dict{UUID,ResolveLogEntry}
+ pool::Dict{UUID, ResolveLogEntry}
# journal: record all messages in order (shared between all entries)
journal::ResolveJournal
@@ -54,18 +54,18 @@ mutable struct ResolveLog
verbose::Bool
# UUID to names
- uuid_to_name::Dict{UUID,String}
+ uuid_to_name::Dict{UUID, String}
- function ResolveLog(uuid_to_name::Dict{UUID,String}, verbose::Bool = false)
+ function ResolveLog(uuid_to_name::Dict{UUID, String}, verbose::Bool = false)
journal = ResolveJournal()
init = ResolveLogEntry(journal, UUID0, "")
globals = ResolveLogEntry(journal, UUID0, "Global events:")
- return new(init, globals, Dict{UUID,ResolveLogEntry}(), journal, true, verbose, uuid_to_name)
+ return new(init, globals, Dict{UUID, ResolveLogEntry}(), journal, true, verbose, uuid_to_name)
end
end
# Installation state: either a version, or uninstalled
-const InstState = Union{VersionNumber,Nothing}
+const InstState = Union{VersionNumber, Nothing}
# GraphData is basically a part of Graph that collects data structures useful
@@ -82,7 +82,7 @@ mutable struct GraphData
spp::Vector{Int}
# package dict: associates an index to each package id
- pdict::Dict{UUID,Int}
+ pdict::Dict{UUID, Int}
# package versions: for each package, keep the list of the
# possible version numbers; this defines a
@@ -93,57 +93,58 @@ mutable struct GraphData
# versions dict: associates a version index to each package
# version; such that
# pvers[p0][vdict[p0][vn]] = vn
- vdict::Vector{Dict{VersionNumber,Int}}
+ vdict::Vector{Dict{VersionNumber, Int}}
# UUID to names
- uuid_to_name::Dict{UUID,String}
+ uuid_to_name::Dict{UUID, String}
# pruned packages: during graph simplification, packages that
# only have one allowed version are pruned.
# This keeps track of them, so that they may
# be returned in the solution (unless they
# were explicitly fixed)
- pruned::Dict{UUID,VersionNumber}
+ pruned::Dict{UUID, VersionNumber}
# equivalence classes: for each package and each of its possible
# states, keep track of other equivalent states
- eq_classes::Dict{UUID,Dict{InstState,Set{InstState}}}
+ eq_classes::Dict{UUID, Dict{InstState, Set{InstState}}}
# resolve log: keep track of the resolution process
rlog::ResolveLog
function GraphData(
- compat::Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}},
- uuid_to_name::Dict{UUID,String},
+ compat_compressed::Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}},
+ pkg_versions::Dict{UUID, Vector{VersionNumber}},
+ uuid_to_name::Dict{UUID, String},
verbose::Bool = false
)
# generate pkgs
- pkgs = sort!(collect(keys(compat)))
+ pkgs = sort!(collect(keys(pkg_versions)))
np = length(pkgs)
# generate pdict
- pdict = Dict{UUID,Int}(pkgs[p0] => p0 for p0 = 1:np)
+ pdict = Dict{UUID, Int}(pkgs[p0] => p0 for p0 in 1:np)
- # generate spp and pvers
- pvers = [sort!(collect(keys(compat[pkgs[p0]]))) for p0 = 1:np]
+ # generate spp and pvers from provided version lists
+ pvers = [pkg_versions[pkgs[p0]] for p0 in 1:np]
spp = length.(pvers) .+ 1
# generate vdict
- vdict = [Dict{VersionNumber,Int}(vn => i for (i,vn) in enumerate(pvers[p0])) for p0 = 1:np]
+ vdict = [Dict{VersionNumber, Int}(vn => i for (i, vn) in enumerate(pvers[p0])) for p0 in 1:np]
# nothing is pruned yet, of course
- pruned = Dict{UUID,VersionNumber}()
+ pruned = Dict{UUID, VersionNumber}()
# equivalence classes (at the beginning each state represents just itself)
eq_vn(v0, p0) = (v0 == spp[p0] ? nothing : pvers[p0][v0])
# Hot code, measure performance before changing
- eq_classes = Dict{UUID,Dict{InstState,Set{InstState}}}()
- for p0 = 1:np
+ eq_classes = Dict{UUID, Dict{InstState, Set{InstState}}}()
+ for p0 in 1:np
d = Dict{InstState, Set{InstState}}()
- for v0 = 1:spp[p0]
+ for v0 in 1:spp[p0]
let p0 = p0 # Due to https://github.com/JuliaLang/julia/issues/15276
- d[eq_vn(v0,p0)] = Set([eq_vn(v0,p0)])
+ d[eq_vn(v0, p0)] = Set{InstState}((eq_vn(v0, p0),))
end
end
eq_classes[pkgs[p0]] = d
@@ -164,10 +165,10 @@ mutable struct GraphData
np = data.np
spp = copy(data.spp)
pdict = copy(data.pdict)
- pvers = [copy(data.pvers[p0]) for p0 = 1:np]
- vdict = [copy(data.vdict[p0]) for p0 = 1:np]
+ pvers = [copy(data.pvers[p0]) for p0 in 1:np]
+ vdict = [copy(data.vdict[p0]) for p0 in 1:np]
pruned = copy(data.pruned)
- eq_classes = Dict(p => copy(eq) for (p,eq) in data.eq_classes)
+ eq_classes = Dict(p => copy(eq) for (p, eq) in data.eq_classes)
rlog = deepcopy(data.rlog)
uuid_to_name = rlog.uuid_to_name
@@ -206,7 +207,7 @@ mutable struct Graph
# allows one to retrieve the indices in gadj, so that
# gadj[p0][adjdict[p1][p0]] = p1
# ("At which index does package p1 appear in gadj[p0]?")
- adjdict::Vector{Dict{Int,Int}}
+ adjdict::Vector{Dict{Int, Int}}
# indices of the packages that were *explicitly* required
# used to favor their versions at resolution
@@ -221,7 +222,7 @@ mutable struct Graph
# stack of constraints/ignored packages:
# allows to keep a sort of "versioning" of the constraints
# such that the solver can implement tentative solutions
- solve_stack::Vector{Tuple{Vector{BitVector},BitVector}}
+ solve_stack::Vector{Tuple{Vector{BitVector}, BitVector}}
# states per package: same as in GraphData
spp::Vector{Int}
@@ -235,73 +236,126 @@ mutable struct Graph
cavfld::Vector{FieldValue}
function Graph(
- compat::Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}},
- compat_weak::Dict{UUID,Dict{VersionNumber,Set{UUID}}},
- uuid_to_name::Dict{UUID,String},
+ deps_compressed::Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}},
+ compat_compressed::Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}},
+ weak_deps_compressed::Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}},
+ weak_compat_compressed::Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}},
+ pkg_versions::Dict{UUID, Vector{VersionNumber}},
+ pkg_versions_per_registry::Dict{UUID, Vector{Set{VersionNumber}}},
+ uuid_to_name::Dict{UUID, String},
reqs::Requires,
- fixed::Dict{UUID,Fixed},
+ fixed::Dict{UUID, Fixed},
verbose::Bool = false,
- julia_version::Union{VersionNumber,Nothing} = VERSION
+ julia_version::Union{VersionNumber, Nothing} = VERSION
)
# Tell the resolver about julia itself
uuid_to_name[uuid_julia] = "julia"
if julia_version !== nothing
fixed[uuid_julia] = Fixed(julia_version)
- compat[uuid_julia] = Dict(julia_version => Dict{VersionNumber,Dict{UUID,VersionSpec}}())
+ deps_compressed[uuid_julia] = [Dict{VersionRange, Set{UUID}}()]
+ compat_compressed[uuid_julia] = [Dict{VersionRange, Dict{UUID, VersionSpec}}()]
+ weak_deps_compressed[uuid_julia] = [Dict{VersionRange, Set{UUID}}()]
+ weak_compat_compressed[uuid_julia] = [Dict{VersionRange, Dict{UUID, VersionSpec}}()]
+ pkg_versions[uuid_julia] = [julia_version]
+ pkg_versions_per_registry[uuid_julia] = [Set([julia_version])]
else
- compat[uuid_julia] = Dict{VersionNumber,Dict{UUID,VersionSpec}}()
+ deps_compressed[uuid_julia] = [Dict{VersionRange, Set{UUID}}()]
+ compat_compressed[uuid_julia] = [Dict{VersionRange, Dict{UUID, VersionSpec}}()]
+ weak_deps_compressed[uuid_julia] = [Dict{VersionRange, Set{UUID}}()]
+ weak_compat_compressed[uuid_julia] = [Dict{VersionRange, Dict{UUID, VersionSpec}}()]
+ pkg_versions[uuid_julia] = VersionNumber[]
+ pkg_versions_per_registry[uuid_julia] = [Set{VersionNumber}()]
end
- data = GraphData(compat, uuid_to_name, verbose)
+ data = GraphData(compat_compressed, pkg_versions, uuid_to_name, verbose)
pkgs, np, spp, pdict, pvers, vdict, rlog = data.pkgs, data.np, data.spp, data.pdict, data.pvers, data.vdict, data.rlog
extended_deps = let spp = spp # Due to https://github.com/JuliaLang/julia/issues/15276
- [Vector{Dict{Int,BitVector}}(undef, spp[p0]-1) for p0 = 1:np]
+ [Vector{Dict{Int, BitVector}}(undef, spp[p0] - 1) for p0 in 1:np]
end
- for p0 = 1:np, v0 = 1:(spp[p0]-1)
- vn = pvers[p0][v0]
- req = Dict{Int,VersionSpec}()
+ vnmap = Dict{UUID, VersionSpec}()
+ reg_result = Dict{UUID, VersionSpec}()
+ req = Dict{Int, VersionSpec}()
+ for p0 in 1:np
uuid0 = pkgs[p0]
- vnmap = get(Dict{UUID,VersionSpec}, compat[uuid0], vn)
- for (uuid1, vs) in vnmap
- p1 = pdict[uuid1]
- p1 == p0 && error("Package $(pkgID(pkgs[p0], uuid_to_name)) version $vn has a dependency with itself")
- # check conflicts instead of intersecting?
- # (intersecting is used by fixed packages though...)
- req_p1 = get!(VersionSpec, req, p1)
- req[p1] = req_p1 ∩ vs
- end
- # Translate the requirements into bit masks
- # Hot code, measure performance before changing
- req_msk = Dict{Int,BitVector}()
- maybe_weak = haskey(compat_weak, uuid0) && haskey(compat_weak[uuid0], vn)
- for (p1, vs) in req
- pv = pvers[p1]
- req_msk_p1 = BitVector(undef, spp[p1])
- @inbounds for i in 1:spp[p1] - 1
- req_msk_p1[i] = pv[i] ∈ vs
+
+ # Query compressed deps and compat data for this version (including weak deps)
+ # We have a vector of per-registry dictionaries, need to query across all
+ uuid0_deps_list = get(Vector{Dict{VersionRange, Set{UUID}}}, deps_compressed, uuid0)
+ uuid0_compat_list = get(Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}, compat_compressed, uuid0)
+ uuid0_weak_deps_list = get(Vector{Dict{VersionRange, Set{UUID}}}, weak_deps_compressed, uuid0)
+ uuid0_weak_compat_list = get(Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}, weak_compat_compressed, uuid0)
+ uuid0_versions_per_reg = get(Vector{Set{VersionNumber}}, pkg_versions_per_registry, uuid0)
+
+ for v0 in 1:(spp[p0] - 1)
+ vn = pvers[p0][v0]
+ empty!(req)
+ Registry.query_compat_for_version_multi_registry!(vnmap, reg_result, uuid0_deps_list, uuid0_compat_list, uuid0_weak_deps_list, uuid0_weak_compat_list, uuid0_versions_per_reg, vn)
+
+ # Filter out incompatible stdlib compat entries from registry dependencies
+ for (dep_uuid, dep_compat) in vnmap
+ if Types.is_stdlib(dep_uuid) && !(dep_uuid in Types.UPGRADABLE_STDLIBS_UUIDS)
+ stdlib_ver = Types.stdlib_version(dep_uuid, julia_version)
+ if stdlib_ver !== nothing && !isempty(dep_compat) && !(stdlib_ver in dep_compat)
+ @debug "Ignoring incompatible stdlib compat entry" dep = get(uuid_to_name, dep_uuid, string(dep_uuid)) stdlib_ver dep_compat package = uuid_to_name[uuid0] version = vn
+ delete!(vnmap, dep_uuid)
+ end
+ end
+ end
+
+ for (uuid1, vs) in vnmap
+ p1 = pdict[uuid1]
+ p1 == p0 && error("Package $(pkgID(pkgs[p0], uuid_to_name)) version $vn has a dependency with itself")
+ # check conflicts instead of intersecting?
+ # (intersecting is used by fixed packages though...)
+ req_p1 = get(req, p1, nothing)
+ if req_p1 == nothing
+ req[p1] = vs
+ else
+ req[p1] = req_p1 ∩ vs
+ end
end
- weak = maybe_weak && (pkgs[p1] ∈ compat_weak[uuid0][vn])
- req_msk_p1[end] = weak
- req_msk[p1] = req_msk_p1
+
+ # Translate the requirements into bit masks
+ # Hot code, measure performance before changing
+ req_msk = Dict{Int, BitVector}()
+ sizehint!(req_msk, length(req))
+
+ for (p1, vs) in req
+ pv = pvers[p1]
+ # Allocate BitVector with space for weak dep flag
+ req_msk_p1 = BitVector(undef, spp[p1])
+ # Use optimized batch version check (fills indices 1 through spp[p1]-1)
+ Versions.matches_spec_range!(req_msk_p1, pv, vs, spp[p1] - 1)
+ # Check if this is a weak dep across all registries
+ weak = false
+ for weak_deps_dict in uuid0_weak_deps_list
+ if Registry.is_weak_dep(weak_deps_dict, vn, pkgs[p1])
+ weak = true
+ break
+ end
+ end
+ req_msk_p1[end] = weak
+ req_msk[p1] = req_msk_p1
+ end
+ extended_deps[p0][v0] = req_msk
end
- extended_deps[p0][v0] = req_msk
end
- gadj = [Int[] for p0 = 1:np]
- gmsk = [BitMatrix[] for p0 = 1:np]
+ gadj = [Int[] for p0 in 1:np]
+ gmsk = [BitMatrix[] for p0 in 1:np]
gconstr = let spp = spp # Due to https://github.com/JuliaLang/julia/issues/15276
- [trues(spp[p0]) for p0 = 1:np]
+ [trues(spp[p0]) for p0 in 1:np]
end
- adjdict = [Dict{Int,Int}() for p0 = 1:np]
+ adjdict = [Dict{Int, Int}() for p0 in 1:np]
- for p0 = 1:np, v0 = 1:(spp[p0]-1), (p1,rmsk1) in extended_deps[p0][v0]
+ for p0 in 1:np, v0 in 1:(spp[p0] - 1), (p1, rmsk1) in extended_deps[p0][v0]
@assert p0 ≠ p1
j0 = get(adjdict[p1], p0, length(gadj[p0]) + 1)
j1 = get(adjdict[p0], p1, length(gadj[p1]) + 1)
@assert (j0 > length(gadj[p0]) && j1 > length(gadj[p1])) ||
- (j0 ≤ length(gadj[p0]) && j1 ≤ length(gadj[p1]))
+ (j0 ≤ length(gadj[p0]) && j1 ≤ length(gadj[p1]))
if j0 > length(gadj[p0])
push!(gadj[p0], p1)
@@ -322,7 +376,7 @@ mutable struct Graph
bmt = gmsk[p1][j1]
end
- for v1 = 1:spp[p1]
+ @inbounds for v1 in 1:spp[p1]
rmsk1[v1] && continue
bm[v1, v0] = false
bmt[v0, v1] = false
@@ -333,10 +387,12 @@ mutable struct Graph
fix_inds = Set{Int}()
ignored = falses(np)
- solve_stack = Tuple{Vector{BitVector},BitVector}[]
+ solve_stack = Tuple{Vector{BitVector}, BitVector}[]
- graph = new(data, gadj, gmsk, gconstr, adjdict, req_inds, fix_inds, ignored, solve_stack, spp, np,
- FieldValue[], FieldValue[], FieldValue[])
+ graph = new(
+ data, gadj, gmsk, gconstr, adjdict, req_inds, fix_inds, ignored, solve_stack, spp, np,
+ FieldValue[], FieldValue[], FieldValue[]
+ )
_add_fixed!(graph, fixed)
_add_reqs!(graph, reqs, :explicit_requirement)
@@ -351,14 +407,14 @@ mutable struct Graph
data = copy(graph.data)
np = graph.np
spp = data.spp
- gadj = [copy(graph.gadj[p0]) for p0 = 1:np]
- gmsk = [[copy(graph.gmsk[p0][j0]) for j0 = 1:length(gadj[p0])] for p0 = 1:np]
- gconstr = [copy(graph.gconstr[p0]) for p0 = 1:np]
- adjdict = [copy(graph.adjdict[p0]) for p0 = 1:np]
+ gadj = [copy(graph.gadj[p0]) for p0 in 1:np]
+ gmsk = [[copy(graph.gmsk[p0][j0]) for j0 in 1:length(gadj[p0])] for p0 in 1:np]
+ gconstr = [copy(graph.gconstr[p0]) for p0 in 1:np]
+ adjdict = [copy(graph.adjdict[p0]) for p0 in 1:np]
req_inds = copy(graph.req_inds)
fix_inds = copy(graph.fix_inds)
ignored = copy(graph.ignored)
- solve_stack = [([copy(gc0) for gc0 in sav_gconstr],copy(sav_ignored)) for (sav_gconstr,sav_ignored) in graph.solve_stack]
+ solve_stack = [([copy(gc0) for gc0 in sav_gconstr], copy(sav_ignored)) for (sav_gconstr, sav_ignored) in graph.solve_stack]
return new(data, gadj, gmsk, gconstr, adjdict, req_inds, fix_inds, ignored, solve_stack, spp, np)
end
@@ -382,11 +438,11 @@ function _add_reqs!(graph::Graph, reqs::Requires, reason; weak_reqs::Set{UUID} =
pdict = graph.data.pdict
pvers = graph.data.pvers
- for (rp,rvs) in reqs
+ for (rp, rvs) in reqs
haskey(pdict, rp) || error("unknown required package $(pkgID(rp, graph))")
rp0 = pdict[rp]
new_constr = trues(spp[rp0])
- for rv0 = 1:(spp[rp0]-1)
+ for rv0 in 1:(spp[rp0] - 1)
rvn = pvers[rp0][rv0]
rvn ∈ rvs || (new_constr[rv0] = false)
end
@@ -401,21 +457,21 @@ function _add_reqs!(graph::Graph, reqs::Requires, reason; weak_reqs::Set{UUID} =
end
"Add fixed packages to the graph, and their requirements."
-function add_fixed!(graph::Graph, fixed::Dict{UUID,Fixed})
+function add_fixed!(graph::Graph, fixed::Dict{UUID, Fixed})
_add_fixed!(graph, fixed)
check_constraints(graph)
# TODO: add fixed to graph data?
return graph
end
-function _add_fixed!(graph::Graph, fixed::Dict{UUID,Fixed})
+function _add_fixed!(graph::Graph, fixed::Dict{UUID, Fixed})
gconstr = graph.gconstr
spp = graph.spp
fix_inds = graph.fix_inds
pdict = graph.data.pdict
vdict = graph.data.vdict
- for (fp,fx) in fixed
+ for (fp, fx) in fixed
haskey(pdict, fp) || error("unknown fixed package $(pkgID(fp, graph))")
fp0 = pdict[fp]
fv0 = vdict[fp0][fx.version]
@@ -424,7 +480,7 @@ function _add_fixed!(graph::Graph, fixed::Dict{UUID,Fixed})
gconstr[fp0] .&= new_constr
push!(fix_inds, fp0)
bkitem = log_event_fixed!(graph, fp, fx)
- _add_reqs!(graph, fx.requires, (fp, bkitem); weak_reqs=fx.weak)
+ _add_reqs!(graph, fx.requires, (fp, bkitem); weak_reqs = fx.weak)
end
return graph
end
@@ -435,7 +491,7 @@ pkgID(p0::Int, data::GraphData) = pkgID(data.pkgs[p0], data)
pkgID(p, graph::Graph) = pkgID(p, graph.data)
## user-friendly representation of package IDs ##
-function pkgID(p::UUID, uuid_to_name::Dict{UUID,String})
+function pkgID(p::UUID, uuid_to_name::Dict{UUID, String})
name = get(uuid_to_name, p, "(unknown)")
uuid_short = string(p)[1:8]
return "$name [$uuid_short]"
@@ -467,18 +523,18 @@ function check_consistency(graph::Graph)
for x in Any[spp, gadj, gmsk, gconstr, adjdict, ignored, rlog.pool, pkgs, pdict, pvers, vdict]
@assert length(x)::Int == np
end
- for p0 = 1:np
+ for p0 in 1:np
@assert pdict[pkgs[p0]] == p0
spp0 = spp[p0]
@assert spp0 ≥ 1
pvers0 = pvers[p0]
vdict0 = vdict[p0]
@assert length(pvers0) == spp0 - 1
- for v0 = 1:(spp0-1)
+ for v0 in 1:(spp0 - 1)
@assert vdict0[pvers0[v0]] == v0
end
- for (vn,v0) in vdict0
- @assert 1 ≤ v0 ≤ spp0-1
+ for (vn, v0) in vdict0
+ @assert 1 ≤ v0 ≤ spp0 - 1
@assert pvers0[v0] == vn
end
gconstr0 = gconstr[p0]
@@ -489,18 +545,18 @@ function check_consistency(graph::Graph)
adjdict0 = adjdict[p0]
@assert length(gmsk0) == length(gadj0)
@assert length(adjdict0) == length(gadj0)
- for (j0,p1) in enumerate(gadj0)
+ for (j0, p1) in enumerate(gadj0)
@assert p1 ≠ p0
@assert adjdict[p1][p0] == j0
spp1 = spp[p1]
- @assert size(gmsk0[j0]) == (spp1,spp0)
+ @assert size(gmsk0[j0]) == (spp1, spp0)
j1 = adjdict0[p1]
gmsk1 = gmsk[p1]
# This assert is a bit too expensive
# @assert gmsk1[j1] == permutedims(gmsk0[j0])
end
end
- for (p,p0) in pdict
+ for (p, p0) in pdict
@assert 1 ≤ p0 ≤ np
@assert pkgs[p0] == p
@assert !haskey(pruned, p)
@@ -515,14 +571,14 @@ function check_consistency(graph::Graph)
@assert count(gconstr[p0]) ≤ 1 # note: the 0 case should be handled by check_constraints
end
- for (p,eq_cl) in eq_classes, (rvn,rvs) in eq_cl
+ for (p, eq_cl) in eq_classes, (rvn, rvs) in eq_cl
@assert rvn ∈ rvs
end
- for (sav_gconstr,sav_ignored) in solve_stack
+ for (sav_gconstr, sav_ignored) in solve_stack
@assert length(sav_ignored) == np
@assert length(sav_gconstr) == np
- for p0 = 1:np
+ for p0 in 1:np
@assert length(sav_gconstr[p0]) == spp[p0]
end
end
@@ -570,8 +626,8 @@ pkgID_color(pkgID) = CONFLICT_COLORS[mod1(hash(pkgID), end)]
logstr(pkgID) = logstr(pkgID, pkgID)
function logstr(pkgID, args...)
# workout the string with the color codes, check stderr to decide if color is enabled
- return sprint(args; context=stderr::IO) do io, iargs
- printstyled(io, iargs...; color=pkgID_color(pkgID))
+ return sprint(args; context = stderr::IO) do io, iargs
+ printstyled(io, iargs...; color = pkgID_color(pkgID))
end
end
@@ -584,7 +640,7 @@ end
Finds a minimal collection of ranges as a `VersionSpec`, that permits everything in the
`subset`, but does not permit anything else from the `pool`.
"""
-function range_compressed_versionspec(pool, subset=pool)
+function range_compressed_versionspec(pool, subset = pool)
length(subset) == 1 && return VersionSpec(only(subset))
# PREM-OPT: we keep re-sorting these, probably not required.
sort!(pool)
@@ -597,7 +653,7 @@ function range_compressed_versionspec(pool, subset=pool)
pool_ii = findfirst(isequal(range_start), pool) + 1 # skip-forward til we have started
for s in @view subset[2:end]
if s != pool[pool_ii]
- range_end = pool[pool_ii-1] # previous element was last in this range
+ range_end = pool[pool_ii - 1] # previous element was last in this range
push!(contiguous_subsets, VersionRange(range_start, range_end))
range_start = s # start a new range
while (s != pool[pool_ii]) # advance til time to start
@@ -616,7 +672,7 @@ function init_log!(data::GraphData)
pkgs = data.pkgs
pvers = data.pvers
rlog = data.rlog
- for p0 = 1:np
+ for p0 in 1:np
p = pkgs[p0]
id = pkgID(p0, data)
versions = pvers[p0]
@@ -655,8 +711,8 @@ function log_event_fixed!(graph::Graph, fp::UUID, fx::Fixed)
end
function _vs_string(p0::Int, vmask::BitVector, id::String, pvers::Vector{Vector{VersionNumber}})
- if any(vmask[1:(end-1)])
- vspec = range_compressed_versionspec(pvers[p0], pvers[p0][vmask[1:(end-1)]])
+ if any(vmask[1:(end - 1)])
+ vspec = range_compressed_versionspec(pvers[p0], pvers[p0][vmask[1:(end - 1)]])
vns = logstr(id, vspec)
vmask[end] && (vns *= " or uninstalled")
else
@@ -678,7 +734,7 @@ function log_event_req!(graph::Graph, rp::UUID, rvs::VersionSpec, reason)
other_entry = nothing
msg *= "an explicit requirement"
else
- other_p, other_entry = reason::Tuple{UUID,ResolveLogEntry}
+ other_p, other_entry = reason::Tuple{UUID, ResolveLogEntry}
if other_p == uuid_julia
msg *= "julia compatibility requirements"
other_entry = nothing # don't propagate the log
@@ -739,7 +795,7 @@ end
function log_event_global!(graph::Graph, msg::String)
rlog = graph.data.rlog
rlog.verbose && @info(msg)
- push!(rlog.globals, (nothing, msg))
+ return push!(rlog.globals, (nothing, msg))
end
function log_event_implicit_req!(graph::Graph, p1::Int, vmask::BitVector, p0::Int)
@@ -754,7 +810,7 @@ function log_event_implicit_req!(graph::Graph, p1::Int, vmask::BitVector, p0::In
other_p, other_entry = pkgs[p0], rlog.pool[pkgs[p0]]
other_id = pkgID(other_p, rlog)
if any(vmask)
- if all(vmask[1:(end-1)]) # Check if all versions are allowed (except uninstalled)
+ if all(vmask[1:(end - 1)]) # Check if all versions are allowed (except uninstalled)
@assert other_p ≠ uuid_julia
msg = "required (without additional version restrictions) by $(logstr(other_id))"
else
@@ -847,7 +903,7 @@ function log_event_maxsumsolved!(graph::Graph, p0::Int, s0::Int, why::Symbol)
if s0 == spp[p0] - 1
msg = "set by the solver to its maximum version: $ver"
else
- xver = logstr(id, pvers[p0][s0+1])
+ xver = logstr(id, pvers[p0][s0 + 1])
msg = "set by the solver to version: $ver (version $xver would violate its constraints)"
end
end
@@ -870,7 +926,7 @@ function log_event_maxsumsolved!(graph::Graph, p0::Int, s0::Int, p1::Int)
if s0 == spp[p0] - 1
msg = "set by the solver to its maximum version: $ver (installation is required by $other_id)"
else
- xver = logstr(id, pvers[p0][s0+1])
+ xver = logstr(id, pvers[p0][s0 + 1])
msg = "set by the solver version: $ver (version $xver would violate a dependency relation with $other_id)"
end
other_entry = rlog.pool[pkgs[p1]]
@@ -890,8 +946,8 @@ function log_event_eq_classes!(graph::Graph, p0::Int)
id = pkgID(p, rlog)
msg = "versions reduced by equivalence to: "
- if any(gconstr[p0][1:(end-1)])
- vspec = range_compressed_versionspec(pvers[p0], pvers[p0][gconstr[p0][1:(end-1)]])
+ if any(gconstr[p0][1:(end - 1)])
+ vspec = range_compressed_versionspec(pvers[p0], pvers[p0][gconstr[p0][1:(end - 1)]])
msg *= logstr(id, vspec)
gconstr[p0][end] && (msg *= " or uninstalled")
elseif gconstr[p0][end]
@@ -945,11 +1001,12 @@ function showlog(io::IO, rlog::ResolveLog; view::Symbol = :plain)
seen = IdDict()
recursive = (view === :tree)
_show(io, rlog, rlog.globals, _logindent, seen, false)
- initentries = Union{ResolveLogEntry,Nothing}[event[1]::Union{ResolveLogEntry,Nothing} for event in rlog.init.events]
- for entry in sort!(initentries, by=(entry->pkgID(entry.pkg, rlog)))
+ initentries = Union{ResolveLogEntry, Nothing}[event[1]::Union{ResolveLogEntry, Nothing} for event in rlog.init.events]
+ for entry in sort!(initentries, by = (entry -> pkgID(entry.pkg, rlog)))
seen[entry] = true
_show(io, rlog, entry, _logindent, seen, recursive)
end
+ return
end
ansi_length(s) = textwidth(replace(s, r"\e\[[0-9]+(?:;[0-9]+)*m" => ""))
@@ -957,13 +1014,14 @@ ansi_length(s) = textwidth(replace(s, r"\e\[[0-9]+(?:;[0-9]+)*m" => ""))
function showlogjournal(io::IO, rlog::ResolveLog)
journal = rlog.journal
id(p) = p == UUID0 ? "[global event]" : logstr(pkgID(p, rlog))
- padding = maximum(ansi_length(id(p)) for (p,_) in journal; init=0)
- for (p,msg) in journal
+ padding = maximum(ansi_length(id(p)) for (p, _) in journal; init = 0)
+ for (p, msg) in journal
s = id(p)
l = ansi_length(s)
pad = max(0, padding - l)
println(io, ' ', s, ' '^pad, ": ", msg)
end
+ return
end
"""
@@ -974,24 +1032,26 @@ the same as for `showlog(io, rlog)`); the default is `:tree`.
function showlog(io::IO, rlog::ResolveLog, p::UUID; view::Symbol = :tree)
view ∈ [:plain, :tree] || throw(ArgumentError("the view argument should be `:plain` or `:tree`"))
entry = rlog.pool[p]
- if view === :tree
- _show(io, rlog, entry, _logindent, IdDict{Any,Any}(entry=>true), true)
+ return if view === :tree
+ _show(io, rlog, entry, _logindent, IdDict{Any, Any}(entry => true), true)
else
entries = ResolveLogEntry[entry]
- function getentries(entry)
- for (other_entry,_) in entry.events
- (other_entry ≡ nothing || other_entry ∈ entries) && continue
- push!(entries, other_entry)
- getentries(other_entry)
- end
- end
- getentries(entry)
+ collect_log_entries!(entries, entry)
for entry in entries
_show(io, rlog, entry, _logindent, IdDict(), false)
end
end
end
+function collect_log_entries!(entries::Vector{ResolveLogEntry}, entry::ResolveLogEntry)
+ for (other_entry, _) in entry.events
+ (other_entry ≡ nothing || other_entry ∈ entries) && continue
+ push!(entries, other_entry)
+ collect_log_entries!(entries, other_entry)
+ end
+ return
+end
+
# Show a recursive tree with requirements applied to a package, either directly or indirectly
function _show(io::IO, rlog::ResolveLog, entry::ResolveLogEntry, indent::String, seen::IdDict, recursive::Bool)
toplevel = (indent == _logindent)
@@ -999,11 +1059,11 @@ function _show(io::IO, rlog::ResolveLog, entry::ResolveLogEntry, indent::String,
pre = toplevel ? "" : " "
println(io, indent, firstglyph, entry.header)
l = length(entry.events)
- for (i,(otheritem,msg)) in enumerate(entry.events)
+ for (i, (otheritem, msg)) in enumerate(entry.events)
if !isempty(msg)
- print(io, indent * pre, (i==l ? '└' : '├'), '─')
+ print(io, indent * pre, (i == l ? '└' : '├'), '─')
println(io, msg)
- newindent = indent * pre * (i==l ? " " : "│ ")
+ newindent = indent * pre * (i == l ? " " : "│ ")
else
newindent = indent
end
@@ -1016,6 +1076,7 @@ function _show(io::IO, rlog::ResolveLog, entry::ResolveLogEntry, indent::String,
seen[otheritem] = true
_show(io, rlog, otheritem, newindent, seen, recursive)
end
+ return
end
is_julia(graph::Graph, p0::Int) = graph.data.pkgs[p0] == uuid_julia
@@ -1030,7 +1091,7 @@ function check_constraints(graph::Graph)
id(p0::Int) = pkgID(p0, graph)
- for p0 = 1:np
+ for p0 in 1:np
any(gconstr[p0]) && continue
if exact
err_msg = "Unsatisfiable requirements detected for package $(logstr(id(p0))):\n"
@@ -1068,16 +1129,22 @@ function propagate_constraints!(graph::Graph, sources::Set{Int} = Set{Int}(); lo
# unless otherwise specified, start from packages which
# are not allowed to be uninstalled
staged = isempty(sources) ?
- Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end]) :
+ Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end]) :
sources
seen = copy(staged)
+ staged_next = Set{Int}()
+
+ # Pre-allocate workspace for added constraints
+ max_spp = maximum(spp, init = 0)
+ added_constr1 = BitVector(undef, max_spp)
+ old_gconstr1 = BitVector(undef, max_spp)
while !isempty(staged)
staged_next = Set{Int}()
for p0 in staged
gconstr0 = gconstr[p0]
- for (j1,p1) in enumerate(gadj[p0])
+ for (j1, p1) in enumerate(gadj[p0])
# if p1 is ignored, the relation between it and all its neighbors
# has already been propagated
ignored[p1] && continue
@@ -1086,16 +1153,28 @@ function propagate_constraints!(graph::Graph, sources::Set{Int} = Set{Int}(); lo
pkgs[p1] == uuid_julia && continue
msk = gmsk[p0][j1]
- # consider the sub-mask with only allowed versions of p0
- sub_msk = msk[:,gconstr0]
# if an entire row of the sub-mask is false, that version of p1
# is effectively forbidden
# (this is just like calling `any` row-wise)
- added_constr1 = any!(BitVector(undef, spp[p1]), sub_msk)
+ # sub_msk = msk[:, gconstr0]
+ # added_constr1 = any!(BitVector(undef, spp[p1]), sub_msk)
+ # The code below is equivalent to the shorter code above, but avoids allocating
+ spp1 = spp[p1]
+ resize!(added_constr1, spp1)
+ fill!(added_constr1, false)
+ for v1 in 1:spp1
+ for v0 in 1:spp[p0]
+ if gconstr0[v0] && msk[v1, v0]
+ added_constr1[v1] = true
+ break
+ end
+ end
+ end
+
# apply the new constraints, checking for contradictions
# (keep the old ones for comparison)
gconstr1 = gconstr[p1]
- old_gconstr1 = copy(gconstr1)
+ copy!(old_gconstr1, gconstr1)
gconstr1 .&= added_constr1
# if the new constraints are more restrictive than the
# previous ones, record it and propagate them next
@@ -1139,15 +1218,15 @@ function disable_unreachable!(graph::Graph, sources::Set{Int} = Set{Int}())
log_event_global!(graph, "disabling unreachable nodes")
# 2nd argument are packages which are not allowed to be uninstalled
- staged = union(sources, Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end]))
+ staged = union(sources, Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end]))
seen = copy(staged)
while !isempty(staged)
staged_next = Set{Int}()
for p0 in staged
- gconstr0idx = findall(gconstr[p0][1:(end-1)])
- for (j1,p1) in enumerate(gadj[p0])
- all(gmsk[p0][j1][end,gconstr0idx]) && continue # the package is not required by any of the allowed versions of p0
+ gconstr0idx = findall(gconstr[p0][1:(end - 1)])
+ for (j1, p1) in enumerate(gadj[p0])
+ all(gmsk[p0][j1][end, gconstr0idx]) && continue # the package is not required by any of the allowed versions of p0
p1 ∈ seen || push!(staged_next, p1)
end
end
@@ -1156,7 +1235,7 @@ function disable_unreachable!(graph::Graph, sources::Set{Int} = Set{Int}())
end
# Force uninstalled state for all unseen packages
- for p0 = 1:np
+ for p0 in 1:np
p0 ∈ seen && continue
gconstr0 = gconstr[p0]
@assert gconstr0[end]
@@ -1196,10 +1275,10 @@ function validate_versions!(graph::Graph, sources::Set{Int} = Set{Int}(); skim::
log_event_global!(graph, "validating versions [mode=$(skim ? "skim" : "deep")]")
- sumspp = sum(count(gconstr[p0]) for p0 = 1:np)
+ sumspp = sum(count(gconstr[p0]) for p0 in 1:np)
# TODO: better data structure (need a FIFO queue with fast membership loopup)
- squeue = union(sources, Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end]))
+ squeue = union(sources, Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end]))
isempty(squeue) && (squeue = Set{Int}(1:np))
queue = collect(squeue)
@@ -1234,7 +1313,7 @@ function validate_versions!(graph::Graph, sources::Set{Int} = Set{Int}(); skim::
unsat = !any(gconstr0)
if unsat
# we'll trigger a failure by pinning the highest version
- v0 = findlast(old_gconstr0[1:(end-1)])
+ v0 = findlast(old_gconstr0[1:(end - 1)])
@assert v0 ≢ nothing # this should be ensured by a previous pruning
# @info "pinning $(logstr(id(p0))) to version $(pvers[p0][v0])"
log_event_pin!(graph, pkgs[p0], pvers[p0][v0])
@@ -1257,9 +1336,9 @@ function validate_versions!(graph::Graph, sources::Set{Int} = Set{Int}(); skim::
end
end
- sumspp_new = sum(count(gconstr[p0]) for p0 = 1:np)
+ sumspp_new = sum(count(gconstr[p0]) for p0 in 1:np)
- log_event_global!(graph, "versions validation completed, stats (total n. of states): before = $(sumspp) after = $(sumspp_new) diff = $(sumspp-sumspp_new)")
+ log_event_global!(graph, "versions validation completed, stats (total n. of states): before = $(sumspp) after = $(sumspp_new) diff = $(sumspp - sumspp_new)")
return graph, changed
end
@@ -1274,8 +1353,15 @@ function compute_eq_classes!(graph::Graph)
np = graph.np
sumspp = sum(graph.spp)
- for p0 = 1:np
- build_eq_classes1!(graph, p0)
+
+ # Preallocate workspace matrix - make it large enough for worst case
+ max_rows = maximum(1 + sum(size(m, 1) for m in graph.gmsk[p0]; init = 0) for p0 in 1:np; init = 1)
+ max_cols = maximum(graph.spp; init = 1)
+ cmat_workspace = BitMatrix(undef, max_rows, max_cols)
+ cvecs_workspace = [BitVector(undef, max_rows) for _ in 1:max_cols]
+
+ for p0 in 1:np
+ build_eq_classes1!(graph, p0, cmat_workspace, cvecs_workspace)
end
log_event_global!(graph, "computed version equivalence classes, stats (total n. of states): before = $(sumspp) after = $(sum(graph.spp))")
@@ -1285,7 +1371,7 @@ function compute_eq_classes!(graph::Graph)
return graph
end
-function build_eq_classes1!(graph::Graph, p0::Int)
+function build_eq_classes1!(graph::Graph, p0::Int, cmat_workspace::BitMatrix, cvecs_workspace::Vector{BitVector})
np = graph.np
spp = graph.spp
gadj = graph.gadj
@@ -1301,8 +1387,24 @@ function build_eq_classes1!(graph::Graph, p0::Int)
# concatenate all the constraints; the columns of the
# result encode the behavior of each version
- cmat = vcat(BitMatrix(permutedims(gconstr[p0])), gmsk[p0]...)
- cvecs = [cmat[:,v0] for v0 = 1:spp[p0]]
+ # cmat = vcat(BitMatrix(permutedims(gconstr[p0])), gmsk[p0]...)
+ ncols = spp[p0]
+ cmat_workspace[1, 1:ncols] = gconstr[p0]
+ row_idx = 2
+ for j1 in 1:length(gmsk[p0])
+ msk = gmsk[p0][j1]
+ nrows_msk = size(msk, 1)
+ cmat_workspace[row_idx:(row_idx + nrows_msk - 1), 1:ncols] = msk
+ row_idx += nrows_msk
+ end
+
+ # cvecs = [cmat[:, v0] for v0 in 1:spp[p0]]
+ nrows = row_idx - 1
+ cvecs = view(cvecs_workspace, 1:ncols)
+ for v0 in 1:ncols
+ resize!(cvecs[v0], nrows)
+ copy!(cvecs[v0], view(cmat_workspace, 1:nrows, v0))
+ end
# find unique behaviors
repr_vecs = unique(cvecs)
@@ -1314,7 +1416,7 @@ function build_eq_classes1!(graph::Graph, p0::Int)
# group versions into sets that behave identically
eq_sets = [Set{Int}(v0 for v0 in 1:spp[p0] if cvecs[v0] == rvec) for rvec in repr_vecs]
- sort!(eq_sets, by=maximum)
+ sort!(eq_sets, by = maximum)
# each set is represented by its highest-valued member
repr_vers = map(maximum, eq_sets)
@@ -1324,7 +1426,7 @@ function build_eq_classes1!(graph::Graph, p0::Int)
# update equivalence classes
eq_vn(v0) = (v0 == spp[p0] ? nothing : pvers[p0][v0])
eq_classes0 = eq_classes[pkgs[p0]]
- for (v0,rvs) in zip(repr_vers, eq_sets)
+ for (v0, rvs) in zip(repr_vers, eq_sets)
@assert v0 ∈ rvs
vn0 = eq_vn(v0)
for v1 in rvs
@@ -1339,16 +1441,16 @@ function build_eq_classes1!(graph::Graph, p0::Int)
# reduce the constraints and the interaction matrices
spp[p0] = neq
gconstr[p0] = gconstr[p0][repr_vers]
- for (j1,p1) in enumerate(gadj[p0])
- gmsk[p0][j1] = gmsk[p0][j1][:,repr_vers]
+ for (j1, p1) in enumerate(gadj[p0])
+ gmsk[p0][j1] = gmsk[p0][j1][:, repr_vers]
j0 = adjdict[p0][p1]
- gmsk[p1][j0] = gmsk[p1][j0][repr_vers,:]
+ gmsk[p1][j0] = gmsk[p1][j0][repr_vers, :]
end
# reduce/rebuild version dictionaries
- pvers[p0] = pvers[p0][repr_vers[1:(end-1)]]
- vdict[p0] = Dict(vn => i for (i,vn) in enumerate(pvers[p0]))
+ pvers[p0] = pvers[p0][repr_vers[1:(end - 1)]]
+ vdict[p0] = Dict(vn => i for (i, vn) in enumerate(pvers[p0]))
# put a record in the log
log_event_eq_classes!(graph, p0)
@@ -1365,14 +1467,14 @@ function compute_eq_classes_soft!(graph::Graph; log_events::Bool = true)
ignored = graph.ignored
gconstr = graph.gconstr
- sumspp = sum(count(gconstr[p0]) for p0 = 1:np)
- for p0 = 1:np
+ sumspp = sum(count(gconstr[p0]) for p0 in 1:np)
+ for p0 in 1:np
ignored[p0] && continue
build_eq_classes_soft1!(graph, p0)
end
- sumspp_new = sum(count(gconstr[p0]) for p0 = 1:np)
+ sumspp_new = sum(count(gconstr[p0]) for p0 in 1:np)
- log_events && log_event_global!(graph, "computed version equivalence classes, stats (total n. of states): before = $(sumspp) after = $(sumspp_new) diff = $(sumspp_new-sumspp)")
+ log_events && log_event_global!(graph, "computed version equivalence classes, stats (total n. of states): before = $(sumspp) after = $(sumspp_new) diff = $(sumspp_new - sumspp)")
@assert check_consistency(graph)
@@ -1393,7 +1495,7 @@ function build_eq_classes_soft1!(graph::Graph, p0::Int)
gmsk0 = gmsk[p0]
gconstr0 = gconstr[p0]
eff_spp0 = count(gconstr0)
- cvecs = BitVector[vcat(BitVector(), (gmsk0[j1][gconstr[gadj0[j1]],v0] for j1 = 1:length(gadj0) if !ignored[gadj0[j1]])...) for v0 in findall(gconstr0)]
+ cvecs = BitVector[vcat(BitVector(), (gmsk0[j1][gconstr[gadj0[j1]], v0] for j1 in 1:length(gadj0) if !ignored[gadj0[j1]])...) for v0 in findall(gconstr0)]
@assert length(cvecs) == eff_spp0
@@ -1407,7 +1509,7 @@ function build_eq_classes_soft1!(graph::Graph, p0::Int)
# group versions into sets that behave identically
# each set is represented by its highest-valued member
- repr_vers = sort!(Int[findlast(isequal(repr_vecs[w0]), cvecs) for w0 = 1:neq])
+ repr_vers = sort!(Int[findlast(isequal(repr_vecs[w0]), cvecs) for w0 in 1:neq])
@assert all(>(0), repr_vers)
@assert repr_vers[end] == eff_spp0
@@ -1428,7 +1530,7 @@ function update_ignored!(graph::Graph)
gconstr = graph.gconstr
ignored = graph.ignored
- for p0 = 1:np
+ for p0 in 1:np
ignored[p0] = (count(gconstr[p0]) == 1)
end
@@ -1459,15 +1561,15 @@ function prune_graph!(graph::Graph)
# We will remove all packages that only have one allowed state
# (includes fixed packages and forbidden packages)
- pkg_mask = BitVector(count(gconstr[p0]) ≠ 1 for p0 = 1:np)
+ pkg_mask = BitVector(count(gconstr[p0]) ≠ 1 for p0 in 1:np)
new_np = count(pkg_mask)
# a map that translates the new index ∈ 1:new_np into its
# corresponding old index ∈ 1:np
old_idx = findall(pkg_mask)
# the reverse of the above
- new_idx = Dict{Int,Int}()
- for new_p0 = 1:new_np
+ new_idx = Dict{Int, Int}()
+ for new_p0 in 1:new_np
new_idx[old_idx[new_p0]] = new_p0
end
@@ -1500,7 +1602,7 @@ function prune_graph!(graph::Graph)
# Update packages records
new_pkgs = pkgs[pkg_mask]
- new_pdict = Dict(new_pkgs[new_p0]=>new_p0 for new_p0 = 1:new_np)
+ new_pdict = Dict(new_pkgs[new_p0] => new_p0 for new_p0 in 1:new_np)
new_ignored = ignored[pkg_mask]
empty!(graph.solve_stack)
@@ -1508,26 +1610,26 @@ function prune_graph!(graph::Graph)
# versions that aren't allowed (but not the "uninstalled" state)
function keep_vers(new_p0)
p0 = old_idx[new_p0]
- return BitVector((v0 == spp[p0]) | gconstr[p0][v0] for v0 = 1:spp[p0])
+ return BitVector((v0 == spp[p0]) | gconstr[p0][v0] for v0 in 1:spp[p0])
end
- vers_mask = [keep_vers(new_p0) for new_p0 = 1:new_np]
+ vers_mask = [keep_vers(new_p0) for new_p0 in 1:new_np]
# Update number of states per package
- new_spp = Int[count(vers_mask[new_p0]) for new_p0 = 1:new_np]
+ new_spp = Int[count(vers_mask[new_p0]) for new_p0 in 1:new_np]
# Update versions maps
function compute_pvers(new_p0)
p0 = old_idx[new_p0]
pvers0 = pvers[p0]
vmsk0 = vers_mask[new_p0]
- return pvers0[vmsk0[1:(end-1)]]
+ return pvers0[vmsk0[1:(end - 1)]]
end
- new_pvers = [compute_pvers(new_p0) for new_p0 = 1:new_np]
+ new_pvers = [compute_pvers(new_p0) for new_p0 in 1:new_np]
# explicitly writing out the following loop since the generator equivalent caused type inference failure
new_vdict = Vector{Dict{VersionNumber, Int}}(undef, length(new_pvers))
for new_p0 in eachindex(new_vdict)
- new_vdict[new_p0] = Dict(vn => v0 for (v0,vn) in enumerate(new_pvers[new_p0]))
+ new_vdict[new_p0] = Dict(vn => v0 for (v0, vn) in enumerate(new_pvers[new_p0]))
end
# The new constraints are all going to be `true`, except possibly
@@ -1538,13 +1640,13 @@ function prune_graph!(graph::Graph)
new_gconstr0[end] = gconstr[p0][end]
return new_gconstr0
end
- new_gconstr = [compute_gconstr(new_p0) for new_p0 = 1:new_np]
+ new_gconstr = [compute_gconstr(new_p0) for new_p0 in 1:new_np]
# Recreate the graph adjacency list, skipping some packages
- new_gadj = [Int[] for new_p0 = 1:new_np]
- new_adjdict = [Dict{Int,Int}() for new_p0 = 1:new_np]
+ new_gadj = [Int[] for new_p0 in 1:new_np]
+ new_adjdict = [Dict{Int, Int}() for new_p0 in 1:new_np]
- for new_p0 = 1:new_np, (j1,p1) in enumerate(gadj[old_idx[new_p0]])
+ for new_p0 in 1:new_np, (j1, p1) in enumerate(gadj[old_idx[new_p0]])
pkg_mask[p1] || continue
new_p1 = new_idx[p1]
@@ -1552,7 +1654,7 @@ function prune_graph!(graph::Graph)
new_j1 = get(new_adjdict[new_p0], new_p1, length(new_gadj[new_p1]) + 1)
@assert (new_j0 > length(new_gadj[new_p0]) && new_j1 > length(new_gadj[new_p1])) ||
- (new_j0 ≤ length(new_gadj[new_p0]) && new_j1 ≤ length(new_gadj[new_p1]))
+ (new_j0 ≤ length(new_gadj[new_p0]) && new_j1 ≤ length(new_gadj[new_p1]))
new_j0 > length(new_gadj[new_p0]) || continue
push!(new_gadj[new_p0], new_p1)
@@ -1570,16 +1672,16 @@ function prune_graph!(graph::Graph)
new_p1 = new_gadj[new_p0][new_j0]
p1 = old_idx[new_p1]
j0 = adjdict[p1][p0]
- return gmsk[p0][j0][vers_mask[new_p1],vers_mask[new_p0]]
+ return gmsk[p0][j0][vers_mask[new_p1], vers_mask[new_p0]]
end
- new_gmsk = [[compute_gmsk(new_p0, new_j0) for new_j0 = 1:length(new_gadj[new_p0])] for new_p0 = 1:new_np]
+ new_gmsk = [[compute_gmsk(new_p0, new_j0) for new_j0 in 1:length(new_gadj[new_p0])] for new_p0 in 1:new_np]
# Reduce log pool (the other items are still reachable through rlog.init)
- rlog.pool = Dict(p=>rlog.pool[p] for p in new_pkgs)
+ rlog.pool = Dict(p => rlog.pool[p] for p in new_pkgs)
# Done
- log_event_global!(graph, "pruned graph — stats (n. of packages, mean connectivity): before = ($np,$(sum(spp)/length(spp))) after = ($new_np,$(sum(new_spp)/length(new_spp)))")
+ log_event_global!(graph, "pruned graph — stats (n. of packages, mean connectivity): before = ($np,$(sum(spp) / length(spp))) after = ($new_np,$(sum(new_spp) / length(new_spp)))")
# Replace old data with new
data.pkgs = new_pkgs
@@ -1619,7 +1721,7 @@ function simplify_graph!(graph::Graph, sources::Set{Int} = Set{Int}(); validate_
compute_eq_classes!(graph)
prune_graph!(graph)
if validate_versions
- _, changed = validate_versions!(graph, sources; skim=true)
+ _, changed = validate_versions!(graph, sources; skim = true)
if changed
compute_eq_classes!(graph)
prune_graph!(graph)
diff --git a/src/Resolve/maxsum.jl b/src/Resolve/maxsum.jl
index 1c683a6bbc..7fb71f12c7 100644
--- a/src/Resolve/maxsum.jl
+++ b/src/Resolve/maxsum.jl
@@ -9,10 +9,14 @@ mutable struct MaxSumParams
max_time::Float64 # maximum allowed time
function MaxSumParams()
- accuracy = parse(Int, get(ENV, "JULIA_PKG_RESOLVE_ACCURACY",
- # Allow for `JULIA_PKGRESOLVE_ACCURACY` for backward
- # compatibility with Julia v1.7-
- get(ENV, "JULIA_PKGRESOLVE_ACCURACY", "1")))
+ accuracy = parse(
+ Int, get(
+ ENV, "JULIA_PKG_RESOLVE_ACCURACY",
+ # Allow for `JULIA_PKGRESOLVE_ACCURACY` for backward
+ # compatibility with Julia v1.7-
+ get(ENV, "JULIA_PKGRESOLVE_ACCURACY", "1")
+ )
+ )
accuracy > 0 || error("JULIA_PKG_RESOLVE_ACCURACY must be > 0")
dec_interval = accuracy * 5
dec_fraction = 0.05 / accuracy
@@ -50,19 +54,19 @@ mutable struct Messages
pdict = graph.data.pdict
## generate wveights (v0 == spp[p0] is the "uninstalled" state)
- vweight = [[VersionWeight(v0 < spp[p0] ? pvers[p0][v0] : v"0") for v0 = 1:spp[p0]] for p0 = 1:np]
+ vweight = [[VersionWeight(v0 < spp[p0] ? pvers[p0][v0] : v"0") for v0 in 1:spp[p0]] for p0 in 1:np]
# external fields: favor newest versions over older, and no-version over all;
# explicit requirements use level l1 instead of l2
fv(p0, v0) = p0 ∈ req_inds ?
- FieldValue(0, vweight[p0][v0], zero(VersionWeight), (v0==spp[p0])) :
- FieldValue(0, zero(VersionWeight), vweight[p0][v0], (v0==spp[p0]))
- fld = [[fv(p0, v0) for v0 = 1:spp[p0]] for p0 = 1:np]
+ FieldValue(0, vweight[p0][v0], zero(VersionWeight), (v0 == spp[p0])) :
+ FieldValue(0, zero(VersionWeight), vweight[p0][v0], (v0 == spp[p0]))
+ fld = [[fv(p0, v0) for v0 in 1:spp[p0]] for p0 in 1:np]
initial_fld = [copy(f0) for f0 in fld]
# allocate cavity messages
- msg = [[Field(undef, spp[p0]) for j1 = 1:length(gadj[p0])] for p0 = 1:np]
+ msg = [[Field(undef, spp[p0]) for j1 in 1:length(gadj[p0])] for p0 in 1:np]
msgs = new(msg, fld, initial_fld)
@@ -82,12 +86,12 @@ function reset_messages!(msgs::Messages, graph::Graph)
spp = graph.spp
gconstr = graph.gconstr
ignored = graph.ignored
- for p0 = 1:np
+ for p0 in 1:np
ignored[p0] && continue
- map(m->fill!(m, zero(FieldValue)), msg[p0])
+ map(m -> fill!(m, zero(FieldValue)), msg[p0])
copyto!(fld[p0], initial_fld[p0])
gconstr0 = gconstr[p0]
- for v0 = 1:spp[p0]
+ for v0 in 1:spp[p0]
gconstr0[v0] || (fld[p0][v0] = FieldValue(-1))
end
fld[p0] .-= maximum(fld[p0])
@@ -101,7 +105,7 @@ mutable struct SolutionTrace
num_nondecimated::Int
best::Vector{Int}
- staged::Union{Tuple{Int,Int},Nothing}
+ staged::Union{Tuple{Int, Int}, Nothing}
function SolutionTrace(graph::Graph)
np = graph.np
@@ -190,13 +194,13 @@ function update!(p0::Int, graph::Graph, msgs::Messages)
# newmsg = [maximum(cavfld[bm1[:,v1]]) for v1 = 1:spp1]
# )
# This is hot code for the resolver
- @inbounds for v1 = 1:spp1, v0 = 1:spp0
+ @inbounds for v1 in 1:spp1, v0 in 1:spp0
bm1[v0, v1] || continue
newmsg[v1] = max(newmsg[v1], cavfld[v0])
end
m = maximum(newmsg)
validmax(m) || return Unsat(p0) # No state available without violating some
- # hard constraint
+ # hard constraint
# normalize the new message
@inbounds for i in 1:length(newmsg)
@@ -226,12 +230,13 @@ end
function Random.shuffle!(perm::NodePerm)
p = perm.p
- for j = length(p):-1:2
+ for j in length(p):-1:2
k = perm.step % j + 1
p[j], p[k] = p[k], p[j]
perm.step += isodd(j) ? 1 : k
end
#@assert isperm(p)
+ return
end
Base.iterate(perm::NodePerm, state...) = iterate(perm.p, state...)
@@ -271,7 +276,7 @@ function decimate1!(p0::Int, graph::Graph, strace::SolutionTrace, msgs::Messages
haskey(adjdict[p0], p1) || continue
s1 = solution[p1]
j1 = adjdict[p0][p1]
- gmsk[p1][j1][s0,s1] || return 0
+ gmsk[p1][j1][s0, s1] || return 0
end
solution[p0] = s0
strace.num_nondecimated -= 1
@@ -285,14 +290,14 @@ function decimate!(graph::Graph, strace::SolutionTrace, msgs::Messages, n::Integ
fld = msgs.fld
@assert n ≥ 1
- dtrace = Tuple{Int,Int}[]
+ dtrace = Tuple{Int, Int}[]
dec = 0
- fldorder = sort(findall(.!(ignored)), by=p0->secondmax(fld[p0], gconstr[p0]))
+ fldorder = sort(findall(.!(ignored)), by = p0 -> secondmax(fld[p0], gconstr[p0]))
for p0 in fldorder
s0 = decimate1!(p0, graph, strace, msgs)
s0 == 0 && continue
- push!(dtrace, (p0,s0))
+ push!(dtrace, (p0, s0))
dec += 1
dec == n && break
end
@@ -305,15 +310,15 @@ function clean_forbidden!(graph::Graph, msgs::Messages)
gconstr = graph.gconstr
ignored = graph.ignored
fld = msgs.fld
- affected = Tuple{Int,Int}[]
+ affected = Tuple{Int, Int}[]
- for p0 = 1:np
+ for p0 in 1:np
ignored[p0] && continue
fld0 = fld[p0]
gconstr0 = gconstr[p0]
for v0 in findall(gconstr0)
validmax(fld0[v0]) && continue
- push!(affected, (p0,v0))
+ push!(affected, (p0, v0))
end
end
return affected
@@ -377,7 +382,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
# perform some maxsum iterations, then decimate one node.
# If failure happens during this process, we bail (return :unsat)
it = 0
- for it = 1:params.dec_interval
+ for it in 1:params.dec_interval
maxdiff = iterate!(graph, msgs, perm)
if maxdiff isa Unsat
if is_best_sofar
@@ -397,7 +402,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
isempty(affected) && @goto decimate
sources = Set{Int}()
- for (p0,v0) in affected
+ for (p0, v0) in affected
graph.gconstr[p0][v0] = false
push!(sources, p0)
end
@@ -419,7 +424,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
if is_best_sofar
# pick the first decimation candidate
smx(p1) = secondmax(msgs.fld[p1], graph.gconstr[p1])
- p0 = reduce((p1,p2)->(smx(p1)≤smx(p2) ? p1 : p2), findall(.!(graph.ignored)))
+ p0 = reduce((p1, p2) -> (smx(p1) ≤ smx(p2) ? p1 : p2), findall(.!(graph.ignored)))
s0 = argmax(fld[p0])
strace.staged = dec_firstcandidate(graph, msgs)
end
@@ -437,7 +442,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
push_snapshot!(graph)
# info("setting dtrace=$dtrace")
- for (p0,s0) in dtrace
+ for (p0, s0) in dtrace
@assert !graph.ignored[p0]
@assert graph.gconstr[p0][s0]
fill!(graph.gconstr[p0], false)
@@ -467,7 +472,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
lentr == 1 && break
# halve the dtrace
- deleteat!(dtrace, ((lentr÷2)+1):lentr)
+ deleteat!(dtrace, ((lentr ÷ 2) + 1):lentr)
end
@assert length(dtrace) == 1
diff --git a/src/Resolve/versionweights.jl b/src/Resolve/versionweights.jl
index 5113dd83c9..5af27881db 100644
--- a/src/Resolve/versionweights.jl
+++ b/src/Resolve/versionweights.jl
@@ -14,19 +14,19 @@ VersionWeight(vn::VersionNumber) = VersionWeight(vn.major, vn.minor, vn.patch)
Base.zero(::Type{VersionWeight}) = VersionWeight()
-Base.typemin(::Type{VersionWeight}) = (x=typemin(Int64); VersionWeight(x, x, x))
+Base.typemin(::Type{VersionWeight}) = (x = typemin(Int64); VersionWeight(x, x, x))
Base.:(-)(a::VersionWeight, b::VersionWeight) =
- VersionWeight(a.major-b.major, a.minor-b.minor, a.patch-b.patch)
+ VersionWeight(a.major - b.major, a.minor - b.minor, a.patch - b.patch)
Base.:(+)(a::VersionWeight, b::VersionWeight) =
- VersionWeight(a.major+b.major, a.minor+b.minor, a.patch+b.patch)
+ VersionWeight(a.major + b.major, a.minor + b.minor, a.patch + b.patch)
Base.:(-)(a::VersionWeight) =
VersionWeight(-a.major, -a.minor, -a.patch)
function Base.isless(a::VersionWeight, b::VersionWeight)
- (a.major, a.minor, a.patch) < (b.major, b.minor, b.patch)
+ return (a.major, a.minor, a.patch) < (b.major, b.minor, b.patch)
end
Base.abs(a::VersionWeight) =
@@ -40,5 +40,5 @@ function Base.show(io::IO, a::VersionWeight)
a == VersionWeight(a.major, a.minor) && @goto done
print(io, ".", a.patch)
@label done
- print(io, ")")
+ return print(io, ")")
end
diff --git a/src/Types.jl b/src/Types.jl
index 859b93221a..4765f252b0 100644
--- a/src/Types.jl
+++ b/src/Types.jl
@@ -10,7 +10,7 @@ import Base.string
using TOML
import ..Pkg, ..Registry
-import ..Pkg: GitTools, depots, depots1, logdir, set_readonly, safe_realpath, pkg_server, stdlib_dir, stdlib_path, isurl, stderr_f, RESPECT_SYSIMAGE_VERSIONS
+import ..Pkg: GitTools, depots, depots1, logdir, set_readonly, safe_realpath, pkg_server, stdlib_path, isurl, stderr_f, RESPECT_SYSIMAGE_VERSIONS, atomic_toml_write, create_cachedir_tag, normalize_path_for_toml
import Base.BinaryPlatforms: Platform
using ..Pkg.Versions
import FileWatching
@@ -19,7 +19,7 @@ import Base: SHA1
using SHA
export UUID, SHA1, VersionRange, VersionSpec,
- PackageSpec, PackageEntry, EnvCache, Context, GitRepo, Context!, Manifest, Project, err_rep,
+ PackageSpec, PackageEntry, EnvCache, Context, GitRepo, Context!, Manifest, ManifestRegistryEntry, Project, err_rep,
PkgError, pkgerror,
has_name, has_uuid, is_stdlib, is_or_was_stdlib, stdlib_version, is_unregistered_stdlib, stdlibs, stdlib_infos, write_env, write_env_usage, parse_toml,
project_resolve!, project_deps_resolve!, manifest_resolve!, registry_resolve!, stdlib_resolve!, handle_repos_develop!, handle_repos_add!, ensure_resolved,
@@ -75,62 +75,68 @@ Base.showerror(io::IO, err::PkgError) = print(io, err.msg)
@enum(PreserveLevel, PRESERVE_ALL_INSTALLED, PRESERVE_ALL, PRESERVE_DIRECT, PRESERVE_SEMVER, PRESERVE_TIERED, PRESERVE_TIERED_INSTALLED, PRESERVE_NONE)
@enum(PackageMode, PKGMODE_PROJECT, PKGMODE_MANIFEST, PKGMODE_COMBINED)
-const VersionTypes = Union{VersionNumber,VersionSpec,UpgradeLevel}
+const VersionTypes = Union{VersionNumber, VersionSpec, UpgradeLevel}
Base.@kwdef mutable struct GitRepo
- source::Union{Nothing,String} = nothing
- rev::Union{Nothing,String} = nothing
+ source::Union{Nothing, String} = nothing
+ rev::Union{Nothing, String} = nothing
subdir::Union{String, Nothing} = nothing
end
Base.:(==)(r1::GitRepo, r2::GitRepo) =
r1.source == r2.source && r1.rev == r2.rev && r1.subdir == r2.subdir
-
+Base.hash(r::GitRepo, h::UInt) =
+ foldr(hash, [r.source, r.rev, r.subdir], init = h)
mutable struct PackageSpec
- name::Union{Nothing,String}
- uuid::Union{Nothing,UUID}
- version::Union{Nothing,VersionTypes,String}
- tree_hash::Union{Nothing,SHA1}
- repo::GitRepo
- path::Union{Nothing,String}
+ name::Union{Nothing, String}
+ uuid::Union{Nothing, UUID}
+ version::Union{Nothing, VersionTypes, String}
+ tree_hash::Union{Nothing, SHA1}
+ repo::GitRepo # private
+ path::Union{Nothing, String}
pinned::Bool
# used for input only
url::Union{Nothing, String}
rev::Union{Nothing, String}
subdir::Union{Nothing, String}
-
end
-function PackageSpec(; name::Union{Nothing,AbstractString} = nothing,
- uuid::Union{Nothing,UUID,AbstractString} = nothing,
- version::Union{Nothing,VersionTypes,AbstractString} = VersionSpec(),
- tree_hash::Union{Nothing,SHA1} = nothing,
- repo::GitRepo = GitRepo(),
- path::Union{Nothing,AbstractString} = nothing,
- pinned::Bool = false,
- url = nothing,
- rev = nothing,
- subdir = nothing)
+function PackageSpec(;
+ name::Union{Nothing, AbstractString} = nothing,
+ uuid::Union{Nothing, UUID, AbstractString} = nothing,
+ version::Union{Nothing, VersionTypes, AbstractString} = VersionSpec(),
+ tree_hash::Union{Nothing, SHA1} = nothing,
+ repo::GitRepo = GitRepo(),
+ path::Union{Nothing, AbstractString} = nothing,
+ pinned::Bool = false,
+ url = nothing,
+ rev = nothing,
+ subdir = nothing,
+ )
uuid = uuid === nothing ? nothing : UUID(uuid)
return PackageSpec(name, uuid, version, tree_hash, repo, path, pinned, url, rev, subdir)
end
-PackageSpec(name::AbstractString) = PackageSpec(;name=name)::PackageSpec
-PackageSpec(name::AbstractString, uuid::UUID) = PackageSpec(;name=name, uuid=uuid)::PackageSpec
-PackageSpec(name::AbstractString, version::VersionTypes) = PackageSpec(;name=name, version=version)::PackageSpec
-PackageSpec(n::AbstractString, u::UUID, v::VersionTypes) = PackageSpec(;name=n, uuid=u, version=v)::PackageSpec
+PackageSpec(name::AbstractString) = PackageSpec(; name = name)::PackageSpec
+PackageSpec(name::AbstractString, uuid::UUID) = PackageSpec(; name = name, uuid = uuid)::PackageSpec
+PackageSpec(name::AbstractString, version::VersionTypes) = PackageSpec(; name = name, version = version)::PackageSpec
+PackageSpec(n::AbstractString, u::UUID, v::VersionTypes) = PackageSpec(; name = n, uuid = u, version = v)::PackageSpec
+# XXX: These definitions are a bit fishy. It seems to be used in an `==` call in status printing
function Base.:(==)(a::PackageSpec, b::PackageSpec)
return a.name == b.name && a.uuid == b.uuid && a.version == b.version &&
- a.tree_hash == b.tree_hash && a.repo == b.repo && a.path == b.path &&
- a.pinned == b.pinned
+ a.tree_hash == b.tree_hash && a.repo == b.repo && a.path == b.path &&
+ a.pinned == b.pinned
+end
+function Base.hash(a::PackageSpec, h::UInt)
+ return foldr(hash, [a.name, a.uuid, a.version, a.tree_hash, a.repo, a.path, a.pinned], init = h)
end
-function err_rep(pkg::PackageSpec)
+function err_rep(pkg::PackageSpec; quotes::Bool = true)
x = pkg.name !== nothing && pkg.uuid !== nothing ? x = "$(pkg.name) [$(string(pkg.uuid)[1:8])]" :
pkg.name !== nothing ? pkg.name :
pkg.uuid !== nothing ? string(pkg.uuid)[1:8] :
pkg.repo.source
- return "`$x`"
+ return quotes ? "`$x`" : x
end
has_name(pkg::PackageSpec) = pkg.name !== nothing
@@ -163,14 +169,14 @@ function Base.show(io::IO, pkg::PackageSpec)
for (field, value) in f
print(io, " ", field, " = ", string(value)::String, "\n")
end
- print(io, ")")
+ return print(io, ")")
end
############
# EnvCache #
############
-function projectfile_path(env_path::String; strict=false)
+function projectfile_path(env_path::String; strict = false)
for name in Base.project_names
maybe_file = joinpath(env_path, name)
isfile(maybe_file) && return maybe_file
@@ -178,8 +184,8 @@ function projectfile_path(env_path::String; strict=false)
return strict ? nothing : joinpath(env_path, "Project.toml")
end
-function manifestfile_path(env_path::String; strict=false)
- for name in Base.manifest_names
+function manifestfile_path(env_path::String; strict = false)
+ for name in (Base.manifest_names..., "AppManifest.toml")
maybe_file = joinpath(env_path, name)
isfile(maybe_file) && return maybe_file
end
@@ -197,7 +203,7 @@ function manifestfile_path(env_path::String; strict=false)
end
end
-function find_project_file(env::Union{Nothing,String}=nothing)
+function find_project_file(env::Union{Nothing, String} = nothing)
project_file = nothing
if env isa Nothing
project_file = Base.active_project()
@@ -215,14 +221,18 @@ function find_project_file(env::Union{Nothing,String}=nothing)
end
end
if isfile(project_file) && !contains(basename(project_file), "Project")
- pkgerror("""
- The active project has been set to a file that isn't a Project file: $project_file
- The project path must be to a Project file or directory.
- """)
+ pkgerror(
+ """
+ The active project has been set to a file that isn't a Project file: $project_file
+ The project path must be to a Project file or directory.
+ """
+ )
end
@assert project_file isa String &&
- (isfile(project_file) || !ispath(project_file) ||
- isdir(project_file) && isempty(readdir(project_file)))
+ (
+ isfile(project_file) || !ispath(project_file) ||
+ isdir(project_file) && isempty(readdir(project_file))
+ )
return Pkg.safe_realpath(project_file)
end
@@ -233,46 +243,58 @@ end
Base.:(==)(t1::Compat, t2::Compat) = t1.val == t2.val
Base.hash(t::Compat, h::UInt) = hash(t.val, h)
+struct AppInfo
+ name::String
+ julia_command::Union{String, Nothing}
+ submodule::Union{String, Nothing}
+ julia_flags::Vector{String}
+ other::Dict{String, Any}
+end
Base.@kwdef mutable struct Project
- other::Dict{String,Any} = Dict{String,Any}()
+ other::Dict{String, Any} = Dict{String, Any}()
# Fields
name::Union{String, Nothing} = nothing
uuid::Union{UUID, Nothing} = nothing
version::Union{VersionTypes, Nothing} = nothing
manifest::Union{String, Nothing} = nothing
entryfile::Union{String, Nothing} = nothing
+ julia_syntax_version::Union{VersionTypes, Nothing} = nothing
# Sections
- deps::Dict{String,UUID} = Dict{String,UUID}()
+ deps::Dict{String, UUID} = Dict{String, UUID}()
# deps that are also in weakdeps for backwards compat
# we do not store them in deps because we want to ignore them
# but for writing out the project file we need to remember them:
- _deps_weak::Dict{String,UUID} = Dict{String,UUID}()
- weakdeps::Dict{String,UUID} = Dict{String,UUID}()
- exts::Dict{String,Union{Vector{String}, String}} = Dict{String,String}()
- extras::Dict{String,UUID} = Dict{String,UUID}()
- targets::Dict{String,Vector{String}} = Dict{String,Vector{String}}()
- compat::Dict{String,Compat} = Dict{String,Compat}()
- sources::Dict{String,Dict{String, String}} = Dict{String,Dict{String, String}}()
+ _deps_weak::Dict{String, UUID} = Dict{String, UUID}()
+ weakdeps::Dict{String, UUID} = Dict{String, UUID}()
+ exts::Dict{String, Union{Vector{String}, String}} = Dict{String, String}()
+ extras::Dict{String, UUID} = Dict{String, UUID}()
+ targets::Dict{String, Vector{String}} = Dict{String, Vector{String}}()
+ apps::Dict{String, AppInfo} = Dict{String, AppInfo}()
+ compat::Dict{String, Compat} = Dict{String, Compat}()
+ sources::Dict{String, Dict{String, String}} = Dict{String, Dict{String, String}}()
workspace::Dict{String, Any} = Dict{String, Any}()
+ readonly::Bool = false
end
Base.:(==)(t1::Project, t2::Project) = all(x -> (getfield(t1, x) == getfield(t2, x))::Bool, fieldnames(Project))
-Base.hash(t::Project, h::UInt) = foldr(hash, [getfield(t, x) for x in fieldnames(Project)], init=h)
-
+Base.hash(t::Project, h::UInt) = foldr(hash, [getfield(t, x) for x in fieldnames(Project)], init = h)
Base.@kwdef mutable struct PackageEntry
- name::Union{String,Nothing} = nothing
- version::Union{VersionNumber,Nothing} = nothing
- path::Union{String,Nothing} = nothing
- entryfile::Union{String,Nothing} = nothing
+ name::Union{String, Nothing} = nothing
+ version::Union{VersionNumber, Nothing} = nothing
+ path::Union{String, Nothing} = nothing
+ entryfile::Union{String, Nothing} = nothing
pinned::Bool = false
repo::GitRepo = GitRepo()
- tree_hash::Union{Nothing,SHA1} = nothing
- deps::Dict{String,UUID} = Dict{String,UUID}()
- weakdeps::Dict{String,UUID} = Dict{String,UUID}()
- exts::Dict{String,Union{Vector{String}, String}} = Dict{String,String}()
+ tree_hash::Union{Nothing, SHA1} = nothing
+ deps::Dict{String, UUID} = Dict{String, UUID}()
+ weakdeps::Dict{String, UUID} = Dict{String, UUID}()
+ exts::Dict{String, Union{Vector{String}, String}} = Dict{String, String}()
uuid::Union{Nothing, UUID} = nothing
- other::Union{Dict,Nothing} = nothing
+ apps::Dict{String, AppInfo} = Dict{String, AppInfo}() # used by AppManifest.toml
+ registries::Vector{String} = String[]
+ julia_syntax_version::Union{VersionTypes, Nothing} = nothing
+ other::Union{Dict, Nothing} = nothing
end
Base.:(==)(t1::PackageEntry, t2::PackageEntry) = t1.name == t2.name &&
t1.version == t2.version &&
@@ -284,19 +306,43 @@ Base.:(==)(t1::PackageEntry, t2::PackageEntry) = t1.name == t2.name &&
t1.deps == t2.deps &&
t1.weakdeps == t2.weakdeps &&
t1.exts == t2.exts &&
- t1.uuid == t2.uuid
- # omits `other`
-Base.hash(x::PackageEntry, h::UInt) = foldr(hash, [x.name, x.version, x.path, x.entryfile, x.pinned, x.repo, x.tree_hash, x.deps, x.weakdeps, x.exts, x.uuid], init=h) # omits `other`
+ t1.uuid == t2.uuid &&
+ t1.apps == t2.apps &&
+ t1.julia_syntax_version == t2.julia_syntax_version &&
+ t1.registries == t2.registries
+# omits `other`
+Base.hash(x::PackageEntry, h::UInt) = foldr(hash, [x.name, x.version, x.path, x.entryfile, x.pinned, x.repo, x.tree_hash, x.deps, x.weakdeps, x.exts, x.uuid, x.registries, x.julia_syntax_version], init = h) # omits `other`
+
+"""
+ ManifestRegistryEntry
+
+Metadata about a registry referenced from a manifest. `id` is the stable key written
+to the manifest (typically the registry name, falling back to UUID on collision).
+Only `uuid` and `url` are written to the manifest file.
+"""
+Base.@kwdef mutable struct ManifestRegistryEntry
+ id::String
+ uuid::UUID
+ url::Union{Nothing, String} = nothing
+end
+Base.:(==)(t1::ManifestRegistryEntry, t2::ManifestRegistryEntry) =
+ t1.id == t2.id &&
+ t1.uuid == t2.uuid &&
+ t1.url == t2.url
+Base.hash(x::ManifestRegistryEntry, h::UInt) =
+ foldr(hash, (x.id, x.uuid, x.url), init = h)
+
Base.@kwdef mutable struct Manifest
- julia_version::Union{Nothing,VersionNumber} = nothing # only set to VERSION when resolving
- project_hash::Union{Nothing,SHA1} = nothing
+ julia_version::Union{Nothing, VersionNumber} = nothing # only set to VERSION when resolving
+ project_hash::Union{Nothing, SHA1} = nothing
manifest_format::VersionNumber = v"2.0.0"
- deps::Dict{UUID,PackageEntry} = Dict{UUID,PackageEntry}()
- other::Dict{String,Any} = Dict{String,Any}()
+ deps::Dict{UUID, PackageEntry} = Dict{UUID, PackageEntry}()
+ registries::Dict{String, ManifestRegistryEntry} = Dict{String, ManifestRegistryEntry}()
+ other::Dict{String, Any} = Dict{String, Any}()
end
Base.:(==)(t1::Manifest, t2::Manifest) = all(x -> (getfield(t1, x) == getfield(t2, x))::Bool, fieldnames(Manifest))
-Base.hash(m::Manifest, h::UInt) = foldr(hash, [getfield(m, x) for x in fieldnames(Manifest)], init=h)
+Base.hash(m::Manifest, h::UInt) = foldr(hash, [getfield(m, x) for x in fieldnames(Manifest)], init = h)
Base.getindex(m::Manifest, i_or_key) = getindex(m.deps, i_or_key)
Base.get(m::Manifest, key, default) = get(m.deps, key, default)
Base.setindex!(m::Manifest, i_or_key, value) = setindex!(m.deps, i_or_key, value)
@@ -308,21 +354,23 @@ Base.values(m::Manifest) = values(m.deps)
Base.keys(m::Manifest) = keys(m.deps)
Base.haskey(m::Manifest, key) = haskey(m.deps, key)
+
function Base.show(io::IO, pkg::PackageEntry)
f = []
- pkg.name !== nothing && push!(f, "name" => pkg.name)
- pkg.version !== nothing && push!(f, "version" => pkg.version)
- pkg.tree_hash !== nothing && push!(f, "tree_hash" => pkg.tree_hash)
- pkg.path !== nothing && push!(f, "dev/path" => pkg.path)
- pkg.pinned && push!(f, "pinned" => pkg.pinned)
- pkg.repo.source !== nothing && push!(f, "url/path" => "`$(pkg.repo.source)`")
- pkg.repo.rev !== nothing && push!(f, "rev" => pkg.repo.rev)
- pkg.repo.subdir !== nothing && push!(f, "subdir" => pkg.repo.subdir)
+ pkg.name !== nothing && push!(f, "name" => pkg.name)
+ pkg.version !== nothing && push!(f, "version" => pkg.version)
+ pkg.tree_hash !== nothing && push!(f, "tree_hash" => pkg.tree_hash)
+ pkg.path !== nothing && push!(f, "dev/path" => pkg.path)
+ pkg.pinned && push!(f, "pinned" => pkg.pinned)
+ pkg.repo.source !== nothing && push!(f, "url/path" => "`$(pkg.repo.source)`")
+ pkg.repo.rev !== nothing && push!(f, "rev" => pkg.repo.rev)
+ pkg.repo.subdir !== nothing && push!(f, "subdir" => pkg.repo.subdir)
+ !isempty(pkg.registries) && push!(f, "registries" => pkg.registries)
print(io, "PackageEntry(\n")
for (field, value) in f
print(io, " ", field, " = ", value, "\n")
end
- print(io, ")")
+ return print(io, ")")
end
function find_root_base_project(start_project::String)
@@ -332,14 +380,15 @@ function find_root_base_project(start_project::String)
base_project_file === nothing && return project_file
project_file = base_project_file
end
+ return
end
-function collect_workspace(base_project_file::String, d::Dict{String, Project}=Dict{String, Project}())
+function collect_workspace(base_project_file::String, d::Dict{String, Project} = Dict{String, Project}())
base_project = read_project(base_project_file)
d[base_project_file] = base_project
base_project_file_dir = dirname(base_project_file)
- projects = get(base_project.workspace, "projects", nothing)::Union{Nothing,Vector{String}}
+ projects = get(base_project.workspace, "projects", nothing)::Union{Nothing, Vector{String}}
projects === nothing && return d
project_paths = [abspath(base_project_file_dir, project) for project in projects]
for project_path in project_paths
@@ -353,7 +402,7 @@ end
mutable struct EnvCache
# environment info:
- env::Union{Nothing,String}
+ env::Union{Nothing, String}
# paths for files:
project_file::String
manifest_file::String
@@ -361,14 +410,14 @@ mutable struct EnvCache
pkg::Union{PackageSpec, Nothing}
# cache of metadata:
project::Project
- workspace::Dict{String,Project} # paths relative to base
+ workspace::Dict{String, Project} # paths relative to base
manifest::Manifest
# What these where at creation of the EnvCache
original_project::Project
original_manifest::Manifest
end
-function EnvCache(env::Union{Nothing,String}=nothing)
+function EnvCache(env::Union{Nothing, String} = nothing)
# @show env
project_file = find_project_file(env)
# @show project_file
@@ -405,7 +454,8 @@ function EnvCache(env::Union{Nothing,String}=nothing)
write_env_usage(manifest_file, "manifest_usage.toml")
manifest = read_manifest(manifest_file)
- env′ = EnvCache(env,
+ env′ = EnvCache(
+ env,
project_file,
manifest_file,
project_package,
@@ -414,11 +464,27 @@ function EnvCache(env::Union{Nothing,String}=nothing)
manifest,
deepcopy(project),
deepcopy(manifest),
- )
+ )
return env′
end
+# Convert a path from project-relative to manifest-relative
+# If path is absolute, returns it as-is
+function project_path_to_manifest_path(project_file::String, manifest_file::String, path::String)
+ isabspath(path) && return path
+ abs_path = Pkg.safe_realpath(joinpath(dirname(project_file), path))
+ return relpath(abs_path, Pkg.safe_realpath(dirname(manifest_file)))
+end
+
+# Convert a path from manifest-relative to project-relative
+# If path is absolute, returns it as-is
+function manifest_path_to_project_path(project_file::String, manifest_file::String, path::String)
+ isabspath(path) && return path
+ abs_path = Pkg.safe_realpath(joinpath(dirname(manifest_file), path))
+ return relpath(abs_path, Pkg.safe_realpath(dirname(project_file)))
+end
+
include("project.jl")
include("manifest.jl")
@@ -443,7 +509,7 @@ Base.@kwdef mutable struct Context
registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()
# The Julia Version to resolve with respect to
- julia_version::Union{VersionNumber,Nothing} = VERSION
+ julia_version::Union{VersionNumber, Nothing} = VERSION
end
project_uuid(env::EnvCache) = project_uuid(env.project, env.project_file)
@@ -460,25 +526,25 @@ is_project_uuid(env::EnvCache, uuid::UUID) = project_uuid(env) == uuid
# Context #
###########
-const FORMER_STDLIBS = ["DelimitedFiles", "Statistics"]
-const FORMER_STDLIBS_UUIDS = Set{UUID}()
-const STDLIB = Ref{DictStdLibs}()
+const UPGRADABLE_STDLIBS = ["DelimitedFiles", "Statistics"]
+const UPGRADABLE_STDLIBS_UUIDS = Set{UUID}()
+const STDLIB = Ref{Union{DictStdLibs, Nothing}}(nothing)
function load_stdlib()
stdlib = DictStdLibs()
- for name in readdir(stdlib_dir())
- projfile = projectfile_path(stdlib_path(name); strict=true)
+ for name in readdir(Sys.STDLIB)
+ projfile = projectfile_path(stdlib_path(name); strict = true)
nothing === projfile && continue
project = parse_toml(projfile)
uuid = get(project, "uuid", nothing)::Union{String, Nothing}
v_str = get(project, "version", nothing)::Union{String, Nothing}
version = isnothing(v_str) ? nothing : VersionNumber(v_str)
nothing === uuid && continue
- if name in FORMER_STDLIBS
- push!(FORMER_STDLIBS_UUIDS, UUID(uuid))
+ if name in UPGRADABLE_STDLIBS
+ push!(UPGRADABLE_STDLIBS_UUIDS, UUID(uuid))
continue
end
- deps = UUID.(values(get(project, "deps", Dict{String,Any}())))
- weakdeps = UUID.(values(get(project, "weakdeps", Dict{String,Any}())))
+ deps = UUID.(values(get(project, "deps", Dict{String, Any}())))
+ weakdeps = UUID.(values(get(project, "weakdeps", Dict{String, Any}())))
stdlib[UUID(uuid)] = StdlibInfo(name, Base.UUID(uuid), version, deps, weakdeps)
end
return stdlib
@@ -491,7 +557,7 @@ function stdlibs()
return Dict(uuid => (info.name, info.version) for (uuid, info) in stdlib_infos())
end
function stdlib_infos()
- if !isassigned(STDLIB)
+ if STDLIB[] === nothing
STDLIB[] = load_stdlib()
end
return STDLIB[]
@@ -499,12 +565,12 @@ end
is_stdlib(uuid::UUID) = uuid in keys(stdlib_infos())
# Includes former stdlibs
function is_or_was_stdlib(uuid::UUID, julia_version::Union{VersionNumber, Nothing})
- return is_stdlib(uuid, julia_version) || uuid in FORMER_STDLIBS_UUIDS
+ return is_stdlib(uuid, julia_version) || uuid in UPGRADABLE_STDLIBS_UUIDS
end
function historical_stdlibs_check()
- if isempty(STDLIBS_BY_VERSION)
+ return if isempty(STDLIBS_BY_VERSION)
pkgerror("If you want to set `julia_version`, you must first populate the `STDLIBS_BY_VERSION` global constant. Try `using HistoricalStdlibVersions`")
end
end
@@ -518,12 +584,23 @@ function get_last_stdlibs(julia_version::VersionNumber; use_historical_for_curre
end
historical_stdlibs_check()
last_stdlibs = UNREGISTERED_STDLIBS
+ last_version = nothing
+
for (version, stdlibs) in STDLIBS_BY_VERSION
+ if !isnothing(last_version) && last_version > version
+ pkgerror("STDLIBS_BY_VERSION must be sorted by version number")
+ end
if VersionNumber(julia_version.major, julia_version.minor, julia_version.patch) < version
break
end
last_stdlibs = stdlibs
+ last_version = version
end
+ # Serving different patches is safe-ish, but different majors or minors is most likely not.
+ if last_version !== nothing && (last_version.major != julia_version.major || last_version.minor != julia_version.minor)
+ pkgerror("Could not find a julia version in STDLIBS_BY_VERSION that matches the major & minor version of requested julia_version v$(julia_version)")
+ end
+
return last_stdlibs
end
# If `julia_version` is set to `nothing`, that means (essentially) treat all registered
@@ -551,7 +628,7 @@ end
# `nothing` if that stdlib is not versioned. We only store version numbers for
# stdlibs that are external and thus could be installed from their repositories,
# e.g. things like `GMP_jll`, `Tar`, etc...
-function stdlib_version(uuid::UUID, julia_version::Union{VersionNumber,Nothing})
+function stdlib_version(uuid::UUID, julia_version::Union{VersionNumber, Nothing})
last_stdlibs = get_last_stdlibs(julia_version)
if !(uuid in keys(last_stdlibs))
return nothing
@@ -564,12 +641,22 @@ function is_unregistered_stdlib(uuid::UUID)
return haskey(UNREGISTERED_STDLIBS, uuid)
end
-Context!(kw_context::Vector{Pair{Symbol,Any}})::Context =
+Context!(kw_context::Vector{Pair{Symbol, Any}})::Context =
Context!(Context(); kw_context...)
function Context!(ctx::Context; kwargs...)
for (k, v) in kwargs
setfield!(ctx, k, v)
end
+
+ # Highlight for logging purposes if julia_version is set to a different version than current VERSION
+ if haskey(kwargs, :julia_version) && ctx.julia_version !== nothing && ctx.julia_version != VERSION
+ Pkg.printpkgstyle(
+ ctx.io, :Context,
+ "Pkg is operating with julia_version set to `$(ctx.julia_version)`",
+ color = Base.warn_color()
+ )
+ end
+
return ctx
end
@@ -586,29 +673,40 @@ end
# only hash the deps and compat fields as they are the only fields that affect a resolve
function workspace_resolve_hash(env::EnvCache)
# Handle deps in both [deps] and [weakdeps]
- deps = Dict(pkg.name => pkg.uuid for pkg in Pkg.Operations.load_direct_deps(env))
+ deps = Dict{String, UUID}()
+ for pkg in Pkg.Operations.load_direct_deps(env)
+ deps[pkg.name] = pkg.uuid
+ end
weakdeps = load_workspace_weak_deps(env)
alldeps = merge(deps, weakdeps)
- compats = Dict(name => Pkg.Operations.get_compat_workspace(env, name) for (name, uuid) in alldeps)
+ compats = Dict{String, VersionSpec}()
+ for (name, uuid) in alldeps
+ compats[name] = Pkg.Operations.get_compat_workspace(env, name)
+ end
iob = IOBuffer()
- for (name, uuid) in sort!(collect(deps); by=first)
+ for (name, uuid) in sort!(collect(deps); by = first)
println(iob, name, "=", uuid)
end
println(iob)
- for (name, uuid) in sort!(collect(weakdeps); by=first)
+ for (name, uuid) in sort!(collect(weakdeps); by = first)
println(iob, name, "=", uuid)
end
println(iob)
- for (name, compat) in sort!(collect(compats); by=first)
+ for (name, compat) in sort!(collect(compats); by = first)
println(iob, name, "=", compat)
end
str = String(take!(iob))
return bytes2hex(sha1(str))
end
-function write_env_usage(source_file::AbstractString, usage_filepath::AbstractString)
+
+write_env_usage(source_file::AbstractString, usage_filepath::AbstractString) =
+ write_env_usage([source_file], usage_filepath)
+
+function write_env_usage(source_files, usage_filepath::AbstractString)
# Don't record ghost usage
- !isfile(source_file) && return
+ source_files = filter(isfile, source_files)
+ isempty(source_files) && return
# Ensure that log dir exists
!ispath(logdir()) && mkpath(logdir())
@@ -630,7 +728,9 @@ function write_env_usage(source_file::AbstractString, usage_filepath::AbstractSt
end
# record new usage
- usage[source_file] = [Dict("time" => timestamp)]
+ for source_file in source_files
+ usage[source_file] = [Dict("time" => timestamp)]
+ end
# keep only latest usage info
for k in keys(usage)
@@ -646,15 +746,10 @@ function write_env_usage(source_file::AbstractString, usage_filepath::AbstractSt
usage[k] = [Dict("time" => maximum(times))]
end
- tempfile = tempname()
try
- open(tempfile, "w") do io
- TOML.print(io, usage, sorted=true)
- end
- TOML.parsefile(tempfile) # compare to `usage` ?
- mv(tempfile, usage_file; force=true) # only mv if parse succeeds
+ atomic_toml_write(usage_file, usage, sorted = true)
catch err
- @error "Failed to write valid usage file `$usage_file`" tempfile
+ @error "Failed to write valid usage file `$usage_file`" exception = err
end
end
return
@@ -677,13 +772,21 @@ function read_package(path::String)
return project
end
-const refspecs = ["+refs/*:refs/remotes/cache/*"]
+const refspecs = ["+refs/heads/*:refs/cache/heads/*"]
+const refspecs_fallback = ["+refs/*:refs/cache/*"]
+
+function looks_like_commit_hash(rev::AbstractString)
+ # Commit hashes are 7-40 hex characters
+ return occursin(r"^[0-9a-f]{7,40}$"i, rev)
+end
function relative_project_path(project_file::String, path::String)
# compute path relative the project
# realpath needed to expand symlinks before taking the relative path
- return relpath(Pkg.safe_realpath(abspath(path)),
- Pkg.safe_realpath(dirname(project_file)))
+ return relpath(
+ Pkg.safe_realpath(abspath(path)),
+ Pkg.safe_realpath(dirname(project_file))
+ )
end
function devpath(env::EnvCache, name::AbstractString, shared::Bool)
@@ -699,7 +802,7 @@ function error_if_in_sysimage(pkg::PackageSpec)
return false
end
pkgid = Base.PkgId(pkg.uuid, pkg.name)
- if Base.in_sysimage(pkgid)
+ return if Base.in_sysimage(pkgid)
pkgerror("Tried to develop or add by URL package $(pkgid) which is already in the sysimage, use `Pkg.respect_sysimage_versions(false)` to disable this check.")
end
end
@@ -707,8 +810,17 @@ end
function handle_repo_develop!(ctx::Context, pkg::PackageSpec, shared::Bool)
# First, check if we can compute the path easily (which requires a given local path or name)
is_local_path = pkg.repo.source !== nothing && !isurl(pkg.repo.source)
+ # Preserve whether the original source was an absolute path - needed later to decide how to store the path
+ original_source_was_absolute = is_local_path && isabspath(pkg.repo.source)
+
if is_local_path || pkg.name !== nothing
- dev_path = is_local_path ? pkg.repo.source : devpath(ctx.env, pkg.name, shared)
+ # Resolve manifest-relative paths to absolute paths for file system operations
+ dev_path = if is_local_path
+ isabspath(pkg.repo.source) ? pkg.repo.source :
+ Pkg.manifest_rel_path(ctx.env, pkg.repo.source)
+ else
+ devpath(ctx.env, pkg.name, shared)
+ end
if pkg.repo.subdir !== nothing
dev_path = joinpath(dev_path, pkg.repo.subdir)
end
@@ -724,7 +836,7 @@ function handle_repo_develop!(ctx::Context, pkg::PackageSpec, shared::Bool)
resolve_projectfile!(pkg, dev_path)
error_if_in_sysimage(pkg)
if is_local_path
- pkg.path = isabspath(dev_path) ? dev_path : relative_project_path(ctx.env.manifest_file, dev_path)
+ pkg.path = original_source_was_absolute ? dev_path : relative_project_path(ctx.env.manifest_file, dev_path)
else
pkg.path = shared ? dev_path : relative_project_path(ctx.env.manifest_file, dev_path)
end
@@ -753,18 +865,22 @@ function handle_repo_develop!(ctx::Context, pkg::PackageSpec, shared::Bool)
cloned = false
package_path = pkg.repo.subdir === nothing ? repo_path : joinpath(repo_path, pkg.repo.subdir)
if !has_name(pkg)
- LibGit2.close(GitTools.ensure_clone(ctx.io, repo_path, pkg.repo.source))
+ # Resolve manifest-relative path to absolute before passing to git
+ repo_source_resolved = !isurl(pkg.repo.source) && !isabspath(pkg.repo.source) ?
+ Pkg.manifest_rel_path(ctx.env, pkg.repo.source) :
+ pkg.repo.source
+ LibGit2.close(GitTools.ensure_clone(ctx.io, repo_path, repo_source_resolved))
cloned = true
resolve_projectfile!(pkg, package_path)
end
if pkg.repo.subdir !== nothing
- repo_name = split(pkg.repo.source, '/', keepempty=false)[end]
+ repo_name = split(pkg.repo.source, '/', keepempty = false)[end]
# Make the develop path prettier.
if endswith(repo_name, ".git")
- repo_name = chop(repo_name, tail=4)
+ repo_name = chop(repo_name, tail = 4)
end
if endswith(repo_name, ".jl")
- repo_name = chop(repo_name, tail=3)
+ repo_name = chop(repo_name, tail = 3)
end
dev_path = devpath(ctx.env, repo_name, shared)
else
@@ -776,17 +892,27 @@ function handle_repo_develop!(ctx::Context, pkg::PackageSpec, shared::Bool)
else
mkpath(dirname(dev_path))
if !cloned
- LibGit2.close(GitTools.ensure_clone(ctx.io, dev_path, pkg.repo.source))
+ # Resolve manifest-relative path to absolute before passing to git
+ repo_source_resolved = !isurl(pkg.repo.source) && !isabspath(pkg.repo.source) ?
+ Pkg.manifest_rel_path(ctx.env, pkg.repo.source) :
+ pkg.repo.source
+ LibGit2.close(GitTools.ensure_clone(ctx.io, dev_path, repo_source_resolved))
else
mv(repo_path, dev_path)
end
new = true
end
if !has_uuid(pkg)
- resolve_projectfile!(pkg, dev_path)
+ resolve_projectfile!(pkg, joinpath(dev_path, pkg.repo.subdir === nothing ? "" : pkg.repo.subdir))
end
error_if_in_sysimage(pkg)
- pkg.path = shared ? dev_path : relative_project_path(ctx.env.manifest_file, dev_path)
+ # When an explicit local path was given, preserve whether it was absolute or relative
+ # Otherwise, use shared flag to determine if path should be absolute (shared) or relative (local)
+ if is_local_path
+ pkg.path = original_source_was_absolute ? dev_path : relative_project_path(ctx.env.manifest_file, dev_path)
+ else
+ pkg.path = shared ? dev_path : relative_project_path(ctx.env.manifest_file, dev_path)
+ end
if pkg.repo.subdir !== nothing
pkg.path = joinpath(pkg.path, pkg.repo.subdir)
end
@@ -812,15 +938,15 @@ function set_repo_source_from_registry!(ctx, pkg)
registry_resolve!(ctx.registries, pkg)
# Didn't find the package in the registry, but maybe it exists in the updated registry
if !isresolved(pkg)
- Pkg.Operations.update_registries(ctx; force=false)
+ Pkg.Operations.update_registries(ctx; force = false)
registry_resolve!(ctx.registries, pkg)
end
- ensure_resolved(ctx, ctx.env.manifest, [pkg]; registry=true)
+ ensure_resolved(ctx, ctx.env.manifest, [pkg]; registry = true)
# We might have been given a name / uuid combo that does not have an entry in the registry
for reg in ctx.registries
regpkg = get(reg, pkg.uuid, nothing)
regpkg === nothing && continue
- info = Pkg.Registry.registry_info(regpkg)
+ info = Pkg.Registry.registry_info(reg, regpkg)
url = info.repo
url === nothing && continue
pkg.repo.source = url
@@ -839,7 +965,7 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
@assert pkg.repo.rev !== nothing
# First, we try resolving against the manifest and current registry to avoid updating registries if at all possible.
# This also handles the case where we _only_ wish to switch the tracking branch for a package.
- manifest_resolve!(ctx.env.manifest, [pkg]; force=true)
+ manifest_resolve!(ctx.env.manifest, [pkg]; force = true)
if isresolved(pkg)
entry = manifest_info(ctx.env.manifest, pkg.uuid)
if entry !== nothing
@@ -854,30 +980,51 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
@assert pkg.repo.source !== nothing
# We now have the source of the package repo, check if it is a local path and if that exists
- repo_source = pkg.repo.source
+ repo_source = !isurl(pkg.repo.source) && !isabspath(pkg.repo.source) ?
+ normpath(joinpath(dirname(ctx.env.manifest_file), pkg.repo.source)) :
+ pkg.repo.source
if !isurl(pkg.repo.source)
- if isdir(pkg.repo.source)
- if !isdir(joinpath(pkg.repo.source, ".git"))
- msg = "Did not find a git repository at `$(pkg.repo.source)`"
- if isfile(joinpath(pkg.repo.source, "Project.toml")) || isfile(joinpath(pkg.repo.source, "JuliaProject.toml"))
+ if isdir(repo_source)
+ git_path = joinpath(repo_source, ".git")
+ if isfile(git_path)
+ # Git submodule: .git is a file containing path to actual git directory
+ git_ref_content = readline(git_path)
+ git_info_path = joinpath(dirname(git_path), last(split(git_ref_content)))
+ else
+ # Regular git repo: .git is a directory
+ git_info_path = git_path
+ end
+ if !isdir(git_info_path)
+ local msg = "Did not find a git repository at `$(repo_source)`"
+ if isfile(joinpath(repo_source, "Project.toml")) || isfile(joinpath(repo_source, "JuliaProject.toml"))
msg *= ", perhaps you meant `Pkg.develop`?"
end
pkgerror(msg)
end
- LibGit2.with(GitTools.check_valid_HEAD, LibGit2.GitRepo(pkg.repo.source)) # check for valid git HEAD
- pkg.repo.source = isabspath(pkg.repo.source) ? safe_realpath(pkg.repo.source) : relative_project_path(ctx.env.manifest_file, pkg.repo.source)
- repo_source = normpath(joinpath(dirname(ctx.env.manifest_file), pkg.repo.source))
+ LibGit2.with(GitTools.check_valid_HEAD, LibGit2.GitRepo(repo_source)) # check for valid git HEAD
+ let repo_source = repo_source
+ LibGit2.with(LibGit2.GitRepo(repo_source)) do repo
+ if LibGit2.isdirty(repo)
+ @warn "The repository at `$(repo_source)` has uncommitted changes. Consider using `Pkg.develop` instead of `Pkg.add` if you want to work with the current state of the repository."
+ end
+ end
+ end
+ # Store the path: use the original path format (absolute vs relative) as the user provided
+ # Canonicalize repo_source for consistent hashing in cache paths
+ repo_source = safe_realpath(repo_source)
+ pkg.repo.source = isabspath(pkg.repo.source) ? repo_source : relative_project_path(ctx.env.manifest_file, repo_source)
else
- pkgerror("Path `$(pkg.repo.source)` does not exist.")
+ # For error messages, show the absolute path which is more informative than manifest-relative
+ pkgerror("Path `$(repo_source)` does not exist.")
end
end
- let repo_source = repo_source
+ return let repo_source = repo_source
# The type-assertions below are necessary presumably due to julia#36454
- LibGit2.with(GitTools.ensure_clone(ctx.io, add_repo_cache_path(repo_source::Union{Nothing,String}), repo_source::Union{Nothing,String}; isbare=true)) do repo
- repo_source_typed = repo_source::Union{Nothing,String}
+ LibGit2.with(GitTools.ensure_clone(ctx.io, add_repo_cache_path(repo_source::String), repo_source::String; isbare = true, depth = 1)) do repo
+ repo_source_typed = repo_source::Union{Nothing, String}
GitTools.check_valid_HEAD(repo)
-
+ create_cachedir_tag(dirname(add_repo_cache_path(repo_source)))
# If the user didn't specify rev, assume they want the default (master) branch if on a branch, otherwise the current commit
if pkg.repo.rev === nothing
pkg.repo.rev = LibGit2.isattached(repo) ? LibGit2.branch(repo) : string(LibGit2.GitHash(LibGit2.head(repo)))
@@ -887,8 +1034,26 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
fetched = false
if obj_branch === nothing
fetched = true
- GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs=refspecs)
+ rev_or_hash_str = string(rev_or_hash)
+ # For pull requests, fetch the specific PR ref
+ if startswith(rev_or_hash_str, "pull/") && endswith(rev_or_hash_str, "/head")
+ pr_number = rev_or_hash_str[6:(end - 5)] # Extract number from "pull/X/head"
+ pr_refspecs = ["+refs/pull/$(pr_number)/head:refs/cache/pull/$(pr_number)/head"]
+ GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs = pr_refspecs, depth = 1)
+ # For branch names, fetch only the specific branch
+ elseif !looks_like_commit_hash(rev_or_hash_str)
+ specific_refspec = ["+refs/heads/$(rev_or_hash):refs/cache/heads/$(rev_or_hash)"]
+ GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs = specific_refspec, depth = 1)
+ else
+ # For commit hashes, fetch all branches including the older commits
+ GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs = refspecs, depth = LibGit2.Consts.FETCH_DEPTH_UNSHALLOW)
+ end
obj_branch = get_object_or_branch(repo, rev_or_hash)
+ # If still not found, try with broader refspec as fallback
+ if obj_branch === nothing
+ GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs = refspecs_fallback)
+ obj_branch = get_object_or_branch(repo, rev_or_hash)
+ end
if obj_branch === nothing
pkgerror("Did not find rev $(rev_or_hash) in repository")
end
@@ -899,7 +1064,9 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
innerentry = manifest_info(ctx.env.manifest, pkg.uuid)
ispinned = innerentry !== nothing && innerentry.pinned
if isbranch && !fetched && !ispinned
- GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs=refspecs)
+ # Fetch only the specific branch being tracked
+ specific_refspec = ["+refs/heads/$(rev_or_hash):refs/cache/heads/$(rev_or_hash)"]
+ GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs = specific_refspec, depth = 1)
gitobject, isbranch = get_object_or_branch(repo, rev_or_hash)
end
@@ -913,6 +1080,7 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
pkgerror("Did not find subdirectory `$(pkg.repo.subdir)`")
end
end
+ @assert pkg.path === nothing
pkg.tree_hash = SHA1(string(LibGit2.GitHash(tree_hash_object)))
# If we already resolved a uuid, we can bail early if this package is already installed at the current tree_hash
@@ -934,7 +1102,8 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
# Otherwise, move the temporary path into its correct place and set read only
mkpath(version_path)
- mv(temp_path, version_path; force=true)
+ mv(temp_path, version_path; force = true)
+ create_cachedir_tag(dirname(dirname(version_path)))
set_readonly(version_path)
return true
end
@@ -951,16 +1120,20 @@ function handle_repos_add!(ctx::Context, pkgs::AbstractVector{PackageSpec})
end
function resolve_projectfile!(pkg, project_path)
- project_file = projectfile_path(project_path; strict=true)
- project_file === nothing && pkgerror(string("could not find project file (Project.toml or JuliaProject.toml) in package at `",
- something(pkg.repo.source, pkg.path, project_path), "` maybe `subdir` needs to be specified"))
+ project_file = projectfile_path(project_path; strict = true)
+ project_file === nothing && pkgerror(
+ string(
+ "could not find project file (Project.toml or JuliaProject.toml) in package at `",
+ something(pkg.repo.source, pkg.path, project_path), "` maybe `subdir` needs to be specified"
+ )
+ )
project_data = read_package(project_file)
if pkg.uuid === nothing || pkg.uuid == project_data.uuid
pkg.uuid = project_data.uuid
else
pkgerror("UUID `$(project_data.uuid)` given by project file `$project_file` does not match given UUID `$(pkg.uuid)`")
end
- if pkg.name === nothing || pkg.name == project_data.name
+ return if pkg.name === nothing || pkg.name == project_data.name
pkg.name = project_data.name
else
pkgerror("name `$(project_data.name)` given by project file `$project_file` does not match given name `$(pkg.name)`")
@@ -972,8 +1145,18 @@ get_object_or_branch(repo, rev::SHA1) =
# Returns nothing if rev could not be found in repo
function get_object_or_branch(repo, rev)
+ # Handle pull request references
+ if startswith(rev, "pull/") && endswith(rev, "/head")
+ try
+ gitobject = LibGit2.GitObject(repo, "cache/" * rev)
+ return gitobject, true
+ catch err
+ err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
+ end
+ end
+
try
- gitobject = LibGit2.GitObject(repo, "remotes/cache/heads/" * rev)
+ gitobject = LibGit2.GitObject(repo, "cache/heads/" * rev)
return gitobject, true
catch err
err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
@@ -984,6 +1167,12 @@ function get_object_or_branch(repo, rev)
catch err
err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
end
+ try
+ gitobject = LibGit2.GitObject(repo, "heads/" * rev)
+ return gitobject, true
+ catch err
+ err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
+ end
try
gitobject = LibGit2.GitObject(repo, rev)
return gitobject, false
@@ -1006,6 +1195,7 @@ function project_resolve!(env::EnvCache, pkgs::AbstractVector{PackageSpec})
pkg.uuid = env.pkg.uuid
end
end
+ return
end
# Disambiguate name/uuid package specifications using project info.
@@ -1023,12 +1213,13 @@ function project_deps_resolve!(env::EnvCache, pkgs::AbstractVector{PackageSpec})
pkg.name = names[pkg.uuid]
end
end
+ return
end
# Disambiguate name/uuid package specifications using manifest info.
-function manifest_resolve!(manifest::Manifest, pkgs::AbstractVector{PackageSpec}; force=false)
- uuids = Dict{String,Vector{UUID}}()
- names = Dict{UUID,String}()
+function manifest_resolve!(manifest::Manifest, pkgs::AbstractVector{PackageSpec}; force = false)
+ uuids = Dict{String, Vector{UUID}}()
+ names = Dict{UUID, String}()
for (uuid, entry) in manifest
push!(get!(uuids, entry.name, UUID[]), uuid)
names[uuid] = entry.name # can be duplicate but doesn't matter
@@ -1041,6 +1232,7 @@ function manifest_resolve!(manifest::Manifest, pkgs::AbstractVector{PackageSpec}
pkg.name = names[pkg.uuid]
end
end
+ return
end
# Disambiguate name/uuid package specifications using registry info.
@@ -1077,20 +1269,23 @@ function stdlib_resolve!(pkgs::AbstractVector{PackageSpec})
end
end
end
+ return
end
include("fuzzysorting.jl")
# Ensure that all packages are fully resolved
-function ensure_resolved(ctx::Context, manifest::Manifest,
+function ensure_resolved(
+ ctx::Context, manifest::Manifest,
pkgs::AbstractVector{PackageSpec};
- registry::Bool=false,)::Nothing
- unresolved_uuids = Dict{String,Vector{UUID}}()
+ registry::Bool = false,
+ )::Nothing
+ unresolved_uuids = Dict{String, Vector{UUID}}()
for pkg in pkgs
has_uuid(pkg) && continue
!has_name(pkg) && pkgerror("Package $pkg has neither name nor uuid")
uuids = [uuid for (uuid, entry) in manifest if entry.name == pkg.name]
- sort!(uuids, by=uuid -> uuid.value)
+ sort!(uuids, by = uuid -> uuid.value)
unresolved_uuids[pkg.name] = uuids
end
unresolved_names = UUID[]
@@ -1102,7 +1297,7 @@ function ensure_resolved(ctx::Context, manifest::Manifest,
msg = sprint(context = ctx.io) do io
if !isempty(unresolved_uuids)
print(io, "The following package names could not be resolved:")
- for (name, uuids) in sort!(collect(unresolved_uuids), by=lowercase ∘ first)
+ for (name, uuids) in sort!(collect(unresolved_uuids), by = lowercase ∘ first)
print(io, "\n * $name (")
if length(uuids) == 0
what = ["project", "manifest"]
@@ -1157,7 +1352,7 @@ function registered_uuids(registries::Vector{Registry.RegistryInstance}, name::S
return uuids
end
# Determine a single UUID for a given name, prompting if needed
-function registered_uuid(registries::Vector{Registry.RegistryInstance}, name::String)::Union{Nothing,UUID}
+function registered_uuid(registries::Vector{Registry.RegistryInstance}, name::String)::Union{Nothing, UUID}
uuids = registered_uuids(registries, name)
length(uuids) == 0 && return nothing
length(uuids) == 1 && return first(uuids)
@@ -1166,7 +1361,7 @@ function registered_uuid(registries::Vector{Registry.RegistryInstance}, name::St
for reg in registries
pkg = get(reg, uuid, nothing)
pkg === nothing && continue
- info = Pkg.Registry.registry_info(pkg)
+ info = Pkg.Registry.registry_info(reg, pkg)
repo = info.repo
repo === nothing && continue
push!(repo_infos, (reg.name, repo, uuid))
@@ -1177,7 +1372,7 @@ end
# Determine current name for a given package UUID
-function registered_name(registries::Vector{Registry.RegistryInstance}, uuid::UUID)::Union{Nothing,String}
+function registered_name(registries::Vector{Registry.RegistryInstance}, uuid::UUID)::Union{Nothing, String}
name = nothing
for reg in registries
regpkg = get(reg, uuid, nothing)
@@ -1193,20 +1388,22 @@ end
# Find package by UUID in the manifest file
manifest_info(::Manifest, uuid::Nothing) = nothing
-function manifest_info(manifest::Manifest, uuid::UUID)::Union{PackageEntry,Nothing}
+function manifest_info(manifest::Manifest, uuid::UUID)::Union{PackageEntry, Nothing}
return get(manifest, uuid, nothing)
end
-function write_env(env::EnvCache; update_undo=true,
- skip_writing_project::Bool=false)
+function write_env(
+ env::EnvCache; update_undo = true,
+ skip_writing_project::Bool = false,
+ skip_readonly_check::Bool = false
+ )
# Verify that the generated manifest is consistent with `sources`
for (pkg, uuid) in env.project.deps
- path, repo = get_path_repo(env.project, pkg)
+ path, repo = get_path_repo(env.project, env.project_file, env.manifest_file, pkg)
entry = manifest_info(env.manifest, uuid)
if path !== nothing
- @assert entry.path == path
+ @assert normpath(entry.path) == normpath(path)
end
if repo != GitRepo()
- @assert entry.repo.source == repo.source
if repo.rev !== nothing
@assert entry.repo.rev == repo.rev
end
@@ -1214,17 +1411,34 @@ function write_env(env::EnvCache; update_undo=true,
@assert entry.repo.subdir == repo.subdir
end
end
+ if entry !== nothing
+ if entry.path !== nothing
+ # Convert path from manifest-relative to project-relative before writing
+ project_relative_path = manifest_path_to_project_path(env.project_file, env.manifest_file, entry.path)
+ env.project.sources[pkg] = Dict("path" => project_relative_path)
+ elseif entry.repo != GitRepo()
+ d = Dict{String, String}()
+ entry.repo.source !== nothing && (d["url"] = entry.repo.source)
+ entry.repo.rev !== nothing && (d["rev"] = entry.repo.rev)
+ entry.repo.subdir !== nothing && (d["subdir"] = entry.repo.subdir)
+ env.project.sources[pkg] = d
+ end
+ end
+ end
+
+ # Check if the environment is readonly before attempting to write
+ if env.project.readonly && !skip_readonly_check
+ pkgerror("Cannot modify a readonly environment. The project at $(env.project_file) is marked as readonly.")
end
if (env.project != env.original_project) && (!skip_writing_project)
- write_project(env)
+ write_project(env, skip_readonly_check)
end
if env.manifest != env.original_manifest
write_manifest(env)
end
- update_undo && Pkg.API.add_snapshot_to_undo(env)
+ return update_undo && Pkg.API.add_snapshot_to_undo(env)
end
-
end # module
diff --git a/src/Versions.jl b/src/Versions.jl
index 22d489fe1a..0d7b419400 100644
--- a/src/Versions.jl
+++ b/src/Versions.jl
@@ -8,13 +8,13 @@ export VersionBound, VersionRange, VersionSpec, semver_spec, isjoinable
# VersionBound #
################
struct VersionBound
- t::NTuple{3,UInt32}
+ t::NTuple{3, UInt32}
n::Int
- function VersionBound(tin::NTuple{n,Integer}) where n
+ function VersionBound(tin::NTuple{n, Integer}) where {n}
n <= 3 || throw(ArgumentError("VersionBound: you can only specify major, minor and patch versions"))
- n == 0 && return new((0, 0, 0), n)
- n == 1 && return new((tin[1], 0, 0), n)
- n == 2 && return new((tin[1], tin[2], 0), n)
+ n == 0 && return new((0, 0, 0), n)
+ n == 1 && return new((tin[1], 0, 0), n)
+ n == 2 && return new((tin[1], tin[2], 0), n)
n == 3 && return new((tin[1], tin[2], tin[3]), n)
error("invalid $n")
end
@@ -40,7 +40,7 @@ end
function isless_ll(a::VersionBound, b::VersionBound)
m, n = a.n, b.n
- for i = 1:min(m, n)
+ for i in 1:min(m, n)
a[i] < b[i] && return true
a[i] > b[i] && return false
end
@@ -52,7 +52,7 @@ stricterlower(a::VersionBound, b::VersionBound) = isless_ll(a, b) ? b : a
# Comparison between two upper bounds
function isless_uu(a::VersionBound, b::VersionBound)
m, n = a.n, b.n
- for i = 1:min(m, n)
+ for i in 1:min(m, n)
a[i] < b[i] && return true
a[i] > b[i] && return false
end
@@ -70,7 +70,7 @@ function isjoinable(up::VersionBound, lo::VersionBound)
up.n == 0 && lo.n == 0 && return true
if up.n == lo.n
n = up.n
- for i = 1:(n - 1)
+ for i in 1:(n - 1)
up[i] > lo[i] && return true
up[i] < lo[i] && return false
end
@@ -78,7 +78,7 @@ function isjoinable(up::VersionBound, lo::VersionBound)
return true
else
l = min(up.n, lo.n)
- for i = 1:l
+ for i in 1:l
up[i] > lo[i] && return true
up[i] < lo[i] && return false
end
@@ -96,19 +96,19 @@ function VersionBound(s::AbstractString)
l = lastindex(s)
p = findnext('.', s, 1)
- b = p === nothing ? l : (p-1)
+ b = p === nothing ? l : (p - 1)
i = parse(Int64, SubString(s, 1, b))
p === nothing && return VersionBound(i)
- a = p+1
+ a = p + 1
p = findnext('.', s, a)
- b = p === nothing ? l : (p-1)
+ b = p === nothing ? l : (p - 1)
j = parse(Int64, SubString(s, a, b))
p === nothing && return VersionBound(i, j)
- a = p+1
+ a = p + 1
p = findnext('.', s, a)
- b = p === nothing ? l : (p-1)
+ b = p === nothing ? l : (p - 1)
k = parse(Int64, SubString(s, a, b))
p === nothing && return VersionBound(i, j, k)
@@ -130,9 +130,9 @@ struct VersionRange
return new(lo, hi)
end
end
-VersionRange(b::VersionBound=VersionBound()) = VersionRange(b, b)
-VersionRange(t::Integer...) = VersionRange(VersionBound(t...))
-VersionRange(v::VersionNumber) = VersionRange(VersionBound(v))
+VersionRange(b::VersionBound = VersionBound()) = VersionRange(b, b)
+VersionRange(t::Integer...) = VersionRange(VersionBound(t...))
+VersionRange(v::VersionNumber) = VersionRange(VersionBound(v))
VersionRange(lo::VersionNumber, hi::VersionNumber) = VersionRange(VersionBound(lo), VersionBound(hi))
# The vast majority of VersionRanges are in practice equal to "1"
@@ -149,7 +149,7 @@ function VersionRange(s::AbstractString)
end
function Base.isempty(r::VersionRange)
- for i = 1:min(r.lower.n, r.upper.n)
+ for i in 1:min(r.lower.n, r.upper.n)
r.lower[i] > r.upper[i] && return true
r.lower[i] < r.upper[i] && return false
end
@@ -158,7 +158,7 @@ end
function Base.print(io::IO, r::VersionRange)
m, n = r.lower.n, r.upper.n
- if (m, n) == (0, 0)
+ return if (m, n) == (0, 0)
print(io, '*')
elseif m == 0
print(io, "0 -")
@@ -184,14 +184,14 @@ function Base.union!(ranges::Vector{<:VersionRange})
l = length(ranges)
l == 0 && return ranges
- sort!(ranges, lt=(a, b) -> (isless_ll(a.lower, b.lower) || (a.lower == b.lower && isless_uu(a.upper, b.upper))))
+ sort!(ranges, lt = (a, b) -> (isless_ll(a.lower, b.lower) || (a.lower == b.lower && isless_uu(a.upper, b.upper))))
k0 = 1
ks = findfirst(!isempty, ranges)
ks === nothing && return empty!(ranges)
lo, up, k0 = ranges[ks].lower, ranges[ks].upper, 1
- for k = (ks + 1):l
+ for k in (ks + 1):l
isempty(ranges[k]) && continue
lo1, up1 = ranges[k].lower, ranges[k].upper
if isjoinable(up, lo1)
@@ -237,6 +237,38 @@ function Base.in(v::VersionNumber, s::VersionSpec)
return false
end
+# Optimized batch version check for version lists
+# Fills dest[1:n] indicating which versions are in the VersionSpec
+# Correctness requires `versions` to be sorted ascending; on unsorted input the scan can miss matches
+# Note: Only fills indices 1:n, leaves rest of dest unchanged
+function matches_spec_range!(dest::BitVector, versions::AbstractVector{VersionNumber}, spec::VersionSpec, n::Int)
+ @assert length(versions) == n
+ @assert length(dest) >= n
+
+ # Initialize to false
+ dest[1:n] .= false
+
+ isempty(spec.ranges) && return dest
+
+ # Assumes versions are sorted (as created in Operations.jl:1002)
+    # The per-range linear scan relies on that sorted order for correctness and O(n + m) cost
+ @inbounds for range in spec.ranges
+ # Find first version that could be in range
+ i = 1
+ while i <= n && !(range.lower ≲ versions[i])
+ i += 1
+ end
+
+ # Mark all versions in range
+ while i <= n && versions[i] ≲ range.upper
+ dest[i] = true
+ i += 1
+ end
+ end
+
+ return dest
+end
+
Base.copy(vs::VersionSpec) = VersionSpec(vs)
const empty_versionspec = VersionSpec(VersionRange[])
@@ -253,7 +285,7 @@ function Base.intersect(A::VersionSpec, B::VersionSpec)
ranges[i] = intersect(a, b)
i += 1
end
- VersionSpec(ranges)
+ return VersionSpec(ranges)
end
Base.intersect(a::VersionNumber, B::VersionSpec) = a in B ? VersionSpec(a) : empty_versionspec
Base.intersect(A::VersionSpec, b::VersionNumber) = intersect(b, A)
@@ -273,11 +305,11 @@ function Base.print(io::IO, s::VersionSpec)
isempty(s) && return print(io, _empty_symbol)
length(s.ranges) == 1 && return print(io, s.ranges[1])
print(io, '[')
- for i = 1:length(s.ranges)
+ for i in 1:length(s.ranges)
1 < i && print(io, ", ")
print(io, s.ranges[i])
end
- print(io, ']')
+ return print(io, ']')
end
function Base.show(io::IO, s::VersionSpec)
@@ -286,13 +318,13 @@ function Base.show(io::IO, s::VersionSpec)
print(io, '"', s.ranges[1], '"')
else
print(io, "[")
- for i = 1:length(s.ranges)
+ for i in 1:length(s.ranges)
1 < i && print(io, ", ")
print(io, '"', s.ranges[i], '"')
end
print(io, ']')
end
- print(io, ")")
+ return print(io, ")")
end
@@ -328,7 +360,7 @@ function semver_interval(m::RegexMatch)
@assert length(m.captures) == 4
n_significant = count(x -> x !== nothing, m.captures) - 1
typ, _major, _minor, _patch = m.captures
- major = parse(Int, _major)
+ major = parse(Int, _major)
minor = (n_significant < 2) ? 0 : parse(Int, _minor)
patch = (n_significant < 3) ? 0 : parse(Int, _patch)
if n_significant == 3 && major == 0 && minor == 0 && patch == 0
@@ -337,7 +369,7 @@ function semver_interval(m::RegexMatch)
# Default type is :caret
vertyp = (typ == "" || typ == "^") ? :caret : :tilde
v0 = VersionBound((major, minor, patch))
- if vertyp === :caret
+ return if vertyp === :caret
if major != 0
return VersionRange(v0, VersionBound((v0[1],)))
elseif minor != 0
@@ -346,14 +378,14 @@ function semver_interval(m::RegexMatch)
if n_significant == 1
return VersionRange(v0, VersionBound((0,)))
elseif n_significant == 2
- return VersionRange(v0, VersionBound((0, 0,)))
+ return VersionRange(v0, VersionBound((0, 0)))
else
return VersionRange(v0, VersionBound((0, 0, v0[3])))
end
end
else
if n_significant == 3 || n_significant == 2
- return VersionRange(v0, VersionBound((v0[1], v0[2],)))
+ return VersionRange(v0, VersionBound((v0[1], v0[2])))
else
return VersionRange(v0, VersionBound((v0[1],)))
end
@@ -365,7 +397,7 @@ function inequality_interval(m::RegexMatch)
@assert length(m.captures) == 4
typ, _major, _minor, _patch = m.captures
n_significant = count(x -> x !== nothing, m.captures) - 1
- major = parse(Int, _major)
+ major = parse(Int, _major)
minor = (n_significant < 2) ? 0 : parse(Int, _minor)
patch = (n_significant < 3) ? 0 : parse(Int, _patch)
if n_significant == 3 && major == 0 && minor == 0 && patch == 0
@@ -376,18 +408,18 @@ function inequality_interval(m::RegexMatch)
nil = VersionBound(0, 0, 0)
if v[3] == 0
if v[2] == 0
- v1 = VersionBound(v[1]-1)
+ v1 = VersionBound(v[1] - 1)
else
- v1 = VersionBound(v[1], v[2]-1)
+ v1 = VersionBound(v[1], v[2] - 1)
end
else
- v1 = VersionBound(v[1], v[2], v[3]-1)
+ v1 = VersionBound(v[1], v[2], v[3] - 1)
end
return VersionRange(nil, v1)
elseif occursin(r"^=\s*$", typ)
return VersionRange(v)
elseif occursin(r"^>=\s*$", typ) || occursin(r"^≥\s*$", typ)
- return VersionRange(v, _inf)
+ return VersionRange(v, _inf)
else
error("invalid prefix $typ")
end
@@ -399,32 +431,40 @@ function hyphen_interval(m::RegexMatch)
if isnothing(_lower_minor)
lower_bound = VersionBound(parse(Int, _lower_major))
elseif isnothing(_lower_patch)
- lower_bound = VersionBound(parse(Int, _lower_major),
- parse(Int, _lower_minor))
+ lower_bound = VersionBound(
+ parse(Int, _lower_major),
+ parse(Int, _lower_minor)
+ )
else
- lower_bound = VersionBound(parse(Int, _lower_major),
- parse(Int, _lower_minor),
- parse(Int, _lower_patch))
+ lower_bound = VersionBound(
+ parse(Int, _lower_major),
+ parse(Int, _lower_minor),
+ parse(Int, _lower_patch)
+ )
end
if isnothing(_upper_minor)
upper_bound = VersionBound(parse(Int, _upper_major))
elseif isnothing(_upper_patch)
- upper_bound = VersionBound(parse(Int, _upper_major),
- parse(Int, _upper_minor))
+ upper_bound = VersionBound(
+ parse(Int, _upper_major),
+ parse(Int, _upper_minor)
+ )
else
- upper_bound = VersionBound(parse(Int, _upper_major),
- parse(Int, _upper_minor),
- parse(Int, _upper_patch))
+ upper_bound = VersionBound(
+ parse(Int, _upper_major),
+ parse(Int, _upper_minor),
+ parse(Int, _upper_patch)
+ )
end
return VersionRange(lower_bound, upper_bound)
end
const version = "v?([0-9]+?)(?:\\.([0-9]+?))?(?:\\.([0-9]+?))?"
const ver_regs =
-Pair{Regex,Any}[
- Regex("^([~^]?)?$version\$") => semver_interval, # 0.5 ^0.4 ~0.3.2
- Regex("^((?:≥\\s*)|(?:>=\\s*)|(?:=\\s*)|(?:<\\s*)|(?:=\\s*))v?$version\$") => inequality_interval,# < 0.2 >= 0.5,2
- Regex("^[\\s]*$version[\\s]*?\\s-\\s[\\s]*?$version[\\s]*\$") => hyphen_interval, # 0.7 - 1.3
+ Pair{Regex, Any}[
+ Regex("^([~^]?)?$version\$") => semver_interval, # 0.5 ^0.4 ~0.3.2
+ Regex("^((?:≥\\s*)|(?:>=\\s*)|(?:=\\s*)|(?:<\\s*)|(?:=\\s*))v?$version\$") => inequality_interval, # < 0.2 >= 0.5,2
+ Regex("^[\\s]*$version[\\s]*?\\s-\\s[\\s]*?$version[\\s]*\$") => hyphen_interval, # 0.7 - 1.3
]
end
diff --git a/src/fuzzysorting.jl b/src/fuzzysorting.jl
index 0d8d842b7f..fd88eb4715 100644
--- a/src/fuzzysorting.jl
+++ b/src/fuzzysorting.jl
@@ -1,143 +1,300 @@
module FuzzySorting
-_displaysize(io::IO) = displaysize(io)::Tuple{Int,Int}
+_displaysize(io::IO) = displaysize(io)::Tuple{Int, Int}
-# This code is duplicated from REPL.jl
-# Considering breaking this into an independent package
+# Character confusion weights for fuzzy matching
+const CHARACTER_CONFUSIONS = Dict(
+ ('a', 'e') => 0.5, ('e', 'a') => 0.5,
+ ('i', 'y') => 0.5, ('y', 'i') => 0.5,
+ ('u', 'o') => 0.5, ('o', 'u') => 0.5,
+ ('c', 'k') => 0.3, ('k', 'c') => 0.3,
+ ('s', 'z') => 0.3, ('z', 's') => 0.3,
+ # Keyboard proximity (QWERTY layout)
+ ('q', 'w') => 0.4, ('w', 'q') => 0.4,
+ ('w', 'e') => 0.4, ('e', 'w') => 0.4,
+ ('e', 'r') => 0.4, ('r', 'e') => 0.4,
+ ('r', 't') => 0.4, ('t', 'r') => 0.4,
+ ('t', 'y') => 0.4, ('y', 't') => 0.4,
+ ('y', 'u') => 0.4, ('u', 'y') => 0.4,
+ ('u', 'i') => 0.4, ('i', 'u') => 0.4,
+ ('i', 'o') => 0.4, ('o', 'i') => 0.4,
+ ('o', 'p') => 0.4, ('p', 'o') => 0.4,
+ ('a', 's') => 0.4, ('s', 'a') => 0.4,
+ ('s', 'd') => 0.4, ('d', 's') => 0.4,
+ ('d', 'f') => 0.4, ('f', 'd') => 0.4,
+ ('f', 'g') => 0.4, ('g', 'f') => 0.4,
+ ('g', 'h') => 0.4, ('h', 'g') => 0.4,
+ ('h', 'j') => 0.4, ('j', 'h') => 0.4,
+ ('j', 'k') => 0.4, ('k', 'j') => 0.4,
+ ('k', 'l') => 0.4, ('l', 'k') => 0.4,
+ ('z', 'x') => 0.4, ('x', 'z') => 0.4,
+ ('x', 'c') => 0.4, ('c', 'x') => 0.4,
+ ('c', 'v') => 0.4, ('v', 'c') => 0.4,
+ ('v', 'b') => 0.4, ('b', 'v') => 0.4,
+ ('b', 'n') => 0.4, ('n', 'b') => 0.4,
+ ('n', 'm') => 0.4, ('m', 'n') => 0.4,
+)
-# Search & Rescue
-# Utilities for correcting user mistakes and (eventually)
-# doing full documentation searches from the repl.
+# Enhanced fuzzy scoring with multiple factors
+function fuzzyscore(needle::AbstractString, haystack::AbstractString)
+ needle_lower, haystack_lower = lowercase(needle), lowercase(haystack)
-# Fuzzy Search Algorithm
+ # Factor 1: Prefix matching bonus (highest priority)
+ prefix_score = prefix_match_score(needle_lower, haystack_lower)
-function matchinds(needle, haystack; acronym::Bool = false)
- chars = collect(needle)
- is = Int[]
- lastc = '\0'
- for (i, char) in enumerate(haystack)
- while !isempty(chars) && isspace(first(chars))
- popfirst!(chars) # skip spaces
- end
- isempty(chars) && break
- if lowercase(char) == lowercase(chars[1]) &&
- (!acronym || !isletter(lastc))
- push!(is, i)
- popfirst!(chars)
+ # Factor 2: Subsequence matching
+ subseq_score = subsequence_score(needle_lower, haystack_lower)
+
+ # Factor 3: Character-level similarity (improved edit distance)
+ char_score = character_similarity_score(needle_lower, haystack_lower)
+
+ # Factor 4: Case preservation bonus
+ case_score = case_preservation_score(needle, haystack)
+
+ # Factor 5: Length penalty for very long matches
+ length_penalty = length_penalty_score(needle, haystack)
+
+ # Weighted combination
+ base_score = 0.4 * prefix_score + 0.3 * subseq_score + 0.2 * char_score + 0.1 * case_score
+ final_score = base_score * length_penalty
+
+ return final_score
+end
+
+# Prefix matching: exact prefix gets maximum score
+function prefix_match_score(needle::AbstractString, haystack::AbstractString)
+ if startswith(haystack, needle)
+ return 1.0
+ elseif startswith(needle, haystack)
+ return 0.9 # Partial prefix match
+ else
+ # Check for prefix after common separators
+ for sep in ['_', '-', '.']
+ parts = split(haystack, sep)
+ for part in parts
+ if startswith(part, needle)
+ return 0.7 # Component prefix match
+ end
+ end
end
- lastc = char
+ return 0.0
end
- return is
end
-longer(x, y) = length(x) ≥ length(y) ? (x, true) : (y, false)
+# Subsequence matching with position weighting
+function subsequence_score(needle::AbstractString, haystack::AbstractString)
+ if isempty(needle)
+ return 1.0
+ end
-bestmatch(needle, haystack) =
- longer(matchinds(needle, haystack, acronym = true),
- matchinds(needle, haystack))
-
-# Optimal string distance: Counts the minimum number of insertions, deletions,
-# transpositions or substitutions to go from one string to the other.
-function string_distance(a::AbstractString, lena::Integer, b::AbstractString, lenb::Integer)
- if lena > lenb
- a, b = b, a
- lena, lenb = lenb, lena
+ needle_chars = collect(needle)
+ haystack_chars = collect(haystack)
+
+ matched_positions = Int[]
+ haystack_idx = 1
+
+ for needle_char in needle_chars
+ found = false
+ for i in haystack_idx:length(haystack_chars)
+ if haystack_chars[i] == needle_char
+ push!(matched_positions, i)
+ haystack_idx = i + 1
+ found = true
+ break
+ end
+ end
+ if !found
+ return 0.0
+ end
end
- start = 0
- for (i, j) in zip(a, b)
- if a == b
- start += 1
- else
- break
+
+ # Calculate score based on how clustered the matches are
+ if length(matched_positions) <= 1
+ return 1.0
+ end
+
+ # Penalize large gaps between matches
+ gaps = diff(matched_positions)
+ avg_gap = sum(gaps) / length(gaps)
+ gap_penalty = 1.0 / (1.0 + avg_gap / 3.0)
+
+ # Bonus for matches at word boundaries
+ boundary_bonus = 0.0
+ for pos in matched_positions
+ if pos == 1 || haystack_chars[pos - 1] in ['_', '-', '.']
+ boundary_bonus += 0.1
end
end
- start == lena && return lenb - start
- vzero = collect(1:(lenb - start))
- vone = similar(vzero)
- prev_a, prev_b = first(a), first(b)
- current = 0
- for (i, ai) in enumerate(a)
- i > start || (prev_a = ai; continue)
- left = i - start - 1
- current = i - start
- transition_next = 0
- for (j, bj) in enumerate(b)
- j > start || (prev_b = bj; continue)
- # No need to look beyond window of lower right diagonal
- above = current
- this_transition = transition_next
- transition_next = vone[j - start]
- vone[j - start] = current = left
- left = vzero[j - start]
- if ai != bj
- # Minimum between substitution, deletion and insertion
- current = min(current + 1, above + 1, left + 1)
- if i > start + 1 && j > start + 1 && ai == prev_b && prev_a == bj
- current = min(current, (this_transition += 1))
- end
+
+ coverage = length(needle) / length(haystack)
+ return min(1.0, gap_penalty + boundary_bonus) * coverage
+end
+
+# Improved character-level similarity
+function character_similarity_score(needle::AbstractString, haystack::AbstractString)
+ if isempty(needle) || isempty(haystack)
+ return 0.0
+ end
+
+ # Use Damerau-Levenshtein distance with character confusion weights
+ distance = weighted_edit_distance(needle, haystack)
+ max_len = max(length(needle), length(haystack))
+
+ return max(0.0, 1.0 - distance / max_len)
+end
+
+# Weighted edit distance accounting for common typos
+function weighted_edit_distance(s1::AbstractString, s2::AbstractString)
+
+ a, b = collect(s1), collect(s2)
+ m, n = length(a), length(b)
+
+ # Initialize distance matrix
+ d = Matrix{Float64}(undef, m + 1, n + 1)
+ d[1:(m + 1), 1] = 0:m
+ d[1, 1:(n + 1)] = 0:n
+
+ for i in 1:m, j in 1:n
+ if a[i] == b[j]
+ d[i + 1, j + 1] = d[i, j] # No cost for exact match
+ else
+ # Standard operations
+ insert_cost = d[i, j + 1] + 1.0
+ delete_cost = d[i + 1, j] + 1.0
+
+ # Check for repeated character deletion (common typo)
+ if i > 1 && a[i] == a[i - 1] && a[i - 1] == b[j]
+ delete_cost = d[i, j + 1] + 0.3 # Low cost for deleting repeated char
+ end
+
+ # Check for repeated character insertion (common typo)
+ if j > 1 && b[j] == b[j - 1] && a[i] == b[j - 1]
+ insert_cost = d[i, j + 1] + 0.3 # Low cost for inserting repeated char
+ end
+
+ # Substitution with confusion weighting
+ confusion_key = (a[i], b[j])
+ subst_cost = d[i, j] + get(CHARACTER_CONFUSIONS, confusion_key, 1.0)
+
+ d[i + 1, j + 1] = min(insert_cost, delete_cost, subst_cost)
+
+ # Transposition
+ if i > 1 && j > 1 && a[i] == b[j - 1] && a[i - 1] == b[j]
+ d[i + 1, j + 1] = min(d[i + 1, j + 1], d[i - 1, j - 1] + 1.0)
end
- vzero[j - start] = current
- prev_b = bj
end
- prev_a = ai
end
- current
-end
-function fuzzyscore(needle::AbstractString, haystack::AbstractString)
- lena, lenb = length(needle), length(haystack)
- 1 - (string_distance(needle, lena, haystack, lenb) / max(lena, lenb))
+ return d[m + 1, n + 1]
end
-function fuzzysort(search::String, candidates::Vector{String})
- scores = map(cand -> (FuzzySorting.fuzzyscore(search, cand), -Float64(FuzzySorting.levenshtein(search, cand))), candidates)
- candidates[sortperm(scores)] |> reverse, any(s -> s[1] >= print_score_threshold, scores)
+# Case preservation bonus
+function case_preservation_score(needle::AbstractString, haystack::AbstractString)
+ if isempty(needle) || isempty(haystack)
+ return 0.0
+ end
+
+ matches = 0
+ min_len = min(length(needle), length(haystack))
+
+ for (n, h) in zip(needle, haystack)
+ if n == h
+ matches += 1
+ end
+ end
+
+ return matches / min_len
end
-# Levenshtein Distance
+# Length penalty for very long matches
+function length_penalty_score(needle::AbstractString, haystack::AbstractString)
+ needle_len = length(needle)
+ haystack_len = length(haystack)
-function levenshtein(s1, s2)
- a, b = collect(s1), collect(s2)
- m = length(a)
- n = length(b)
- d = Matrix{Int}(undef, m+1, n+1)
+ if needle_len == 0
+ return 0.0
+ end
+
+ # Strong preference for similar lengths
+ length_ratio = haystack_len / needle_len
+ length_diff = abs(haystack_len - needle_len)
- d[1:m+1, 1] = 0:m
- d[1, 1:n+1] = 0:n
+ # Bonus for very close lengths (within 1-2 characters)
+ if length_diff <= 1
+ return 1.1 # Small bonus for near-exact length
+ elseif length_diff <= 2
+ return 1.05
+ elseif length_ratio <= 1.5
+ return 1.0
+ elseif length_ratio <= 2.0
+ return 0.8
+ elseif length_ratio <= 3.0
+ return 0.6
+ else
+ return 0.4 # Heavy penalty for very long matches
+ end
+end
- for i = 1:m, j = 1:n
- d[i+1,j+1] = min(d[i , j+1] + 1,
- d[i+1, j ] + 1,
- d[i , j ] + (a[i] != b[j]))
+# Main sorting function with optional popularity weighting
+function fuzzysort(search::String, candidates::Vector{String}; popularity_weights::Dict{String, Float64} = Dict{String, Float64}())
+ scores = map(candidates) do cand
+ base_score = fuzzyscore(search, cand)
+ weight = get(popularity_weights, cand, 1.0)
+ score = base_score * weight
+ return (score, cand)
end
- return d[m+1, n+1]
+ # Sort by score descending, then by candidate name for ties
+ sorted_scores = sort(scores, by = x -> (-x[1], x[2]))
+
+ # Extract candidates and check if any meet threshold
+ result_candidates = [x[2] for x in sorted_scores]
+ has_good_matches = any(x -> x[1] >= print_score_threshold, sorted_scores)
+
+ return result_candidates, has_good_matches
end
-function levsort(search::String, candidates::Vector{String})
- scores = map(cand -> (Float64(levenshtein(search, cand)), -fuzzyscore(search, cand)), candidates)
- candidates = candidates[sortperm(scores)]
- i = 0
- for outer i = 1:length(candidates)
- levenshtein(search, candidates[i]) > 3 && break
+# Keep existing interface functions for compatibility
+function matchinds(needle, haystack; acronym::Bool = false)
+ chars = collect(needle)
+ is = Int[]
+ lastc = '\0'
+ for (i, char) in enumerate(haystack)
+ while !isempty(chars) && isspace(first(chars))
+ popfirst!(chars) # skip spaces
+ end
+ isempty(chars) && break
+ if lowercase(char) == lowercase(chars[1]) &&
+ (!acronym || !isletter(lastc))
+ push!(is, i)
+ popfirst!(chars)
+ end
+ lastc = char
end
- return candidates[1:i]
+ return is
end
-# Result printing
+longer(x, y) = length(x) ≥ length(y) ? (x, true) : (y, false)
+
+bestmatch(needle, haystack) =
+ longer(
+ matchinds(needle, haystack, acronym = true),
+ matchinds(needle, haystack)
+)
function printmatch(io::IO, word, match)
is, _ = bestmatch(word, match)
- for (i, char) = enumerate(match)
+ for (i, char) in enumerate(match)
if i in is
- printstyled(io, char, bold=true)
+ printstyled(io, char, bold = true)
else
print(io, char)
end
end
+ return
end
-const print_score_threshold = 0.5
+const print_score_threshold = 0.25
function printmatches(io::IO, word, matches; cols::Int = _displaysize(io)[2])
total = 0
@@ -148,29 +305,10 @@ function printmatches(io::IO, word, matches; cols::Int = _displaysize(io)[2])
printmatch(io, word, match)
total += length(match) + 1
end
+ return
end
printmatches(args...; cols::Int = _displaysize(stdout)[2]) = printmatches(stdout, args..., cols = cols)
-function print_joined_cols(io::IO, ss::Vector{String}, delim = "", last = delim; cols::Int = _displaysize(io)[2])
- i = 0
- total = 0
- for outer i = 1:length(ss)
- total += length(ss[i])
- total + max(i-2,0)*length(delim) + (i>1 ? 1 : 0)*length(last) > cols && (i-=1; break)
- end
- join(io, ss[1:i], delim, last)
-end
-
-print_joined_cols(args...; cols::Int = _displaysize(stdout)[2]) = print_joined_cols(stdout, args...; cols=cols)
-
-function print_correction(io::IO, word::String, mod::Module)
- cors = map(quote_spaces, levsort(word, accessible(mod)))
- pre = "Perhaps you meant "
- print(io, pre)
- print_joined_cols(io, cors, ", ", " or "; cols = _displaysize(io)[2] - length(pre))
- println(io)
- return
-end
end
diff --git a/src/generate.jl b/src/generate.jl
index 6134a1686c..f031c1eeb9 100644
--- a/src/generate.jl
+++ b/src/generate.jl
@@ -1,10 +1,24 @@
# This file is a part of Julia. License is MIT: https://julialang.org/license
-function generate(path::String; io::IO=stderr_f())
- base = basename(path)
- pkg = endswith(lowercase(base), ".jl") ? chop(base, tail=3) : base
+function generate(path::String; io::IO = stderr_f())
+ # Handle "." to generate in current directory
+ abspath_path = abspath(path)
+ # Remove trailing path separator to ensure basename works correctly
+ abspath_path = rstrip(abspath_path, ('/', '\\'))
+ base = basename(abspath_path)
+ pkg = endswith(lowercase(base), ".jl") ? chop(base, tail = 3) : base
Base.isidentifier(pkg) || pkgerror("$(repr(pkg)) is not a valid package name")
- isdir(path) && pkgerror("$(abspath(path)) already exists")
+
+ if isdir(abspath_path)
+ # Allow generating in existing directory only if it's effectively empty for our purposes
+ files = readdir(abspath_path)
+ # Filter out common hidden files that are okay to have
+ relevant_files = filter(f -> f != ".git" && f != ".gitignore", files)
+ if !isempty(relevant_files)
+ pkgerror("$(abspath_path) already exists and is not empty")
+ end
+ end
+
printpkgstyle(io, :Generating, " project $pkg:")
uuid = project(io, pkg, path)
entrypoint(io, pkg, path)
@@ -38,7 +52,7 @@ function project(io::IO, pkg::AbstractString, dir::AbstractString)
name === nothing && (name = "Unknown")
if email === nothing
- for env in ["GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL", "EMAIL"];
+ for env in ["GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL", "EMAIL"]
email = get(ENV, env, nothing)
email !== nothing && break
end
@@ -48,23 +62,37 @@ function project(io::IO, pkg::AbstractString, dir::AbstractString)
uuid = UUIDs.uuid4()
genfile(io, dir, "Project.toml") do file_io
- toml = Dict{String,Any}("authors" => authors,
- "name" => pkg,
- "uuid" => string(uuid),
- "version" => "0.1.0",
- )
- TOML.print(file_io, toml, sorted=true, by=key -> (Types.project_key_order(key), key))
+ toml = Dict{String, Any}(
+ "authors" => authors,
+ "name" => pkg,
+ "uuid" => string(uuid),
+ "version" => "0.1.0",
+ )
+ TOML.print(file_io, toml, sorted = true, by = key -> (Types.project_key_order(key), key))
end
return uuid
end
function entrypoint(io::IO, pkg::AbstractString, dir)
- genfile(io, joinpath(dir, "src"), "$pkg.jl") do file_io
- print(file_io,
- """
+ return genfile(io, joinpath(dir, "src"), "$pkg.jl") do file_io
+ print(
+ file_io,
+ """
module $pkg
- greet() = print("Hello World!")
+ \"""
+ hello(who::String)
+
+ Return "Hello, `who`".
+ \"""
+ hello(who::String) = "Hello, \$who"
+
+ \"""
+ domath(x::Number)
+
+ Return `x + 5`.
+ \"""
+ domath(x::Number) = x + 5
end # module $pkg
"""
diff --git a/src/manifest.jl b/src/manifest.jl
index db04bdbe7f..d7a5261a23 100644
--- a/src/manifest.jl
+++ b/src/manifest.jl
@@ -19,7 +19,8 @@ function read_pinned(pinned)
end
function safe_SHA1(sha::String)
- try sha = SHA1(sha)
+ try
+ sha = SHA1(sha)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse `git-tree-sha1` field as a SHA.")
@@ -28,7 +29,8 @@ function safe_SHA1(sha::String)
end
function safe_uuid(uuid::String)::UUID
- try uuid = UUID(uuid)
+ try
+ uuid = UUID(uuid)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse `uuid` field as a UUID.")
@@ -37,7 +39,8 @@ function safe_uuid(uuid::String)::UUID
end
function safe_bool(bool::String)
- try bool = parse(Bool, bool)
+ try
+ bool = parse(Bool, bool)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse `pinned` field as a Bool.")
@@ -47,7 +50,8 @@ end
# note: returns raw version *not* parsed version
function safe_version(version::String)::VersionNumber
- try version = VersionNumber(version)
+ try
+ version = VersionNumber(version)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse `version` as a `VersionNumber`.")
@@ -63,6 +67,27 @@ function safe_path(path::String)
return path
end
+function read_registry_entry(id::String, info::Dict{String, Any})
+ uuid_val = get(info, "uuid", nothing)
+ uuid_val isa String || pkgerror("Registry entry `$id` is missing a string `uuid` field.")
+ uuid = safe_uuid(uuid_val)
+ url_val = get(info, "url", nothing)
+ url_val === nothing || url_val isa String || pkgerror("Field `url` for registry `$id` must be a String.")
+
+ return ManifestRegistryEntry(
+ id = id,
+ uuid = uuid,
+ url = url_val === nothing ? nothing : String(url_val),
+ )
+end
+
+function registry_entry_toml(entry::ManifestRegistryEntry)
+ d = Dict{String, Any}()
+ d["uuid"] = string(entry.uuid)
+ entry.url === nothing || (d["url"] = entry.url)
+ return d
+end
+
read_deps(::Nothing) = Dict{String, UUID}()
read_deps(deps) = pkgerror("Expected `deps` field to be either a list or a table.")
function read_deps(deps::AbstractVector)
@@ -73,52 +98,94 @@ function read_deps(deps::AbstractVector)
end
return ret
end
-function read_deps(raw::Dict{String, Any})::Dict{String,UUID}
- deps = Dict{String,UUID}()
+function read_deps(raw::Dict{String, Any})::Dict{String, UUID}
+ deps = Dict{String, UUID}()
for (name, uuid) in raw
deps[name] = safe_uuid(uuid)
end
return deps
end
+read_apps(::Nothing) = Dict{String, AppInfo}()
+read_apps(::Any) = pkgerror("Expected `apps` field to be a Dict")
+function read_apps(apps::Dict)
+ appinfos = Dict{String, AppInfo}()
+ for (appname, app) in apps
+ submodule = get(app, "submodule", nothing)
+ julia_flags_raw = get(app, "julia_flags", nothing)
+ julia_flags = if julia_flags_raw === nothing
+ String[]
+ else
+ String[flag::String for flag in julia_flags_raw]
+ end
+ appinfo = AppInfo(
+ appname::String,
+ app["julia_command"]::String,
+ submodule,
+ julia_flags,
+ app
+ )
+ appinfos[appinfo.name] = appinfo
+ end
+ return appinfos
+end
+
+read_exts(::Nothing) = Dict{String, Union{String, Vector{String}}}()
+function read_exts(raw::Dict{String, Any})
+ exts = Dict{String, Union{String, Vector{String}}}()
+ for (key, val) in raw
+ val isa Union{String, Vector{String}} || pkgerror("Expected `ext` entry to be a `Union{String, Vector{String}}`.")
+ exts[key] = val
+ end
+ return exts
+end
+
struct Stage1
uuid::UUID
entry::PackageEntry
- deps::Union{Vector{String}, Dict{String,UUID}}
- weakdeps::Union{Vector{String}, Dict{String,UUID}}
+ deps::Union{Vector{String}, Dict{String, UUID}}
+ weakdeps::Union{Vector{String}, Dict{String, UUID}}
end
-normalize_deps(name, uuid, deps, manifest; isext=false) = deps
-function normalize_deps(name, uuid, deps::Vector{String}, manifest::Dict{String,Vector{Stage1}}; isext=false)
+normalize_deps(name, uuid, deps, manifest, manifest_path; isext = false) = deps
+function normalize_deps(name, uuid, deps::Vector{String}, manifest::Dict{String, Vector{Stage1}}, manifest_path; isext = false)
if length(deps) != length(unique(deps))
pkgerror("Duplicate entry in `$name=$uuid`'s `deps` field.")
end
- final = Dict{String,UUID}()
+ final = Dict{String, UUID}()
for dep in deps
infos = get(manifest, dep, nothing)
if !isext
if infos === nothing
- pkgerror("`$name=$uuid` depends on `$dep`, ",
- "but no such entry exists in the manifest.")
+ pkgerror(
+ "`$name=$uuid` depends on `$dep`, ",
+ "but no such entry exists in the manifest at `$manifest_path`."
+ )
end
end
# should have used dict format instead of vector format
if isnothing(infos) || length(infos) != 1
- pkgerror("Invalid manifest format. ",
- "`$name=$uuid`'s dependency on `$dep` is ambiguous.")
+ pkgerror(
+ "Invalid manifest format at `$manifest_path`. ",
+ "`$name=$uuid`'s dependency on `$dep` is ambiguous."
+ )
end
final[dep] = infos[1].uuid
end
return final
end
-function validate_manifest(julia_version::Union{Nothing,VersionNumber}, project_hash::Union{Nothing,SHA1}, manifest_format::VersionNumber, stage1::Dict{String,Vector{Stage1}}, other::Dict{String, Any})
+manifest_path_str(f_or_io::IO) = "streamed manifest"
+manifest_path_str(path::String) = path
+
+function validate_manifest(julia_version::Union{Nothing, VersionNumber}, project_hash::Union{Nothing, SHA1}, manifest_format::VersionNumber, stage1::Dict{String, Vector{Stage1}}, other::Dict{String, Any}, registries::Dict{String, ManifestRegistryEntry}, f_or_io)
+ manifest_path = manifest_path_str(f_or_io)
# expand vector format deps
for (name, infos) in stage1, info in infos
- info.entry.deps = normalize_deps(name, info.uuid, info.deps, stage1)
+ info.entry.deps = normalize_deps(name, info.uuid, info.deps, stage1, manifest_path)
end
for (name, infos) in stage1, info in infos
- info.entry.weakdeps = normalize_deps(name, info.uuid, info.weakdeps, stage1; isext=true)
+ info.entry.weakdeps = normalize_deps(name, info.uuid, info.weakdeps, stage1, manifest_path; isext = true)
end
# invariant: all dependencies are now normalized to Dict{String,UUID}
deps = Dict{UUID, PackageEntry}()
@@ -132,18 +199,22 @@ function validate_manifest(julia_version::Union{Nothing,VersionNumber}, project_
dep_entry = get(deps, uuid, nothing)
if !isext
if dep_entry === nothing
- pkgerror("`$(entry.name)=$(entry_uuid)` depends on `$name=$uuid`, ",
- "but no such entry exists in the manifest.")
+ pkgerror(
+ "`$(entry.name)=$(entry_uuid)` depends on `$name=$uuid`, ",
+ "but no such entry exists in the manifest at `$manifest_path`."
+ )
end
if dep_entry.name != name
- pkgerror("`$(entry.name)=$(entry_uuid)` depends on `$name=$uuid`, ",
- "but entry with UUID `$uuid` has name `$(dep_entry.name)`.")
+ pkgerror(
+ "`$(entry.name)=$(entry_uuid)` depends on `$name=$uuid`, ",
+ "but entry with UUID `$uuid` has name `$(dep_entry.name)` in the manifest at `$manifest_path`."
+ )
end
end
end
end
end
- return Manifest(; julia_version, project_hash, manifest_format, deps, other)
+ return Manifest(; julia_version, project_hash, manifest_format, deps, registries, other)
end
function Manifest(raw::Dict{String, Any}, f_or_io::Union{String, IO})::Manifest
@@ -158,7 +229,7 @@ function Manifest(raw::Dict{String, Any}, f_or_io::Union{String, IO})::Manifest
@warn "Unknown Manifest.toml format version detected in file `$(f_or_io)`. Unexpected behavior may occur" manifest_format maxlog = 1 _id = Symbol(f_or_io)
end
end
- stage1 = Dict{String,Vector{Stage1}}()
+ stage1 = Dict{String, Vector{Stage1}}()
if haskey(raw, "deps") # deps field doesn't exist if there are no deps
deps_raw = raw["deps"]::Dict{String, Any}
for (name::String, infos) in deps_raw
@@ -171,18 +242,31 @@ function Manifest(raw::Dict{String, Any}, f_or_io::Union{String, IO})::Manifest
deps = nothing
weakdeps = nothing
try
- entry.pinned = read_pinned(get(info, "pinned", nothing))
- uuid = read_field("uuid", nothing, info, safe_uuid)::UUID
- entry.version = read_field("version", nothing, info, safe_version)
- entry.path = read_field("path", nothing, info, safe_path)
- entry.repo.source = read_field("repo-url", nothing, info, identity)
- entry.repo.rev = read_field("repo-rev", nothing, info, identity)
- entry.repo.subdir = read_field("repo-subdir", nothing, info, identity)
- entry.tree_hash = read_field("git-tree-sha1", nothing, info, safe_SHA1)
- entry.uuid = uuid
+ entry.pinned = read_pinned(get(info, "pinned", nothing))
+ uuid = read_field("uuid", nothing, info, safe_uuid)::UUID
+ entry.version = read_field("version", nothing, info, safe_version)
+ entry.path = read_field("path", nothing, info, safe_path)
+ entry.repo.source = read_field("repo-url", nothing, info, identity)
+ entry.repo.rev = read_field("repo-rev", nothing, info, identity)
+ entry.repo.subdir = read_field("repo-subdir", nothing, info, identity)
+ entry.tree_hash = read_field("git-tree-sha1", nothing, info, safe_SHA1)
+ entry.uuid = uuid
+ reg_field = get(info, "registries", nothing)::Union{Nothing, String, Vector{String}}
+ if reg_field isa String
+ entry.registries = [reg_field]
+ elseif reg_field isa Vector{String}
+ entry.registries = String[r for r in reg_field]
+ elseif reg_field !== nothing
+ pkgerror("Expected `registries` field to be a String or Vector{String}, got $(typeof(reg_field)).")
+ end
deps = read_deps(get(info::Dict, "deps", nothing)::Union{Nothing, Dict{String, Any}, Vector{String}})
weakdeps = read_deps(get(info::Dict, "weakdeps", nothing)::Union{Nothing, Dict{String, Any}, Vector{String}})
- entry.exts = get(Dict{String, String}, info, "extensions")
+ entry.apps = read_apps(get(info::Dict, "apps", nothing)::Union{Nothing, Dict{String, Any}})
+ entry.exts = read_exts(get(info, "extensions", nothing)::Union{Nothing, Dict{String, Any}})
+ syntax = get(info, "syntax", nothing)::Union{Dict{String, Any}, Nothing}
+ if syntax !== nothing
+ entry.julia_syntax_version = read_field("julia_version", nothing, syntax, safe_version)
+ end
catch
# TODO: Should probably not unconditionally log something
# @debug "Could not parse manifest entry for `$name`" f_or_io
@@ -195,14 +279,23 @@ function Manifest(raw::Dict{String, Any}, f_or_io::Union{String, IO})::Manifest
# by this point, all the fields of the `PackageEntry`s have been type casted
# but we have *not* verified the _graph_ structure of the manifest
end
+ registries = Dict{String, ManifestRegistryEntry}()
+ if haskey(raw, "registries")
+ regs_raw = raw["registries"]::Dict{String, Any}
+ for (reg_id, info_any) in regs_raw
+ info = info_any::Dict{String, Any}
+ registries[reg_id] = read_registry_entry(reg_id, info)
+ end
+ end
+
other = Dict{String, Any}()
for (k, v) in raw
- if k in ("julia_version", "deps", "manifest_format")
+ if k in ("julia_version", "deps", "manifest_format", "registries")
continue
end
other[k] = v
end
- return validate_manifest(julia_version, project_hash, manifest_format, stage1, other)
+ return validate_manifest(julia_version, project_hash, manifest_format, stage1, other, registries, f_or_io)
end
function read_manifest(f_or_io::Union{String, IO})
@@ -229,11 +322,10 @@ function read_manifest(f_or_io::Union{String, IO})
end
function convert_v1_format_manifest(old_raw_manifest::Dict)
- new_raw_manifest = Dict{String, Any}(
- "deps" => old_raw_manifest,
- "manifest_format" => "1.0.0" # must be a string here to match raw dict
- # don't set julia_version as it is unknown in old manifests
- )
+ new_raw_manifest = Dict{String, Any}()
+ new_raw_manifest["deps"] = old_raw_manifest
+ new_raw_manifest["manifest_format"] = "1.0.0" # must be a string here to match raw dict
+ # don't set julia_version as it is unknown in old manifests
return new_raw_manifest
end
@@ -241,24 +333,28 @@ end
# WRITING #
###########
function destructure(manifest::Manifest)::Dict
- function entry!(entry, key, value; default=nothing)
- if value == default
+ function entry!(entry, key, value; default = nothing)
+ return if value == default
delete!(entry, key)
else
entry[key] = value
end
end
- unique_name = Dict{String,Bool}()
+ if !isempty(manifest.registries) && manifest.manifest_format < v"2.1.0"
+ manifest.manifest_format = v"2.1.0"
+ end
+
+ unique_name = Dict{String, Bool}()
for (uuid, entry) in manifest
unique_name[entry.name] = !haskey(unique_name, entry.name)
end
# maintain the format of the manifest when writing
if manifest.manifest_format.major == 1
- raw = Dict{String,Vector{Dict{String,Any}}}()
+ raw = Dict{String, Vector{Dict{String, Any}}}()
elseif manifest.manifest_format.major == 2
- raw = Dict{String,Any}()
+ raw = Dict{String, Any}()
if !isnothing(manifest.julia_version)
raw["julia_version"] = manifest.julia_version
end
@@ -266,31 +362,58 @@ function destructure(manifest::Manifest)::Dict
raw["project_hash"] = manifest.project_hash
end
raw["manifest_format"] = string(manifest.manifest_format.major, ".", manifest.manifest_format.minor)
- raw["deps"] = Dict{String,Vector{Dict{String,Any}}}()
+ raw["deps"] = Dict{String, Vector{Dict{String, Any}}}()
for (k, v) in manifest.other
raw[k] = v
end
+ if !isempty(manifest.registries)
+ regs = Dict{String, Any}()
+ for (id, entry) in manifest.registries
+ regs[id] = registry_entry_toml(entry)
+ end
+ raw["registries"] = regs
+ end
end
for (uuid, entry) in manifest
- new_entry = something(entry.other, Dict{String,Any}())
+ # https://github.com/JuliaLang/Pkg.jl/issues/4086
+ @assert !(entry.tree_hash !== nothing && entry.path !== nothing)
+
+ new_entry = something(entry.other, Dict{String, Any}())
new_entry["uuid"] = string(uuid)
entry!(new_entry, "version", entry.version)
entry!(new_entry, "git-tree-sha1", entry.tree_hash)
- entry!(new_entry, "pinned", entry.pinned; default=false)
+ entry!(new_entry, "pinned", entry.pinned; default = false)
path = entry.path
- if path !== nothing && Sys.iswindows() && !isabspath(path)
- path = join(splitpath(path), "/")
+ if path !== nothing
+ path = normalize_path_for_toml(path)
end
entry!(new_entry, "path", path)
entry!(new_entry, "entryfile", entry.entryfile)
repo_source = entry.repo.source
- if repo_source !== nothing && Sys.iswindows() && !isabspath(repo_source) && !isurl(repo_source)
- repo_source = join(splitpath(repo_source), "/")
+ if repo_source !== nothing && !isurl(repo_source)
+ repo_source = normalize_path_for_toml(repo_source)
end
entry!(new_entry, "repo-url", repo_source)
entry!(new_entry, "repo-rev", entry.repo.rev)
entry!(new_entry, "repo-subdir", entry.repo.subdir)
+ syntax_ver = entry.julia_syntax_version
+ if syntax_ver !== nothing
+ entry!(new_entry, "syntax", Dict("julia_version" => string(syntax_ver)))
+ end
+
+ # Write registries as a vector (or nothing if empty)
+ if !isempty(entry.registries)
+ if length(entry.registries) == 1
+ # For backwards compatibility, write a single registry as a string
+ entry!(new_entry, "registries", entry.registries[1])
+ else
+ entry!(new_entry, "registries", entry.registries)
+ end
+ else
+ delete!(new_entry, "registries")
+ delete!(new_entry, "registry") # Remove old field if present
+ end
for (deptype, depname) in [(entry.deps, "deps"), (entry.weakdeps, "weakdeps")]
if isempty(deptype)
delete!(new_entry, depname)
@@ -298,7 +421,7 @@ function destructure(manifest::Manifest)::Dict
if all(dep -> haskey(unique_name, first(dep)), deptype) && all(dep -> unique_name[first(dep)], deptype)
new_entry[depname] = sort(collect(keys(deptype)))
else
- new_entry[depname] = Dict{String,String}()
+ new_entry[depname] = Dict{String, String}()
for (name, uuid) in deptype
new_entry[depname][name] = string(uuid)
end
@@ -310,24 +433,41 @@ function destructure(manifest::Manifest)::Dict
if !isempty(entry.exts)
entry!(new_entry, "extensions", entry.exts)
end
+
+ if !isempty(entry.apps)
+ new_entry["apps"] = Dict{String, Any}()
+ for (appname, appinfo) in entry.apps
+ julia_command = @something appinfo.julia_command joinpath(Sys.BINDIR, "julia" * (Sys.iswindows() ? ".exe" : ""))
+ app_dict = Dict{String, Any}("julia_command" => julia_command)
+ if appinfo.submodule !== nothing
+ app_dict["submodule"] = appinfo.submodule
+ end
+ if !isempty(appinfo.julia_flags)
+ app_dict["julia_flags"] = appinfo.julia_flags
+ end
+ new_entry["apps"][appname] = app_dict
+ end
+ end
if manifest.manifest_format.major == 1
- push!(get!(raw, entry.name, Dict{String,Any}[]), new_entry)
+ push!(get!(raw, entry.name, Dict{String, Any}[]), new_entry)
elseif manifest.manifest_format.major == 2
- push!(get!(raw["deps"], entry.name, Dict{String,Any}[]), new_entry)
+ push!(get!(raw["deps"], entry.name, Dict{String, Any}[]), new_entry)
end
end
return raw
end
function write_manifest(env::EnvCache)
+ if env.project.readonly
+ pkgerror("Cannot write to readonly manifest file at $(env.manifest_file)")
+ end
mkpath(dirname(env.manifest_file))
- write_manifest(env.manifest, env.manifest_file)
+ return write_manifest(env.manifest, env.manifest_file)
end
function write_manifest(manifest::Manifest, manifest_file::AbstractString)
if manifest.manifest_format.major == 1
- @warn """The active manifest file at `$(manifest_file)` has an old format that is being maintained.
- To update to the new format, which is supported by Julia versions ≥ 1.6.2, run `import Pkg; Pkg.upgrade_manifest()` which will upgrade the format without re-resolving.
- To then record the julia version re-resolve with `Pkg.resolve()` and if there are resolve conflicts consider `Pkg.update()`.""" maxlog = 1 _id = Symbol(manifest_file)
+ @warn """The active manifest file at `$(manifest_file)` has an old format.
+ Any package operation (add, remove, update, etc.) will automatically upgrade it to format v2.1.""" maxlog = 1 _id = Symbol(manifest_file)
end
return write_manifest(destructure(manifest), manifest_file)
end
@@ -336,7 +476,7 @@ function write_manifest(io::IO, manifest::Manifest)
end
function write_manifest(io::IO, raw_manifest::Dict)
print(io, "# This file is machine-generated - editing it directly is not advised\n\n")
- TOML.print(io, raw_manifest, sorted=true) do x
+ TOML.print(io, raw_manifest, sorted = true) do x
(typeof(x) in [String, Nothing, UUID, SHA1, VersionNumber]) && return string(x)
error("unhandled type `$(typeof(x))`")
end
@@ -344,7 +484,8 @@ function write_manifest(io::IO, raw_manifest::Dict)
end
function write_manifest(raw_manifest::Dict, manifest_file::AbstractString)
str = sprint(write_manifest, raw_manifest)
- write(manifest_file, str)
+ mkpath(dirname(manifest_file))
+ return write(manifest_file, str)
end
############
@@ -374,7 +515,7 @@ function check_manifest_julia_version_compat(manifest::Manifest, manifest_file::
return
end
end
- if Base.thisminor(v) != Base.thisminor(VERSION)
+ return if Base.thisminor(v) != Base.thisminor(VERSION)
msg = """The active manifest file has dependencies that were resolved with a different julia \
version ($(manifest.julia_version)). Unexpected behavior may occur."""
if julia_version_strict
diff --git a/src/precompile.jl b/src/precompile.jl
index 761f64efda..9f3e6313e1 100644
--- a/src/precompile.jl
+++ b/src/precompile.jl
@@ -104,92 +104,122 @@ let
original_load_path = copy(LOAD_PATH)
Pkg.UPDATED_REGISTRY_THIS_SESSION[] = true
- # Default 30 sec grace period means we hang 30 seconds before precompiling finishes
- DEFAULT_IO[] = unstableio(devnull)
- Downloads.DOWNLOADER[] = Downloads.Downloader(; grace = 1.0)
+ @Base.ScopedValues.with Pkg.DEFAULT_IO => Pkg.unstableio(devnull) begin
+ # Default 30 sec grace period means we hang 30 seconds before precompiling finishes
+ Downloads.DOWNLOADER[] = Downloads.Downloader(; grace = 1.0)
- # We need to override JULIA_PKG_UNPACK_REGISTRY to fix https://github.com/JuliaLang/Pkg.jl/issues/3663
- withenv("JULIA_PKG_SERVER" => nothing, "JULIA_PKG_UNPACK_REGISTRY" => nothing) do
- tmp = _run_precompilation_script_setup()
- cd(tmp) do
- withenv("JULIA_PKG_PRECOMPILE_AUTO" => 0) do
- Pkg.add("TestPkg")
- Pkg.develop(Pkg.PackageSpec(path = "TestPkg.jl"))
- Pkg.add(Pkg.PackageSpec(path = "TestPkg.jl/"))
- Pkg.update(; update_registry = false)
- Pkg.status()
- pkgs_path = pkgdir(Pkg, "test", "test_packages")
- # Precompile a diverse set of test packages
- # Check all test packages occasionally if anything has been missed
- # test_packages = readdir(pkgs_path)
- test_packages = (
- "ActiveProjectInTestSubgraph",
- "BasicSandbox",
- "DependsOnExample",
- "PackageWithDependency",
- "SameNameDifferentUUID",
- "SimplePackage",
- "BasicCompat",
- "PackageWithDependency",
- "SameNameDifferentUUID",
- "SimplePackage",
- joinpath("ExtensionExamples", "HasExtensions.jl"),
- )
- for test_package in test_packages
- Pkg.activate(joinpath(pkgs_path, test_package))
+ # We need to override JULIA_PKG_UNPACK_REGISTRY to fix https://github.com/JuliaLang/Pkg.jl/issues/3663
+ withenv("JULIA_PKG_SERVER" => nothing, "JULIA_PKG_UNPACK_REGISTRY" => nothing) do
+ tmp = _run_precompilation_script_setup()
+ cd(tmp) do
+ withenv("JULIA_PKG_PRECOMPILE_AUTO" => 0) do
+ Pkg.add("TestPkg")
+ Pkg.develop(Pkg.PackageSpec(path = "TestPkg.jl"))
+ Pkg.add(Pkg.PackageSpec(path = "TestPkg.jl/"))
+ Pkg.update(; update_registry = false)
+ Pkg.status()
+ pkgs_path = pkgdir(Pkg, "test", "test_packages")
+ # Precompile a diverse set of test packages
+ # Check all test packages occasionally if anything has been missed
+ # test_packages = readdir(pkgs_path)
+ test_packages = (
+ "ActiveProjectInTestSubgraph",
+ "BasicSandbox",
+ "DependsOnExample",
+ "PackageWithDependency",
+ "SameNameDifferentUUID",
+ "SimplePackage",
+ "BasicCompat",
+ "PackageWithDependency",
+ "SameNameDifferentUUID",
+ "SimplePackage",
+ joinpath("ExtensionExamples", "HasExtensions.jl"),
+ )
+ for test_package in test_packages
+ Pkg.activate(joinpath(pkgs_path, test_package))
+ end
+ Pkg.activate(; temp = true)
+ Pkg.activate()
+ Pkg.activate("TestPkg.jl")
end
- Pkg.activate(; temp = true)
- Pkg.activate()
- Pkg.activate("TestPkg.jl")
+ Pkg.precompile()
end
- Pkg.precompile()
- end
- try
- Base.rm(tmp; recursive = true)
- catch
- end
+ try
+ Base.rm(tmp; recursive = true)
+ catch
+ end
+
+ Base.precompile(Tuple{typeof(Pkg.API.status)})
+ Base.precompile(Tuple{typeof(Pkg.Types.read_project_compat), Base.Dict{String, Any}, Pkg.Types.Project})
+ Base.precompile(Tuple{typeof(Pkg.Versions.semver_interval), Base.RegexMatch})
+
+ Base.precompile(Tuple{typeof(Pkg.REPLMode.do_cmds), Array{Pkg.REPLMode.Command, 1}, Base.TTY})
+
+ Base.precompile(Tuple{typeof(Pkg.Types.read_project_workspace), Base.Dict{String, Any}, Pkg.Types.Project})
+ Base.precompile(Tuple{Type{Pkg.REPLMode.QString}, String, Bool})
+ Base.precompile(Tuple{typeof(Pkg.REPLMode.parse_package), Array{Pkg.REPLMode.QString, 1}, Base.Dict{Symbol, Any}})
+ Base.precompile(Tuple{Type{Pkg.REPLMode.Command}, Pkg.REPLMode.CommandSpec, Base.Dict{Symbol, Any}, Array{Pkg.Types.PackageSpec, 1}})
- Base.precompile(Tuple{typeof(Pkg.API.status)})
- Base.precompile(Tuple{typeof(Pkg.Types.read_project_compat),Base.Dict{String,Any},Pkg.Types.Project,},)
- Base.precompile(Tuple{typeof(Pkg.Versions.semver_interval),Base.RegexMatch})
+ # Manually added from trace compiling Pkg.status.
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:color,), Tuple{Symbol}}, typeof(Base.printstyled), Base.IOContext{Base.GenericIOBuffer{Memory{UInt8}}}, Char})
+ Base.precompile(Tuple{typeof(Base.join), Base.GenericIOBuffer{Memory{UInt8}}, Tuple{UInt64}, Char})
+ Base.precompile(Tuple{typeof(Base.empty), Base.Dict{Any, Any}, Type{String}, Type{Base.UUID}})
+ Base.precompile(Tuple{typeof(Base.join), Base.GenericIOBuffer{Memory{UInt8}}, Tuple{UInt32}, Char})
+ Base.precompile(Tuple{typeof(Base.unsafe_read), Base.PipeEndpoint, Ptr{UInt8}, UInt64})
+ Base.precompile(Tuple{typeof(Base.readbytes!), Base.PipeEndpoint, Array{UInt8, 1}, Int64})
+ Base.precompile(Tuple{typeof(Base.closewrite), Base.PipeEndpoint})
+ Base.precompile(Tuple{typeof(Base.convert), Type{Base.Dict{String, Union{Array{String, 1}, String}}}, Base.Dict{String, Any}})
+ Base.precompile(Tuple{typeof(Base.map), Function, Array{Any, 1}})
+ Base.precompile(Tuple{Type{Array{Dates.DateTime, 1}}, UndefInitializer, Tuple{Int64}})
+ Base.precompile(Tuple{typeof(Base.maximum), Array{Dates.DateTime, 1}})
+ Base.precompile(Tuple{Type{Pair{A, B} where {B} where {A}}, String, Dates.DateTime})
+ Base.precompile(Tuple{typeof(Base.map), Function, Array{Base.Dict{String, Dates.DateTime}, 1}})
+ Base.precompile(Tuple{typeof(TOML.Internals.Printer.is_array_of_tables), Array{Base.Dict{String, Dates.DateTime}, 1}})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:indent, :sorted, :by, :inline_tables), Tuple{Int64, Bool, typeof(Base.identity), Base.IdSet{Base.Dict{String, V} where {V}}}}, typeof(TOML.Internals.Printer.print_table), Nothing, Base.IOStream, Base.Dict{String, Dates.DateTime}, Array{String, 1}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Base.Dict{String, Base.UUID}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Base.Dict{String, Union{Array{String, 1}, String}}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Base.Dict{String, Array{String, 1}}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Base.Dict{String, Base.Dict{String, String}}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Tuple{String}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{Type{Memory{Pkg.Types.PackageSpec}}, UndefInitializer, Int64})
- Base.precompile(Tuple{typeof(Pkg.REPLMode.do_cmds), Array{Pkg.REPLMode.Command, 1}, Base.TTY})
+ # Manually added from trace compiling Pkg.add
+ # Why needed? Something with constant prop overspecialization?
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:io, :update_cooldown), Tuple{Base.IOContext{IO}, Dates.Day}}, typeof(Pkg.Registry.update)})
- Base.precompile(Tuple{typeof(Pkg.Types.read_project_workspace), Base.Dict{String, Any}, Pkg.Types.Project})
- Base.precompile(Tuple{Type{Pkg.REPLMode.QString}, String, Bool})
- Base.precompile(Tuple{typeof(Pkg.REPLMode.parse_package), Array{Pkg.REPLMode.QString, 1}, Base.Dict{Symbol, Any}})
- Base.precompile(Tuple{Type{Pkg.REPLMode.Command}, Pkg.REPLMode.CommandSpec, Base.Dict{Symbol, Any}, Array{Pkg.Types.PackageSpec, 1}})
+ Base.precompile(Tuple{Type{Memory{Pkg.Types.PackageSpec}}, UndefInitializer, Int64})
+ Base.precompile(Tuple{typeof(Base.hash), Tuple{String, UInt64}, UInt64})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:context,), Tuple{Base.TTY}}, typeof(Base.sprint), Function, Tuple{Pkg.Versions.VersionSpec}})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:context,), Tuple{Base.TTY}}, typeof(Base.sprint), Function, Tuple{String}})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:context,), Tuple{Base.TTY}}, typeof(Base.sprint), Function, Tuple{Base.VersionNumber}})
+ Base.precompile(Tuple{typeof(Base.join), Base.IOContext{Base.GenericIOBuffer{Memory{UInt8}}}, Tuple{String, UInt64}, Char})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{2}, Base.BitArray{2}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{2}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{2}, Base.BitArray{2}, Base.BitArray{2}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{2}, Base.BitArray{2}, Base.BitArray{2}, Vararg{Base.BitArray{2}}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{1}, Base.BitArray{1}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{1}, Base.BitArray{1}, Base.BitArray{1}, Vararg{Base.BitArray{1}}})
+ Base.precompile(Tuple{typeof(Base.:(==)), Base.Dict{String, Any}, Base.Dict{String, Any}})
+ Base.precompile(Tuple{typeof(Base.join), Base.GenericIOBuffer{Memory{UInt8}}, Tuple{String}, Char})
+ Base.precompile(Tuple{typeof(Base.values), Base.Dict{String, Array{Base.Dict{String, Any}, 1}}})
+ Base.precompile(Tuple{typeof(TOML.Internals.Printer.is_array_of_tables), Array{Base.Dict{String, Any}, 1}})
+ Base.precompile(Tuple{Type{Array{Dates.DateTime, 1}}, UndefInitializer, Tuple{Int64}})
+ Base.precompile(Tuple{Type{Pair{A, B} where {B} where {A}}, String, Dates.DateTime})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:internal_call, :strict, :warn_loaded, :timing, :_from_loading, :configs, :manifest, :io, :detachable), Tuple{Bool, Bool, Bool, Bool, Bool, Pair{Base.Cmd, Base.CacheFlags}, Bool, Base.TTY, Bool}}, typeof(Base.Precompilation.precompilepkgs), Array{String, 1}})
+ ################
+ end
end
copy!(DEPOT_PATH, original_depot_path)
copy!(LOAD_PATH, original_load_path)
return nothing
end
- # Copied from REPL (originally PrecompileTools.jl)
- function check_edges(node)
- parentmi = node.mi_info.mi
- for child in node.children
- childmi = child.mi_info.mi
- if !(isdefined(childmi, :backedges) && parentmi ∈ childmi.backedges)
- Base.precompile(childmi.specTypes)
- end
- check_edges(child)
- end
- end
-
if Base.generating_output() && Base.JLOptions().use_pkgimages != 0
- Core.Compiler.Timings.reset_timings()
- Core.Compiler.__set_measure_typeinf(true)
+ ccall(:jl_tag_newly_inferred_enable, Cvoid, ())
try
pkg_precompile()
finally
- Core.Compiler.__set_measure_typeinf(false)
- Core.Compiler.Timings.close_current_timer()
- end
- roots = Core.Compiler.Timings._timings[1].children
- for child in roots
- Base.precompile(child.mi_info.mi.specTypes)
- check_edges(child)
+ ccall(:jl_tag_newly_inferred_disable, Cvoid, ())
end
end
end
diff --git a/src/project.jl b/src/project.jl
index f7a7e83757..c73f4b3b90 100644
--- a/src/project.jl
+++ b/src/project.jl
@@ -4,19 +4,23 @@
listed_deps(project::Project; include_weak::Bool) =
vcat(collect(keys(project.deps)), collect(keys(project.extras)), include_weak ? collect(keys(project.weakdeps)) : String[])
-function get_path_repo(project::Project, name::String)
+function get_path_repo(project::Project, project_file::String, manifest_file::String, name::String)
source = get(project.sources, name, nothing)
if source === nothing
return nothing, GitRepo()
end
- path = get(source, "path", nothing)::Union{String, Nothing}
- url = get(source, "url", nothing)::Union{String, Nothing}
- rev = get(source, "rev", nothing)::Union{String, Nothing}
+ path = get(source, "path", nothing)::Union{String, Nothing}
+ url = get(source, "url", nothing)::Union{String, Nothing}
+ rev = get(source, "rev", nothing)::Union{String, Nothing}
subdir = get(source, "subdir", nothing)::Union{String, Nothing}
if path !== nothing && url !== nothing
pkgerror("`path` and `url` are conflicting specifications")
end
repo = GitRepo(url, rev, subdir)
+ # Convert path from project-relative to manifest-relative
+ if path !== nothing
+ path = project_path_to_manifest_path(project_file, manifest_file, path)
+ end
return path, repo
end
@@ -25,7 +29,8 @@ end
###########
read_project_uuid(::Nothing) = nothing
function read_project_uuid(uuid::String)
- try uuid = UUID(uuid)
+ try
+ uuid = UUID(uuid)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse project UUID as a UUID")
@@ -36,7 +41,8 @@ read_project_uuid(uuid) = pkgerror("Expected project UUID to be a string")
read_project_version(::Nothing) = nothing
function read_project_version(version::String)
- try version = VersionNumber(version)
+ return try
+ version = VersionNumber(version)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse project version as a version")
@@ -44,9 +50,9 @@ function read_project_version(version::String)
end
read_project_version(version) = pkgerror("Expected project version to be a string")
-read_project_deps(::Nothing, section::String) = Dict{String,UUID}()
-function read_project_deps(raw::Dict{String,Any}, section_name::String)
- deps = Dict{String,UUID}()
+read_project_deps(::Nothing, section::String) = Dict{String, UUID}()
+function read_project_deps(raw::Dict{String, Any}, section_name::String)
+ deps = Dict{String, UUID}()
for (name, uuid) in raw
try
uuid = UUID(uuid)
@@ -62,38 +68,67 @@ function read_project_deps(raw, section_name::String)
pkgerror("Expected `$(section_name)` section to be a key-value list")
end
-read_project_targets(::Nothing, project::Project) = Dict{String,Any}()
-function read_project_targets(raw::Dict{String,Any}, project::Project)
+read_project_targets(::Nothing, project::Project) = Dict{String, Vector{String}}()
+function read_project_targets(raw::Dict{String, Any}, project::Project)
+ targets = Dict{String, Vector{String}}()
for (target, deps) in raw
- deps isa Vector{String} || pkgerror("""
- Expected value for target `$target` to be a list of dependency names.
- """)
+ deps isa Vector{String} || pkgerror(
+ """
+ Expected value for target `$target` to be a list of dependency names.
+ """
+ )
+ targets[target] = deps
end
- return raw
+ return targets
end
read_project_targets(raw, project::Project) =
pkgerror("Expected `targets` section to be a key-value list")
-read_project_compat(::Nothing, project::Project) = Dict{String,Compat}()
-function read_project_compat(raw::Dict{String,Any}, project::Project)
- compat = Dict{String,Compat}()
+read_project_apps(::Nothing, project::Project) = Dict{String, AppInfo}()
+function read_project_apps(raw::Dict{String, Any}, project::Project)
+ other = raw
+ appinfos = Dict{String, AppInfo}()
+ for (name, info) in raw
+ info isa Dict{String, Any} || pkgerror(
+ """
+ Expected value for app `$name` to be a dictionary.
+ """
+ )
+ submodule = get(info, "submodule", nothing)
+ julia_flags_raw = get(info, "julia_flags", nothing)
+ julia_flags = if julia_flags_raw === nothing
+ String[]
+ elseif julia_flags_raw isa Vector
+ String[flag::String for flag in julia_flags_raw]
+ else
+ pkgerror("Expected `julia_flags` for app `$name` to be an array of strings")
+ end
+ appinfos[name] = AppInfo(name, nothing, submodule, julia_flags, other)
+ end
+ return appinfos
+end
+
+read_project_compat(::Nothing, project::Project; file = nothing) = Dict{String, Compat}()
+function read_project_compat(raw::Dict{String, Any}, project::Project; file = nothing)
+ compat = Dict{String, Compat}()
+ location_string = file === nothing ? "" : " in $(repr(file))"
for (name, version) in raw
version = version::String
try
compat[name] = Compat(semver_spec(version), version)
catch err
- pkgerror("Could not parse compatibility version for dependency `$name`")
+ pkgerror("Could not parse compatibility version spec $(repr(version)) for dependency `$name`$location_string")
end
end
return compat
end
-read_project_compat(raw, project::Project) =
- pkgerror("Expected `compat` section to be a key-value list")
+read_project_compat(raw, project::Project; file = nothing) =
+ pkgerror("Expected `compat` section to be a key-value list" * (file === nothing ? "" : " in $(repr(file))"))
-read_project_sources(::Nothing, project::Project) = Dict{String,Any}()
-function read_project_sources(raw::Dict{String,Any}, project::Project)
- valid_keys = ("path", "url", "rev")
- sources = Dict{String,Any}()
+read_project_sources(::Nothing, project::Project) = Dict{String, Dict{String, String}}()
+function read_project_sources(raw::Dict{String, Any}, project::Project)
+ valid_keys = ("path", "url", "rev", "subdir")
+ sources = Dict{String, Dict{String, String}}()
for (name, source) in raw
if !(source isa AbstractDict)
pkgerror("Expected `source` section to be a table")
@@ -109,9 +144,9 @@ function read_project_sources(raw::Dict{String,Any}, project::Project)
return sources
end
-read_project_workspace(::Nothing, project::Project) = Dict{String,Any}()
+read_project_workspace(::Nothing, project::Project) = Dict{String, Any}()
function read_project_workspace(raw::Dict, project::Project)
- workspace_table = Dict{String,Any}()
+ workspace_table = Dict{String, Any}()
for (key, val) in raw
if key == "projects"
for path in val
@@ -128,7 +163,7 @@ read_project_workspace(raw, project::Project) =
pkgerror("Expected `workspace` section to be a key-value list")
-function validate(project::Project; file=nothing)
+function validate(project::Project; file = nothing)
# deps
location_string = file === nothing ? "" : " at $(repr(file))."
dep_uuids = collect(values(project.deps))
@@ -154,14 +189,16 @@ function validate(project::Project; file=nothing)
end
=#
# targets
- listed = listed_deps(project; include_weak=true)
+ listed = listed_deps(project; include_weak = true)
for (target, deps) in project.targets, dep in deps
if length(deps) != length(unique(deps))
pkgerror("A dependency was named twice in target `$target`")
end
- dep in listed || pkgerror("""
+ dep in listed || pkgerror(
+ """
Dependency `$dep` in target `$target` not listed in `deps`, `weakdeps` or `extras` section
- """ * location_string)
+ """ * location_string
+ )
end
# compat
for name in keys(project.compat)
@@ -169,37 +206,42 @@ function validate(project::Project; file=nothing)
name in listed ||
pkgerror("Compat `$name` not listed in `deps`, `weakdeps` or `extras` section" * location_string)
end
- # sources
- listed_nonweak = listed_deps(project; include_weak=false)
- for name in keys(project.sources)
+ # sources
+ listed_nonweak = listed_deps(project; include_weak = false)
+ for name in keys(project.sources)
name in listed_nonweak ||
pkgerror("Sources for `$name` not listed in `deps` or `extras` section" * location_string)
end
+ return
end
-function Project(raw::Dict; file=nothing)
+function Project(raw::Dict; file = nothing)
project = Project()
- project.other = raw
- project.name = get(raw, "name", nothing)::Union{String, Nothing}
+ project.other = raw
+ project.name = get(raw, "name", nothing)::Union{String, Nothing}
project.manifest = get(raw, "manifest", nothing)::Union{String, Nothing}
- project.entryfile = get(raw, "path", nothing)::Union{String, Nothing}
+ project.entryfile = get(raw, "path", nothing)::Union{String, Nothing}
if project.entryfile === nothing
project.entryfile = get(raw, "entryfile", nothing)::Union{String, Nothing}
end
- project.uuid = read_project_uuid(get(raw, "uuid", nothing))
- project.version = read_project_version(get(raw, "version", nothing))
- project.deps = read_project_deps(get(raw, "deps", nothing), "deps")
+ project.uuid = read_project_uuid(get(raw, "uuid", nothing))
+ project.version = read_project_version(get(raw, "version", nothing))
+ project.deps = read_project_deps(get(raw, "deps", nothing), "deps")
project.weakdeps = read_project_deps(get(raw, "weakdeps", nothing), "weakdeps")
- project.exts = get(Dict{String, String}, raw, "extensions")
- project.sources = read_project_sources(get(raw, "sources", nothing), project)
- project.extras = read_project_deps(get(raw, "extras", nothing), "extras")
- project.compat = read_project_compat(get(raw, "compat", nothing), project)
- project.targets = read_project_targets(get(raw, "targets", nothing), project)
+ project.exts = get(Dict{String, String}, raw, "extensions")
+ project.sources = read_project_sources(get(raw, "sources", nothing), project)
+ project.extras = read_project_deps(get(raw, "extras", nothing), "extras")
+ project.compat = read_project_compat(get(raw, "compat", nothing), project; file)
+ project.targets = read_project_targets(get(raw, "targets", nothing), project)
project.workspace = read_project_workspace(get(raw, "workspace", nothing), project)
+ project.apps = read_project_apps(get(raw, "apps", nothing), project)
+ project.readonly = get(raw, "readonly", false)::Bool
+ syntax = get(raw, "syntax", nothing)::Union{Dict, Nothing}
+ project.julia_syntax_version = syntax === nothing ? nothing : read_project_version(get(syntax, "julia_version", nothing))
# Handle deps in both [deps] and [weakdeps]
project._deps_weak = Dict(intersect(project.deps, project.weakdeps))
- filter!(p->!haskey(project._deps_weak, p.first), project.deps)
+ filter!(p -> !haskey(project._deps_weak, p.first), project.deps)
validate(project; file)
return project
end
@@ -217,7 +259,7 @@ function read_project(f_or_io::Union{String, IO})
end
pkgerror("Errored when reading $f_or_io, got: ", sprint(showerror, e))
end
- return Project(raw; file= f_or_io isa IO ? nothing : f_or_io)
+ return Project(raw; file = f_or_io isa IO ? nothing : f_or_io)
end
@@ -237,32 +279,60 @@ function destructure(project::Project)::Dict
# if a field is set to its default value, don't include it in the write
function entry!(key::String, src)
should_delete(x::Dict) = isempty(x)
- should_delete(x) = x === nothing
- should_delete(src) ? delete!(raw, key) : (raw[key] = src)
+ should_delete(x) = x === nothing
+ return should_delete(src) ? delete!(raw, key) : (raw[key] = src)
end
- entry!("name", project.name)
- entry!("uuid", project.uuid)
- entry!("version", project.version)
+ entry!("name", project.name)
+ entry!("uuid", project.uuid)
+ entry!("version", project.version)
entry!("workspace", project.workspace)
entry!("manifest", project.manifest)
- entry!("entryfile", project.entryfile)
- entry!("deps", merge(project.deps, project._deps_weak))
+ entry!("entryfile", project.entryfile)
+ entry!("deps", merge(project.deps, project._deps_weak))
entry!("weakdeps", project.weakdeps)
- entry!("sources", project.sources)
- entry!("extras", project.extras)
- entry!("compat", Dict(name => x.str for (name, x) in project.compat))
- entry!("targets", project.targets)
+
+ # Normalize paths in sources to use forward slashes on Windows (matching Manifest.toml behavior)
+ normalized_sources = project.sources
+ if !isempty(project.sources)
+ normalized_sources = Dict{String, Dict{String, String}}()
+ for (name, source) in project.sources
+ normalized_source = copy(source)
+ path = get(source, "path", nothing)
+ if path !== nothing
+ normalized_source["path"] = normalize_path_for_toml(path)
+ end
+ normalized_sources[name] = normalized_source
+ end
+ end
+ entry!("sources", normalized_sources)
+ entry!("extras", project.extras)
+ entry!("compat", Dict(name => x.str for (name, x) in project.compat))
+ entry!("targets", project.targets)
+ entry!(
+ "syntax", project.julia_syntax_version === nothing ? nothing :
+ Dict("julia_version" => string(project.julia_syntax_version))
+ )
+
+ # Only write readonly if it's true (not the default false)
+ if project.readonly
+ raw["readonly"] = true
+ else
+ delete!(raw, "readonly")
+ end
+
return raw
end
-const _project_key_order = ["name", "uuid", "keywords", "license", "desc", "version", "workspace", "deps", "weakdeps", "sources", "extensions", "compat"]
+const _project_key_order = ["name", "uuid", "keywords", "license", "desc", "version", "readonly", "workspace", "deps", "weakdeps", "sources", "extensions", "compat"]
project_key_order(key::String) =
something(findfirst(x -> x == key, _project_key_order), length(_project_key_order) + 1)
-function write_project(env::EnvCache)
- mkpath(dirname(env.project_file))
- write_project(env.project, env.project_file)
+function write_project(env::EnvCache, skip_readonly_check::Bool = false)
+ if env.project.readonly && !skip_readonly_check
+ pkgerror("Cannot write to readonly project file at $(env.project_file)")
+ end
+ return write_project(env.project, env.project_file)
end
write_project(project::Project, project_file::AbstractString) =
write_project(destructure(project), project_file)
@@ -274,7 +344,7 @@ function write_project(io::IO, project::Dict)
push!(inline_tables, source)
end
end
- TOML.print(io, project; inline_tables, sorted=true, by=key -> (project_key_order(key), key)) do x
+ TOML.print(io, project; inline_tables, sorted = true, by = key -> (project_key_order(key), key)) do x
x isa UUID || x isa VersionNumber || pkgerror("unhandled type `$(typeof(x))`")
return string(x)
end
@@ -282,5 +352,6 @@ function write_project(io::IO, project::Dict)
end
function write_project(project::Dict, project_file::AbstractString)
str = sprint(write_project, project)
- write(project_file, str)
+ mkpath(dirname(project_file))
+ return write(project_file, str)
end
diff --git a/src/utils.jl b/src/utils.jl
index 12826de397..6e40c40da8 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -1,9 +1,12 @@
+# "Precompiling" is the longest operation
+const pkgstyle_indent = textwidth(string(:Precompiling))
-function printpkgstyle(io::IO, cmd::Symbol, text::String, ignore_indent::Bool=false; color=:green)
- indent = textwidth(string(:Precompiling)) # "Precompiling" is the longest operation
- ignore_indent && (indent = 0)
- printstyled(io, lpad(string(cmd), indent), color=color, bold=true)
- println(io, " ", text)
+function printpkgstyle(io::IO, cmd::Symbol, text::String, ignore_indent::Bool = false; color = :green)
+ indent = ignore_indent ? 0 : pkgstyle_indent
+ return @lock io begin
+ printstyled(io, lpad(string(cmd), indent), color = color, bold = true)
+ println(io, " ", text)
+ end
end
function linewrap(str::String; io = stdout_f(), padding = 0, width = Base.displaysize(io)[2])
@@ -21,20 +24,33 @@ function linewrap(str::String; io = stdout_f(), padding = 0, width = Base.displa
return lines
end
-const URL_regex = r"((file|git|ssh|http(s)?)|(git@[\w\-\.]+))(:(//)?)([\w\.@\:/\-~]+)(\.git)?(/)?"x
+const URL_regex = r"((file|git|ssh|http(s)?)|([\w\-\.]+@[\w\-\.]+))(:(//)?)([\w\.@\:/\-~]+)(\.git)?(/)?"x
isurl(r::String) = occursin(URL_regex, r)
-stdlib_dir() = normpath(joinpath(Sys.BINDIR::String, "..", "share", "julia", "stdlib", "v$(VERSION.major).$(VERSION.minor)"))
-stdlib_path(stdlib::String) = joinpath(stdlib_dir(), stdlib)
+stdlib_path(stdlib::String) = joinpath(Sys.STDLIB, stdlib)
function pathrepr(path::String)
# print stdlib paths as @stdlib/Name
- if startswith(path, stdlib_dir())
+ if startswith(path, Sys.STDLIB)
path = "@stdlib/" * basename(path)
end
return "`" * Base.contractuser(path) * "`"
end
+"""
+ normalize_path_for_toml(path::String)
+
+Normalize a path for writing to TOML files (Project.toml/Manifest.toml).
+On Windows, converts relative paths to use forward slashes for cross-platform compatibility.
+Absolute paths are left unchanged as they are platform-specific by nature.
+"""
+function normalize_path_for_toml(path::String)
+ if Sys.iswindows() && !isabspath(path)
+ return join(splitpath(path), "/")
+ end
+ return path
+end
+
function set_readonly(path)
for (root, dirs, files) in walkdir(path)
for file in files
@@ -61,9 +77,62 @@ function set_readonly(path)
end
set_readonly(::Nothing) = nothing
+"""
+ mv_temp_dir_retries(temp_dir::String, new_path::String; set_permissions::Bool=true)::Nothing
+
+Either rename the directory at `temp_dir` to `new_path` and set it to read-only
+or, if `new_path` already exists, leave it in place and do nothing. Both `temp_dir` and `new_path` must
+be on the same filesystem.
+"""
+function mv_temp_dir_retries(temp_dir::String, new_path::String; set_permissions::Bool = true)::Nothing
+ # Sometimes a rename can fail because the temp_dir is locked by
+ # anti-virus software scanning the new files.
+ # In this case we want to sleep and try again.
+ # I am using the list of error codes to retry from:
+ # https://github.com/isaacs/node-graceful-fs/blob/234379906b7d2f4c9cfeb412d2516f42b0fb4953/polyfills.js#L87
+ # Retry for up to about 60 seconds by retrying 20 times with exponential backoff.
+ retry = 0
+ max_num_retries = 20 # maybe this should be configurable?
+ sleep_amount = 0.01 # seconds
+ max_sleep_amount = 5.0 # seconds
+ while true
+ isdir(new_path) && return
+ # This next step is like
+ # `mv(temp_dir, new_path)`.
+ # However, `mv` defaults to `cp` if `rename` returns an error.
+ # `cp` is not atomic, so avoid the potential of calling it.
+ err = ccall(:jl_fs_rename, Int32, (Cstring, Cstring), temp_dir, new_path)
+ if err ≥ 0
+ if set_permissions
+ # rename worked
+ new_path_mode = filemode(dirname(new_path))
+ if Sys.iswindows()
+ # If this is Windows, ensure the directory mode is executable,
+ # as `filemode()` is incomplete. Some day, that may not be the
+ # case; there is a test that will fail if this behavior changes.
+ new_path_mode |= 0o111
+ end
+ chmod(new_path, new_path_mode)
+ set_readonly(new_path)
+ end
+ return
+ else
+ # Ignore rename error if `new_path` exists.
+ isdir(new_path) && return
+ if retry < max_num_retries && err ∈ (Base.UV_EACCES, Base.UV_EPERM, Base.UV_EBUSY)
+ sleep(sleep_amount)
+ sleep_amount = min(sleep_amount * 2.0, max_sleep_amount)
+ retry += 1
+ else
+ Base.uv_error("rename of $(repr(temp_dir)) to $(repr(new_path))", err)
+ end
+ end
+ end
+ return
+end
+
# try to call realpath on as much as possible
function safe_realpath(path)
- isempty(path) && return path
if ispath(path)
try
return realpath(path)
@@ -72,31 +141,77 @@ function safe_realpath(path)
end
end
a, b = splitdir(path)
+ # path cannot be reduced at the root or drive, avoid stack overflow
+ isempty(b) && return path
return joinpath(safe_realpath(a), b)
end
# Windows sometimes throw on `isdir`...
function isdir_nothrow(path::String)
- try isdir(path)
+ return try
+ isdir(path)
catch e
false
end
end
function isfile_nothrow(path::String)
- try isfile(path)
+ return try
+ isfile(path)
catch e
false
end
end
-function casesensitive_isdir(dir::String)
- dir = abspath(dir)
- lastdir = splitpath(dir)[end]
- isdir_nothrow(dir) && lastdir in readdir(joinpath(dir, ".."))
+
+"""
+ atomic_toml_write(path::String, data; kws...)
+
+Write TOML data to a file atomically by first writing to a temporary file and then moving it into place.
+This prevents "torn" writes if the process is interrupted or if multiple processes write to the same file.
+
+The `kws` are passed to `TOML.print`.
+"""
+function atomic_toml_write(path::String, data; kws...)
+ dir = dirname(path)
+ isempty(dir) && (dir = pwd())
+
+ temp_path, temp_io = mktemp(dir)
+ return try
+ TOML.print(temp_io, data; kws...)
+ close(temp_io)
+ mv(temp_path, path; force = true)
+ catch
+ close(temp_io)
+ rm(temp_path; force = true)
+ rethrow()
+ end
end
## ordering of UUIDs ##
if VERSION < v"1.2.0-DEV.269" # Defined in Base as of #30947
Base.isless(a::UUID, b::UUID) = a.value < b.value
end
+
+function discover_repo(path::AbstractString)
+ dir = abspath(path)
+ stop_dir = homedir()
+ depot = Pkg.depots1()
+
+ while true
+ dir == depot && return nothing
+ gitdir = joinpath(dir, ".git")
+ if isdir(gitdir) || isfile(gitdir)
+ return dir
+ end
+ dir == stop_dir && return nothing
+ parent = dirname(dir)
+ parent == dir && return nothing
+ dir = parent
+ end
+ return
+end
+
+# Resolve a manifest-relative path to an absolute path
+# Note: the name means the input path is manifest-relative; the returned path is absolute
+manifest_rel_path(env, path::String) = normpath(joinpath(dirname(env.manifest_file), path))
diff --git a/test/FakeTerminals.jl b/test/FakeTerminals.jl
index 1fe587b144..c359142c21 100644
--- a/test/FakeTerminals.jl
+++ b/test/FakeTerminals.jl
@@ -8,8 +8,8 @@ mutable struct FakeTerminal <: REPL.Terminals.UnixTerminal
err_stream::Base.IO
hascolor::Bool
raw::Bool
- FakeTerminal(stdin,stdout,stderr,hascolor=true) =
- new(stdin,stdout,stderr,hascolor,false)
+ FakeTerminal(stdin, stdout, stderr, hascolor = true) =
+ new(stdin, stdout, stderr, hascolor, false)
end
REPL.Terminals.hascolor(t::FakeTerminal) = t.hascolor
diff --git a/test/NastyGenerator.jl b/test/NastyGenerator.jl
index 16d52beee1..06fd22a635 100644
--- a/test/NastyGenerator.jl
+++ b/test/NastyGenerator.jl
@@ -29,34 +29,35 @@ Note that the "problematic" output assumes that all non-planted versions will be
uninstallable, which is only the case for some regimes of the parameters (e.g. large
enough d).
"""
-function generate_nasty(n::Int, # size of planted solutions
- m::Int; # size of the graph
- k::Int = 10, # version number limit
- q::Int = 10, # versions per package (upper bound)
- d::Int = 10, # neighbors per package
- seed::Integer = 32524,
- sat::Bool = true # create a satisfiable problem?
- )
+function generate_nasty(
+ n::Int, # size of planted solutions
+ m::Int; # size of the graph
+ k::Int = 10, # version number limit
+ q::Int = 10, # versions per package (upper bound)
+ d::Int = 10, # neighbors per package
+ seed::Integer = 32524,
+ sat::Bool = true # create a satisfiable problem?
+ )
@assert m ≥ n
- d ≤ m-1 || @warn "d=$d, should be ≤ m-1=$(m-1)"
+ d ≤ m - 1 || @warn "d=$d, should be ≤ m-1=$(m - 1)"
Random.seed!(seed)
- allvers = [sort(unique(randvers(k) for j = 1:q)) for i = 1:m]
+ allvers = [sort(unique(randvers(k) for j in 1:q)) for i in 1:m]
- planted1 = [rand(2:length(allvers[i])) for i = 1:n]
+ planted1 = [rand(2:length(allvers[i])) for i in 1:n]
- planted2 = [rand(1:(planted1[i]-1)) for i = 1:n]
+ planted2 = [rand(1:(planted1[i] - 1)) for i in 1:n]
deps = []
problematic = []
# random dependencies
- for i = 1:m, j = 1:length(allvers[i])
+ for i in 1:m, j in 1:length(allvers[i])
if i ≤ n && (planted1[i] == j || planted2[i] == j)
if j == planted1[i]
if i < n
- push!(deps, [pn(i), allvers[i][j], pn(i+1), "$(allvers[i+1][planted1[i+1]])-*"])
+ push!(deps, [pn(i), allvers[i][j], pn(i + 1), "$(allvers[i + 1][planted1[i + 1]])-*"])
else
if !sat
push!(deps, [pn(i), allvers[i][j], pn(1), "0-$(allvers[1][planted2[1]])"])
@@ -66,7 +67,7 @@ function generate_nasty(n::Int, # size of planted solutions
end
else # j == planted2[i]
if i < n
- push!(deps, [pn(i), allvers[i][j], pn(i+1), "0-$(allvers[i+1][planted2[i+1]])"])
+ push!(deps, [pn(i), allvers[i][j], pn(i + 1), "0-$(allvers[i + 1][planted2[i + 1]])"])
else
if !sat
push!(deps, [pn(i), allvers[i][j], pn(1), "$(allvers[1][planted1[1]])-*"])
@@ -79,7 +80,7 @@ function generate_nasty(n::Int, # size of planted solutions
continue
end
- s = shuffle([1:(i-1); (i+1):m])[1:min(d,m-1)]
+ s = shuffle([1:(i - 1); (i + 1):m])[1:min(d, m - 1)]
for a in s
push!(deps, [pn(i), allvers[i][j], pn(a), randvspec(k)])
end
@@ -92,7 +93,7 @@ function generate_nasty(n::Int, # size of planted solutions
# info("SOLUTION: $([(i,planted1[i]) for i = 1:n])")
# info("REST: $([(i,length(allvers[i])+1) for i = (n+1):m])")
- want = Dict(pn(i) => allvers[i][planted1[i]] for i = 1:n)
+ want = Dict(pn(i) => allvers[i][planted1[i]] for i in 1:n)
return deps, reqs, want, problematic
end
diff --git a/test/Project.toml b/test/Project.toml
index 0922624374..4760acba31 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -1,4 +1,5 @@
[deps]
+Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
HistoricalStdlibVersions = "6df8b67a-e8a0-4029-b4b7-ac196fe72102"
LibGit2 = "76f85450-5226-5b5a-8eaa-529ad045b433"
@@ -15,4 +16,5 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[compat]
+Aqua = "0.8.10"
HistoricalStdlibVersions = "2"
diff --git a/test/api.jl b/test/api.jl
index 242b5af2e5..39633fe14e 100644
--- a/test/api.jl
+++ b/test/api.jl
@@ -10,37 +10,39 @@ using UUIDs
using ..Utils
@testset "Pkg.activate" begin
- isolate() do; cd_tempdir() do tmp
- path = pwd()
- Pkg.activate(".")
- mkdir("Foo")
- cd(mkdir("modules")) do
- Pkg.generate("Foo")
+ isolate() do;
+ cd_tempdir() do tmp
+ path = pwd()
+ Pkg.activate(".")
+ mkdir("Foo")
+ cd(mkdir("modules")) do
+ Pkg.generate("Foo")
+ end
+ Pkg.develop(Pkg.PackageSpec(path = "modules/Foo")) # to avoid issue #542
+ Pkg.activate("Foo") # activate path Foo over deps Foo
+ @test Base.active_project() == joinpath(path, "Foo", "Project.toml")
+ Pkg.activate(".")
+ rm("Foo"; force = true, recursive = true)
+ Pkg.activate("Foo") # activate path from developed Foo
+ @test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
+ Pkg.activate(".")
+ Pkg.activate("./Foo") # activate empty directory Foo (sidestep the developed Foo)
+ @test Base.active_project() == joinpath(path, "Foo", "Project.toml")
+ Pkg.activate(".")
+ Pkg.activate("Bar") # activate empty directory Bar
+ @test Base.active_project() == joinpath(path, "Bar", "Project.toml")
+ Pkg.activate(".")
+ Pkg.add("Example") # non-deved deps should not be activated
+ Pkg.activate("Example")
+ @test Base.active_project() == joinpath(path, "Example", "Project.toml")
+ Pkg.activate(".")
+ cd(mkdir("tests"))
+ Pkg.activate("Foo") # activate developed Foo from another directory
+ @test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
+ Pkg.activate() # activate LOAD_PATH project
+ @test Base.ACTIVE_PROJECT[] === nothing
end
- Pkg.develop(Pkg.PackageSpec(path="modules/Foo")) # to avoid issue #542
- Pkg.activate("Foo") # activate path Foo over deps Foo
- @test Base.active_project() == joinpath(path, "Foo", "Project.toml")
- Pkg.activate(".")
- rm("Foo"; force=true, recursive=true)
- Pkg.activate("Foo") # activate path from developed Foo
- @test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
- Pkg.activate(".")
- Pkg.activate("./Foo") # activate empty directory Foo (sidestep the developed Foo)
- @test Base.active_project() == joinpath(path, "Foo", "Project.toml")
- Pkg.activate(".")
- Pkg.activate("Bar") # activate empty directory Bar
- @test Base.active_project() == joinpath(path, "Bar", "Project.toml")
- Pkg.activate(".")
- Pkg.add("Example") # non-deved deps should not be activated
- Pkg.activate("Example")
- @test Base.active_project() == joinpath(path, "Example", "Project.toml")
- Pkg.activate(".")
- cd(mkdir("tests"))
- Pkg.activate("Foo") # activate developed Foo from another directory
- @test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
- Pkg.activate() # activate LOAD_PATH project
- @test Base.ACTIVE_PROJECT[] === nothing
- end end
+ end
end
include("FakeTerminals.jl")
@@ -48,219 +50,296 @@ import .FakeTerminals.FakeTerminal
@testset "Pkg.precompile" begin
# sequential precompile, depth-first
- isolate() do; cd_tempdir() do tmp
- Pkg.activate(".")
- cd(mkdir("packages")) do
- Pkg.generate("Dep1")
- Pkg.generate("Dep2")
- Pkg.generate("Dep3")
- Pkg.generate("Dep4")
- Pkg.generate("Dep5")
- Pkg.generate("Dep6")
- Pkg.generate("Dep7")
- Pkg.generate("Dep8")
- Pkg.generate("NoVersion")
- open(joinpath("NoVersion","Project.toml"), "w") do io
- write(io, "name = \"NoVersion\"\nuuid = \"$(UUIDs.uuid4())\"")
- end
- Pkg.generate("BrokenDep")
- open(joinpath("BrokenDep","src","BrokenDep.jl"), "w") do io
- write(io, "module BrokenDep\nerror()\nend")
- end
- Pkg.generate("TrailingTaskDep")
- open(joinpath("TrailingTaskDep","src","TrailingTaskDep.jl"), "w") do io
- write(io, """
- module TrailingTaskDep
- println(stderr, "waiting for IO to finish") # pretend to be a warning
- sleep(2)
- end""")
- end
- Pkg.generate("SlowPrecompile")
- open(joinpath("SlowPrecompile","src","SlowPrecompile.jl"), "w") do io
- write(io, """
- module SlowPrecompile
- sleep(10)
- end""")
- end
- end
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep1"))
-
- Pkg.activate("Dep1")
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep2"))
- Pkg.activate("Dep2")
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep3"))
-
- Pkg.activate(".")
- Pkg.resolve()
- Pkg.precompile()
-
- iob = IOBuffer()
- ENV["JULIA_PKG_PRECOMPILE_AUTO"]=1
- @info "Auto precompilation enabled"
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep4"))
- Pkg.develop(Pkg.PackageSpec(path="packages/NoVersion")) # a package with no version number
- Pkg.build(io=iob) # should trigger auto-precomp
- @test occursin("Precompiling", String(take!(iob)))
- Pkg.precompile(io=iob)
- @test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
-
- Pkg.precompile("Dep4", io=iob)
- @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
- Pkg.precompile(["Dep4", "NoVersion"], io=iob)
- @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
-
- Pkg.precompile(Pkg.PackageSpec(name="Dep4"))
- @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
- Pkg.precompile([Pkg.PackageSpec(name="Dep4"), Pkg.PackageSpec(name="NoVersion")])
- @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
-
- ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0
- @info "Auto precompilation disabled"
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep5"))
- Pkg.precompile(io=iob)
- @test occursin("Precompiling", String(take!(iob)))
-
- ENV["JULIA_PKG_PRECOMPILE_AUTO"]=1
- Pkg.develop(Pkg.PackageSpec(path="packages/BrokenDep"))
- Pkg.build(io=iob) # should trigger auto-precomp and soft-error
- @test occursin("Precompiling", String(take!(iob)))
-
- ptoml = joinpath("packages","BrokenDep","Project.toml")
- lines = readlines(ptoml)
- open(joinpath("packages","BrokenDep","src","BrokenDep.jl"), "w") do io
- write(io, "module BrokenDep\n\nend") # remove error
- end
- open(ptoml, "w") do io
- for line in lines
- if startswith(line, "version = \"0.1.0\"")
- println(io, replace(line, "version = \"0.1.0\"" => "version = \"0.1.1\"", count=1)) # up version
- else
- println(io, line)
+ isolate(loaded_depot = true) do;
+ cd_tempdir() do tmp
+ Pkg.activate(".")
+ cd(mkdir("packages")) do
+ Pkg.generate("Dep1")
+ Pkg.generate("Dep2")
+ Pkg.generate("Dep3")
+ Pkg.generate("Dep4")
+ Pkg.generate("Dep5")
+ Pkg.generate("Dep6")
+ Pkg.generate("Dep7")
+ Pkg.generate("Dep8")
+ Pkg.generate("NoVersion")
+ open(joinpath("NoVersion", "Project.toml"), "w") do io
+ write(io, "name = \"NoVersion\"\nuuid = \"$(UUIDs.uuid4())\"")
+ end
+ Pkg.generate("BrokenDep")
+ open(joinpath("BrokenDep", "src", "BrokenDep.jl"), "w") do io
+ write(io, "module BrokenDep\nerror()\nend")
+ end
+ Pkg.generate("TrailingTaskDep")
+ open(joinpath("TrailingTaskDep", "src", "TrailingTaskDep.jl"), "w") do io
+ write(
+ io, """
+ module TrailingTaskDep
+ println(stderr, "waiting for IO to finish") # pretend to be a warning
+ sleep(2)
+ end"""
+ )
+ end
+ Pkg.generate("SlowPrecompile")
+ open(joinpath("SlowPrecompile", "src", "SlowPrecompile.jl"), "w") do io
+ write(
+ io, """
+ module SlowPrecompile
+ sleep(10)
+ end"""
+ )
end
end
- end
- Pkg.update("BrokenDep") # should trigger auto-precomp including the fixed BrokenDep
- Pkg.precompile(io=iob)
- @test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep1"))
- # https://github.com/JuliaLang/Pkg.jl/pull/2142
- Pkg.build(; verbose=true)
+ Pkg.activate("Dep1")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep2"))
+ Pkg.activate("Dep2")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep3"))
+
+ Pkg.activate(".")
+ Pkg.resolve()
+ Pkg.precompile()
- @testset "timing mode" begin
iob = IOBuffer()
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep6"))
- Pkg.precompile(io=iob, timing=true)
- str = String(take!(iob))
- @test occursin("Precompiling", str)
- @test occursin(" ms", str)
- @test occursin("Dep6", str)
- Pkg.precompile(io=iob)
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 1
+ @info "Auto precompilation enabled"
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep4"))
+ Pkg.develop(Pkg.PackageSpec(path = "packages/NoVersion")) # a package with no version number
+ Pkg.build(io = iob) # should trigger auto-precomp
+ @test occursin("Precompiling", String(take!(iob)))
+ Pkg.precompile(io = iob)
@test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
- end
- @testset "instantiate" begin
- iob = IOBuffer()
- Pkg.activate("packages/Dep7")
- Pkg.resolve()
- @test isfile("packages/Dep7/Project.toml")
- @test isfile("packages/Dep7/Manifest.toml")
- Pkg.instantiate(io=iob) # with a Project.toml and Manifest.toml
+ Pkg.precompile("Dep4", io = iob)
+ @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
+ Pkg.precompile(["Dep4", "NoVersion"], io = iob)
+ @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
+
+ Pkg.precompile(Pkg.PackageSpec(name = "Dep4"))
+ @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
+ Pkg.precompile([Pkg.PackageSpec(name = "Dep4"), Pkg.PackageSpec(name = "NoVersion")])
+ @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
+
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 0
+ @info "Auto precompilation disabled"
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep5"))
+ Pkg.precompile(io = iob)
@test occursin("Precompiling", String(take!(iob)))
- Pkg.activate("packages/Dep8")
- @test isfile("packages/Dep8/Project.toml")
- @test !isfile("packages/Dep8/Manifest.toml")
- Pkg.instantiate(io=iob) # with only a Project.toml
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 1
+ Pkg.develop(Pkg.PackageSpec(path = "packages/BrokenDep"))
+ Pkg.build(io = iob) # should trigger auto-precomp and soft-error
@test occursin("Precompiling", String(take!(iob)))
- end
- ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0
+ ptoml = joinpath("packages", "BrokenDep", "Project.toml")
+ lines = readlines(ptoml)
+ open(joinpath("packages", "BrokenDep", "src", "BrokenDep.jl"), "w") do io
+ write(io, "module BrokenDep\n\nend") # remove error
+ end
+ open(ptoml, "w") do io
+ for line in lines
+ if startswith(line, "version = \"0.1.0\"")
+ println(io, replace(line, "version = \"0.1.0\"" => "version = \"0.1.1\"", count = 1)) # up version
+ else
+ println(io, line)
+ end
+ end
+ end
+ Pkg.update("BrokenDep") # should trigger auto-precomp including the fixed BrokenDep
+ Pkg.precompile(io = iob)
+ @test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
- @testset "waiting for trailing tasks" begin
- Pkg.activate("packages/TrailingTaskDep")
- iob = IOBuffer()
- Pkg.precompile(io=iob)
- str = String(take!(iob))
- @test occursin("Precompiling", str)
- @test occursin("Waiting for background task / IO / timer.", str)
- end
+ # https://github.com/JuliaLang/Pkg.jl/pull/2142
+ Pkg.build(; verbose = true)
+
+ @testset "timing mode" begin
+ iob = IOBuffer()
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep6"))
+ Pkg.precompile(io = iob, timing = true)
+ str = String(take!(iob))
+ @test occursin("Precompiling", str)
+ @test occursin(r"\d+\.\d+ s\b", str)
+ @test occursin("Dep6", str)
+ Pkg.precompile(io = iob)
+ @test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
+ end
+
+ dep8_path = git_init_package(tmp, joinpath("packages", "Dep8"))
+ function clear_dep8_cache()
+ rm(joinpath(Pkg.depots1(), "compiled", "v$(VERSION.major).$(VERSION.minor)", "Dep8"), force = true, recursive = true)
+ end
+ @testset "delayed precompilation with do-syntax" begin
+ iob = IOBuffer()
+ # Test that operations inside Pkg.precompile() do block don't trigger auto-precompilation
+ Pkg.precompile(io = iob) do
+ Pkg.add(Pkg.PackageSpec(path = dep8_path))
+ Pkg.rm("Dep8")
+ clear_dep8_cache()
+ Pkg.add(Pkg.PackageSpec(path = dep8_path))
+ end
- @testset "pidlocked precompile" begin
- proj = joinpath(pwd(), "packages", "SlowPrecompile")
- cmd = addenv(`$(Base.julia_cmd()) --color=no --startup-file=no --project="$(pkgdir(Pkg))" -e "
+ # The precompile should happen once at the end
+ @test count(r"Precompiling", String(take!(iob))) == 1 # should only precompile once
+
+ # Verify it was precompiled by checking a second call is a no-op
+ Pkg.precompile(io = iob)
+ @test !occursin("Precompiling", String(take!(iob)))
+ end
+
+ Pkg.rm("Dep8")
+
+ @testset "autoprecompilation_enabled global control" begin
+ iob = IOBuffer()
+ withenv("JULIA_PKG_PRECOMPILE_AUTO" => nothing) do
+ original_state = Pkg._autoprecompilation_enabled
+ try
+ Pkg.autoprecompilation_enabled(false)
+ @test Pkg._autoprecompilation_enabled == false
+
+ # Operations should not trigger autoprecompilation when globally disabled
+ clear_dep8_cache()
+ Pkg.add(Pkg.PackageSpec(path = dep8_path), io = iob)
+ @test !occursin("Precompiling", String(take!(iob)))
+
+ # Manual precompile should still work
+ @test Base.isprecompiled(Base.identify_package("Dep8")) == false
+ Pkg.precompile(io = iob)
+ @test occursin("Precompiling", String(take!(iob)))
+ @test Base.isprecompiled(Base.identify_package("Dep8"))
+
+ # Re-enable autoprecompilation
+ Pkg.autoprecompilation_enabled(true)
+ @test Pkg._autoprecompilation_enabled == true
+
+ # Operations should now trigger autoprecompilation again
+ Pkg.rm("Dep8", io = iob)
+ clear_dep8_cache()
+ Pkg.add(Pkg.PackageSpec(path = dep8_path), io = iob)
+ @test Base.isprecompiled(Base.identify_package("Dep8"))
+ @test occursin("Precompiling", String(take!(iob)))
+
+ finally
+ # Restore original state
+ Pkg.autoprecompilation_enabled(original_state)
+ end
+ end
+ end
+
+ @testset "instantiate" begin
+ iob = IOBuffer()
+ Pkg.activate("packages/Dep7")
+ Pkg.resolve()
+ @test isfile("packages/Dep7/Project.toml")
+ @test isfile("packages/Dep7/Manifest.toml")
+ Pkg.instantiate(io = iob) # with a Project.toml and Manifest.toml
+ @test occursin("Precompiling", String(take!(iob)))
+
+ Pkg.activate("packages/Dep8")
+ @test isfile("packages/Dep8/Project.toml")
+ @test !isfile("packages/Dep8/Manifest.toml")
+ Pkg.instantiate(io = iob) # with only a Project.toml
+ @test occursin("Precompiling", String(take!(iob)))
+ end
+
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 0
+
+ @testset "waiting for trailing tasks" begin
+ Pkg.activate("packages/TrailingTaskDep")
+ iob = IOBuffer()
+ Pkg.precompile(io = iob)
+ str = String(take!(iob))
+ @test occursin("Precompiling", str)
+ @test occursin("waiting for IO to finish", str) || occursin("Waiting for background task / IO / timer.", str)
+ end
+
+ @testset "pidlocked precompile" begin
+ proj = joinpath(pwd(), "packages", "SlowPrecompile")
+ cmd = addenv(
+ `$(Base.julia_cmd()) --color=no --startup-file=no --project="$(pkgdir(Pkg))" -e "
using Pkg
Pkg.activate(\"$(escape_string(proj))\")
Pkg.precompile()
"`,
- "JULIA_PKG_PRECOMPILE_AUTO" => "0")
- iob1 = IOBuffer()
- iob2 = IOBuffer()
- try
- Base.Experimental.@sync begin
- @async run(pipeline(cmd, stderr=iob1, stdout=iob1))
- @async run(pipeline(cmd, stderr=iob2, stdout=iob2))
+ "JULIA_PKG_PRECOMPILE_AUTO" => "0",
+ "JULIA_DEPOT_PATH" => join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":"),
+ )
+ iob1 = IOBuffer()
+ iob2 = IOBuffer()
+ try
+ Base.Experimental.@sync begin
+ @async run(pipeline(cmd, stderr = iob1, stdout = iob1))
+ @async run(pipeline(cmd, stderr = iob2, stdout = iob2))
+ end
+ catch
+ println("pidlocked precompile tests failed:")
+ println("process 1:\n", String(take!(iob1)))
+ println("process 2:\n", String(take!(iob2)))
+ rethrow()
end
- catch
- println("pidlocked precompile tests failed:")
- println("process 1:\n", String(take!(iob1)))
- println("process 2:\n", String(take!(iob2)))
- rethrow()
+ s1 = String(take!(iob1))
+ s2 = String(take!(iob2))
+ @test occursin("Precompiling", s1)
+ @test occursin("Precompiling", s2)
+ @test any(contains("Being precompiled by another process (pid: "), (s1, s2))
end
- s1 = String(take!(iob1))
- s2 = String(take!(iob2))
- @test occursin("Precompiling", s1)
- @test occursin("Precompiling", s2)
- @test any(contains("Being precompiled by another process (pid: "), (s1, s2))
- end
- end end
+ end
+ end
# ignoring circular deps, to avoid deadlock
- isolate() do; cd_tempdir() do tmp
- Pkg.activate(".")
- cd(mkdir("packages")) do
- Pkg.generate("CircularDep1")
- Pkg.generate("CircularDep2")
- Pkg.generate("CircularDep3")
+ isolate() do;
+ cd_tempdir() do tmp
+ Pkg.activate(".")
+ cd(mkdir("packages")) do
+ Pkg.generate("CircularDep1")
+ Pkg.generate("CircularDep2")
+ Pkg.generate("CircularDep3")
+ end
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep1"))
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep2"))
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep3"))
+
+ Pkg.activate("CircularDep1")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep2"))
+ Pkg.activate("CircularDep2")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep3"))
+ Pkg.activate("CircularDep3")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep1"))
+
+ Pkg.activate(".")
+ Pkg.resolve()
+
+ ## Tests when circularity is in dependencies
+ iob = IOBuffer()
+ Pkg.precompile(io = iob)
+ @test occursin("Circular dependency detected", String(take!(iob)))
+
+ ## Tests when circularity goes through the active project
+ Pkg.activate("CircularDep1")
+ Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
+ Pkg.precompile(io = iob)
+ @test occursin("Circular dependency detected", String(take!(iob)))
+ Pkg.activate(".")
+ Pkg.activate("CircularDep2")
+ Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
+ Pkg.precompile(io = iob)
+ @test occursin("Circular dependency detected", String(take!(iob)))
+ Pkg.activate(".")
+ Pkg.activate("CircularDep3")
+ Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
+ Pkg.precompile(io = iob)
+ @test occursin("Circular dependency detected", String(take!(iob)))
+
+ Pkg.activate(temp = true)
+ Pkg.precompile() # precompile an empty env should be a no-op
+ # TODO: Reenable
+ #@test_throws ErrorException Pkg.precompile("DoesNotExist") # fail to find a nonexistent dep in an empty env
+
+ Pkg.add("Random")
+ #@test_throws ErrorException Pkg.precompile("DoesNotExist")
+ Pkg.precompile() # should be a no-op
end
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep1"))
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep2"))
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep3"))
-
- Pkg.activate("CircularDep1")
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep2"))
- Pkg.activate("CircularDep2")
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep3"))
- Pkg.activate("CircularDep3")
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep1"))
-
- Pkg.activate(".")
- Pkg.resolve()
-
- ## Tests when circularity is in dependencies
- @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
-
- ## Tests when circularity goes through the active project
- Pkg.activate("CircularDep1")
- Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
- @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
- Pkg.activate(".")
- Pkg.activate("CircularDep2")
- Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
- @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
- Pkg.activate(".")
- Pkg.activate("CircularDep3")
- Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
- @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
-
- Pkg.activate(temp=true)
- Pkg.precompile() # precompile an empty env should be a no-op
- # TODO: Reenable
- #@test_throws ErrorException Pkg.precompile("DoesNotExist") # fail to find a nonexistant dep in an empty env
-
- Pkg.add("Random")
- #@test_throws ErrorException Pkg.precompile("DoesNotExist")
- Pkg.precompile() # should be a no-op
- end end
+ end
end
@testset "Pkg.API.check_package_name: Error message if package name ends in .jl" begin
@@ -296,21 +375,83 @@ end
@testset "set number of concurrent requests" begin
@test Pkg.Types.num_concurrent_downloads() == 8
- withenv("JULIA_PKG_CONCURRENT_DOWNLOADS"=>"5") do
+ withenv("JULIA_PKG_CONCURRENT_DOWNLOADS" => "5") do
@test Pkg.Types.num_concurrent_downloads() == 5
end
- withenv("JULIA_PKG_CONCURRENT_DOWNLOADS"=>"0") do
+ withenv("JULIA_PKG_CONCURRENT_DOWNLOADS" => "0") do
@test_throws ErrorException Pkg.Types.num_concurrent_downloads()
end
end
@testset "`[compat]` entries for `julia`" begin
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- pathf = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "FarFuture"))
- pathp = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "FarPast"))
- @test_throws "julia version requirement from Project.toml's compat section not satisfied for package" Pkg.add(path=pathf)
- @test_throws "julia version requirement from Project.toml's compat section not satisfied for package" Pkg.add(path=pathp)
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ pathf = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "FarFuture"))
+ pathp = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "FarPast"))
+ @test_throws "julia version requirement for package" Pkg.add(path = pathf)
+ @test_throws "julia version requirement for package" Pkg.add(path = pathp)
+ end
+ end
+end
+
+@testset "allow_reresolve parameter" begin
+ isolate(loaded_depot = false) do;
+ mktempdir() do tempdir
+ Pkg.Registry.add(url = "https://github.com/JuliaRegistries/Test")
+ # AllowReresolveTest has Example v0.5.1 which is yanked in the test registry.
+ test_dir = joinpath(tempdir, "AllowReresolveTest")
+
+ # Test that we can build and test with allow_reresolve=true
+ copy_test_package(tempdir, "AllowReresolveTest")
+ Pkg.activate(joinpath(tempdir, "AllowReresolveTest"))
+ @test Pkg.build(; allow_reresolve = true) == nothing
+
+ rm(test_dir, force = true, recursive = true)
+ copy_test_package(tempdir, "AllowReresolveTest")
+ Pkg.activate(joinpath(tempdir, "AllowReresolveTest"))
+ @test Pkg.test(; allow_reresolve = true) == nothing
+
+ # Test that allow_reresolve=false fails with the broken manifest
+ rm(test_dir, force = true, recursive = true)
+ copy_test_package(tempdir, "AllowReresolveTest")
+ Pkg.activate(joinpath(tempdir, "AllowReresolveTest"))
+ @test_throws Pkg.Resolve.ResolverError Pkg.build(; allow_reresolve = false)
+
+ rm(test_dir, force = true, recursive = true)
+ copy_test_package(tempdir, "AllowReresolveTest")
+ Pkg.activate(joinpath(tempdir, "AllowReresolveTest"))
+ @test_throws Pkg.Resolve.ResolverError Pkg.test(; allow_reresolve = false)
+ end
+ end
+end
+
+@testset "Yanked package handling" begin
+ isolate() do;
+ mktempdir() do tempdir
+ # Copy the yanked test environment
+ test_env_dir = joinpath(tempdir, "yanked_test")
+ cp(joinpath(@__DIR__, "manifest", "yanked"), test_env_dir)
+ Pkg.activate(test_env_dir)
+
+ @testset "status shows yanked packages" begin
+ iob = IOBuffer()
+ Pkg.status(io = iob)
+ status_output = String(take!(iob))
+
+ @test occursin("Mocking v0.7.4 [yanked]", status_output)
+ @test occursin("Package versions marked with [yanked] have been pulled from their registry.", status_output)
+ end
+ @testset "resolve error shows yanked packages warning" begin
+ # Try to add a package that will cause resolve conflicts with yanked package
+ iob = IOBuffer()
+ @test_throws Pkg.Resolve.ResolverError Pkg.add("Example"; preserve = Pkg.PRESERVE_ALL, io = iob)
+ error_output = String(take!(iob))
+
+ @test occursin("The following package versions were yanked from their registry and are not resolvable:", error_output)
+ @test occursin("Mocking [78c3b35d] 0.7.4", error_output)
+ end
+ end
+ end
end
end # module APITests
diff --git a/test/apps.jl b/test/apps.jl
new file mode 100644
index 0000000000..72caca293d
--- /dev/null
+++ b/test/apps.jl
@@ -0,0 +1,132 @@
+module AppsTests
+
+import ..Pkg # ensure we are using the correct Pkg
+using ..Utils
+
+using Test
+
+@testset "Apps" begin
+
+ isolate(loaded_depot = true) do
+ sep = Sys.iswindows() ? ';' : ':'
+ Pkg.Apps.develop(path = joinpath(@__DIR__, "test_packages", "Rot13.jl"))
+ current_path = ENV["PATH"]
+ exename = Sys.iswindows() ? "juliarot13.bat" : "juliarot13"
+ cliexename = Sys.iswindows() ? "juliarot13cli.bat" : "juliarot13cli"
+ flagsexename = Sys.iswindows() ? "juliarot13flags.bat" : "juliarot13flags"
+ withenv("PATH" => string(joinpath(first(DEPOT_PATH), "bin"), sep, current_path)) do
+ # Test original app
+ @test contains(Sys.which("$exename"), first(DEPOT_PATH))
+ @test read(`$exename test`, String) == "grfg\n"
+
+ # Test submodule app
+ @test contains(Sys.which("$cliexename"), first(DEPOT_PATH))
+ @test read(`$cliexename test`, String) == "CLI: grfg\n"
+
+ # Test flags app with default julia_flags
+ @test contains(Sys.which("$flagsexename"), first(DEPOT_PATH))
+ flags_output = read(`$flagsexename arg1 arg2`, String)
+ @test contains(flags_output, "Julia flags demo!")
+ @test contains(flags_output, "Thread count: 2") # from --threads=2
+ @test contains(flags_output, "Optimization level: 3") # from --optimize=3
+ @test contains(flags_output, "App arguments: arg1 arg2")
+
+ # Test flags app with runtime julia flags (should override defaults)
+ runtime_output = read(`$flagsexename --threads=4 -- runtime_arg`, String)
+ @test contains(runtime_output, "Thread count: 4") # overridden by runtime
+ @test contains(runtime_output, "App arguments: runtime_arg")
+
+ # Test JULIA_APPS_JULIA_CMD environment variable override
+ mktempdir() do tmpdir
+ # Create a mock Julia executable that outputs an identifiable string
+ mock_julia_path = joinpath(tmpdir, Sys.iswindows() ? "mock-julia.bat" : "mock-julia")
+ mock_script = if Sys.iswindows()
+ "@echo off\necho MOCK_JULIA_EXECUTED\n"
+ else
+ "#!/bin/sh\necho MOCK_JULIA_EXECUTED\n"
+ end
+ write(mock_julia_path, mock_script)
+ if !Sys.iswindows()
+ chmod(mock_julia_path, 0o755)
+ end
+
+ # Test that JULIA_APPS_JULIA_CMD overrides the Julia executable
+ withenv("JULIA_APPS_JULIA_CMD" => mock_julia_path) do
+ mock_output = read(`$exename test`, String)
+ @test contains(mock_output, "MOCK_JULIA_EXECUTED")
+ end
+ end
+
+ Pkg.Apps.rm("Rot13")
+ @test Sys.which(exename) == nothing
+ @test Sys.which(cliexename) == nothing
+ @test Sys.which(flagsexename) == nothing
+ end
+ end
+
+ isolate(loaded_depot = true) do
+ mktempdir() do tmpdir
+ sep = Sys.iswindows() ? ';' : ':'
+ path = git_init_package(tmpdir, joinpath(@__DIR__, "test_packages", "Rot13.jl"))
+ Pkg.Apps.add(path = path)
+ exename = Sys.iswindows() ? "juliarot13.bat" : "juliarot13"
+ cliexename = Sys.iswindows() ? "juliarot13cli.bat" : "juliarot13cli"
+ flagsexename = Sys.iswindows() ? "juliarot13flags.bat" : "juliarot13flags"
+ current_path = ENV["PATH"]
+ withenv("PATH" => string(joinpath(first(DEPOT_PATH), "bin"), sep, current_path)) do
+ # Test original app
+ @test contains(Sys.which(exename), first(DEPOT_PATH))
+ @test read(`$exename test`, String) == "grfg\n"
+
+ # Test submodule app
+ @test contains(Sys.which(cliexename), first(DEPOT_PATH))
+ @test read(`$cliexename test`, String) == "CLI: grfg\n"
+
+ # Test flags app functionality
+ @test contains(Sys.which(flagsexename), first(DEPOT_PATH))
+ flags_output = read(`$flagsexename hello`, String)
+ @test contains(flags_output, "Julia flags demo!")
+ @test contains(flags_output, "App arguments: hello")
+
+ Pkg.Apps.rm("Rot13")
+ @test Sys.which(exename) == nothing
+ @test Sys.which(cliexename) == nothing
+ @test Sys.which(flagsexename) == nothing
+ end
+
+ # Test both absolute path and relative path "." work for develop
+ # https://github.com/JuliaLang/Pkg.jl/issues/4258 and #4480
+ for test_path in [path, "."]
+ if test_path == "."
+ cd(path) do
+ Pkg.Apps.develop(path = test_path)
+ end
+ else
+ Pkg.Apps.develop(path = test_path)
+ end
+
+ # Verify that dev does not create an app environment directory
+ app_env_dir = joinpath(first(DEPOT_PATH), "environments", "apps", "Rot13")
+ @test !isdir(app_env_dir)
+
+ # Verify that changes to the dev'd package are immediately reflected (only test once)
+ if test_path == path
+ mv(joinpath(path, "src", "Rot13_edited.jl"), joinpath(path, "src", "Rot13.jl"); force = true)
+ withenv("PATH" => string(joinpath(first(DEPOT_PATH), "bin"), sep, current_path)) do
+ @test read(`$exename test`, String) == "Updated!\n"
+ end
+ end
+
+ Pkg.Apps.rm("Rot13")
+ end
+ end
+ end
+
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add("General")
+ Pkg.Apps.add(name = "Runic", version = "1.5.1")
+ Pkg.Apps.update("Runic")
+ end
+end
+
+end # module
diff --git a/test/aqua.jl b/test/aqua.jl
new file mode 100644
index 0000000000..c5aeb90392
--- /dev/null
+++ b/test/aqua.jl
@@ -0,0 +1,2 @@
+using Aqua
+Aqua.test_all(Pkg)
diff --git a/test/artifacts.jl b/test/artifacts.jl
index 605c3b26f8..8595f1890e 100644
--- a/test/artifacts.jl
+++ b/test/artifacts.jl
@@ -3,23 +3,18 @@ import ..Pkg # ensure we are using the correct Pkg
using Test, Random, Pkg.Artifacts, Base.BinaryPlatforms, Pkg.PlatformEngines
import Pkg.Artifacts: pack_platform!, unpack_platform, with_artifacts_directory, ensure_all_artifacts_installed, extract_all_hashes
+import Pkg.Operations: count_artifacts, artifact_suffix
using TOML, Dates
import Base: SHA1
-# Order-dependence in the tests, so we delay this until we need it
-if Base.find_package("Preferences") === nothing
- @info "Installing Preferences for Pkg tests"
- Pkg.add("Preferences") # Needed for sandbox and artifacts tests
-end
-using Preferences
-
using ..Utils
+using Preferences
# Helper function to create an artifact, then chmod() the whole thing to 0o644. This is
# important to keep hashes stable across platforms that have different umasks, changing
# the permissions within a tree hash, breaking our tests.
function create_artifact_chmod(f::Function)
- create_artifact() do path
+ return create_artifact() do path
f(path)
# Change all files to have 644 permissions, leave directories alone
@@ -36,51 +31,59 @@ end
# We're going to ensure that our artifact creation does in fact give git-tree-sha1's.
creators = [
# First test the empty artifact
- (path -> begin
- # add no contents
- end, "4b825dc642cb6eb9a060e54bf8d69288fbee4904"),
+ (
+ path -> begin
+ # add no contents
+ end, "4b825dc642cb6eb9a060e54bf8d69288fbee4904",
+ ),
# Next test creating a single file
- (path -> begin
- open(joinpath(path, "foo"), "w") do io
- print(io, "Hello, world!")
- end
- end, "339aad93c0f854604248ea3b7c5b7edea20625a9"),
+ (
+ path -> begin
+ open(joinpath(path, "foo"), "w") do io
+ print(io, "Hello, world!")
+ end
+ end, "339aad93c0f854604248ea3b7c5b7edea20625a9",
+ ),
# Next we will test creating multiple files
- (path -> begin
- open(joinpath(path, "foo1"), "w") do io
- print(io, "Hello")
- end
- open(joinpath(path, "foo2"), "w") do io
- print(io, "world!")
- end
- end, "98cda294312216b19e2a973e9c291c0f5181c98c"),
+ (
+ path -> begin
+ open(joinpath(path, "foo1"), "w") do io
+ print(io, "Hello")
+ end
+ open(joinpath(path, "foo2"), "w") do io
+ print(io, "world!")
+ end
+ end, "98cda294312216b19e2a973e9c291c0f5181c98c",
+ ),
# Finally, we will have nested directories and all that good stuff
- (path -> begin
- mkpath(joinpath(path, "bar", "bar"))
- open(joinpath(path, "bar", "bar", "foo1"), "w") do io
- print(io, "Hello")
- end
- open(joinpath(path, "bar", "foo2"), "w") do io
- print(io, "world!")
- end
- open(joinpath(path, "foo3"), "w") do io
- print(io, "baz!")
- end
+ (
+ path -> begin
+ mkpath(joinpath(path, "bar", "bar"))
+ open(joinpath(path, "bar", "bar", "foo1"), "w") do io
+ print(io, "Hello")
+ end
+ open(joinpath(path, "bar", "foo2"), "w") do io
+ print(io, "world!")
+ end
+ open(joinpath(path, "foo3"), "w") do io
+ print(io, "baz!")
+ end
- # Empty directories do nothing to effect the hash, so we create one with a
- # random name to prove that it does not get hashed into the rest. Also, it
- # turns out that life is cxomplex enough that we need to test the nested
- # empty directories case as well.
- rand_dir = joinpath(path, Random.randstring(8), "inner")
- mkpath(rand_dir)
-
- # Symlinks are not followed, even if they point to directories
- symlink("foo3", joinpath(path, "foo3_link"))
- symlink("../bar", joinpath(path, "bar", "infinite_link"))
- end, "86a1ce580587d5851fdfa841aeb3c8d55663f6f9"),
+                # Empty directories do nothing to affect the hash, so we create one with a
+ # random name to prove that it does not get hashed into the rest. Also, it
+                # turns out that life is complex enough that we need to test the nested
+ # empty directories case as well.
+ rand_dir = joinpath(path, Random.randstring(8), "inner")
+ mkpath(rand_dir)
+
+ # Symlinks are not followed, even if they point to directories
+ symlink("foo3", joinpath(path, "foo3_link"))
+ symlink("../bar", joinpath(path, "bar", "infinite_link"))
+ end, "86a1ce580587d5851fdfa841aeb3c8d55663f6f9",
+ ),
]
# Enable the following code snippet to figure out the correct gitsha's:
@@ -145,7 +148,7 @@ end
@test !iszero(filemode(joinpath(artifact_dir, dir_link)) & 0o222)
# Make sure we can delete the artifact directory without having
# to manually change permissions
- rm(artifact_dir; recursive=true)
+ rm(artifact_dir; recursive = true)
end
end
end
@@ -166,10 +169,10 @@ end
# First, let's test our ability to find Artifacts.toml files;
ATS = joinpath(@__DIR__, "test_packages", "ArtifactTOMLSearch")
test_modules = [
- joinpath(ATS, "pkg.jl") => joinpath(ATS, "Artifacts.toml"),
- joinpath(ATS, "sub_module", "pkg.jl") => joinpath(ATS, "Artifacts.toml"),
- joinpath(ATS, "sub_package", "pkg.jl") => joinpath(ATS, "sub_package", "Artifacts.toml"),
- joinpath(ATS, "julia_artifacts_test", "pkg.jl") => joinpath(ATS, "julia_artifacts_test", "JuliaArtifacts.toml"),
+ joinpath(ATS, "pkg.jl") => joinpath(ATS, "Artifacts.toml"),
+ joinpath(ATS, "sub_module", "pkg.jl") => joinpath(ATS, "Artifacts.toml"),
+ joinpath(ATS, "sub_package", "pkg.jl") => joinpath(ATS, "sub_package", "Artifacts.toml"),
+ joinpath(ATS, "julia_artifacts_test", "pkg.jl") => joinpath(ATS, "julia_artifacts_test", "JuliaArtifacts.toml"),
joinpath(@__DIR__, "test_packages", "BasicSandbox", "src", "Foo.jl") => nothing,
]
for (test_src, artifacts_toml) in test_modules
@@ -229,7 +232,7 @@ end
end
@test_throws ErrorException bind_artifact!(artifacts_toml, "foo_txt", hash2)
@test artifact_hash("foo_txt", artifacts_toml) == hash
- bind_artifact!(artifacts_toml, "foo_txt", hash2; force=true)
+ bind_artifact!(artifacts_toml, "foo_txt", hash2; force = true)
@test artifact_hash("foo_txt", artifacts_toml) == hash2
# Test that we can un-bind
@@ -238,28 +241,36 @@ end
# Test platform-specific binding and providing download_info
download_info = [
- ("http://google.com/hello_world", "0"^64),
- ("http://microsoft.com/hello_world", "a"^64),
+ ArtifactDownloadInfo("http://google.com/hello_world", "0"^64),
+ ArtifactDownloadInfo("http://microsoft.com/hello_world", "a"^64, 1),
]
# First, test the binding of things with various platforms and overwriting and such works properly
linux64 = Platform("x86_64", "linux")
win32 = Platform("i686", "windows")
- bind_artifact!(artifacts_toml, "foo_txt", hash; download_info=download_info, platform=linux64)
- @test artifact_hash("foo_txt", artifacts_toml; platform=linux64) == hash
- @test artifact_hash("foo_txt", artifacts_toml; platform=Platform("x86_64", "macos")) == nothing
- @test_throws ErrorException bind_artifact!(artifacts_toml, "foo_txt", hash2; download_info=download_info, platform=linux64)
- bind_artifact!(artifacts_toml, "foo_txt", hash2; download_info=download_info, platform=linux64, force=true)
- bind_artifact!(artifacts_toml, "foo_txt", hash; download_info=download_info, platform=win32)
- @test artifact_hash("foo_txt", artifacts_toml; platform=linux64) == hash2
- @test artifact_hash("foo_txt", artifacts_toml; platform=win32) == hash
- @test ensure_artifact_installed("foo_txt", artifacts_toml; platform=linux64) == artifact_path(hash2)
- @test ensure_artifact_installed("foo_txt", artifacts_toml; platform=win32) == artifact_path(hash)
+ bind_artifact!(artifacts_toml, "foo_txt", hash; download_info = download_info, platform = linux64)
+ @test artifact_hash("foo_txt", artifacts_toml; platform = linux64) == hash
+ @test artifact_hash("foo_txt", artifacts_toml; platform = Platform("x86_64", "macos")) == nothing
+ @test_throws ErrorException bind_artifact!(artifacts_toml, "foo_txt", hash2; download_info = download_info, platform = linux64)
+ bind_artifact!(artifacts_toml, "foo_txt", hash; download_info = download_info, platform = win32)
+ bind_artifact!(artifacts_toml, "foo_txt", hash2; download_info = download_info, platform = linux64, force = true)
+ @test artifact_hash("foo_txt", artifacts_toml; platform = linux64) == hash2
+ @test artifact_hash("foo_txt", artifacts_toml; platform = win32) == hash
+ @test ensure_artifact_installed("foo_txt", artifacts_toml; platform = linux64) == artifact_path(hash2)
+ @test ensure_artifact_installed("foo_txt", artifacts_toml; platform = win32) == artifact_path(hash)
+
+ # Default HostPlatform() adds a compare_strategy key that doesn't get picked up from
+ # the Artifacts.toml
+ testhost = Platform("x86_64", "linux", Dict("libstdcxx_version" => "1.2.3"))
+ BinaryPlatforms.set_compare_strategy!(testhost, "libstdcxx_version", BinaryPlatforms.compare_version_cap)
+ @test_throws ErrorException bind_artifact!(artifacts_toml, "foo_txt", hash; download_info = download_info, platform = testhost)
# Next, check that we can get the download_info properly:
- meta = artifact_meta("foo_txt", artifacts_toml; platform=win32)
+ meta = artifact_meta("foo_txt", artifacts_toml; platform = win32)
@test meta["download"][1]["url"] == "http://google.com/hello_world"
+ @test !haskey(meta["download"][1], "size")
@test meta["download"][2]["sha256"] == "a"^64
+ @test meta["download"][2]["size"] == 1
rm(artifacts_toml)
@@ -290,20 +301,24 @@ end
@test_logs (:error, r"malformed, must be array or dict!") artifact_meta("broken_artifact", joinpath(badifact_dir, "not_a_table.toml"))
# Next, test incorrect download errors
- for ignore_hash in (false, true); withenv("JULIA_PKG_IGNORE_HASHES" => ignore_hash ? "1" : nothing) do; mktempdir() do dir
- with_artifacts_directory(dir) do
- @test artifact_meta("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml")) != nothing
- if !ignore_hash
- @test_throws ErrorException ensure_artifact_installed("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml"))
- else
- @test_logs (:error, r"Tree Hash Mismatch!") match_mode=:any begin
- path = ensure_artifact_installed("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml"))
- @test endswith(path, "0000000000000000000000000000000000000000")
- @test isdir(path)
+ for ignore_hash in (false, true)
+ withenv("JULIA_PKG_IGNORE_HASHES" => ignore_hash ? "1" : nothing) do;
+ mktempdir() do dir
+ with_artifacts_directory(dir) do
+ @test artifact_meta("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml")) != nothing
+ if !ignore_hash
+ @test_throws ErrorException ensure_artifact_installed("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml"))
+ else
+ @test_logs (:error, r"Tree Hash Mismatch!") match_mode = :any begin
+ path = ensure_artifact_installed("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml"))
+ @test endswith(path, "0000000000000000000000000000000000000000")
+ @test isdir(path)
+ end
end
end
end
- end end end
+ end
+ end
mktempdir() do dir
with_artifacts_directory(dir) do
@@ -340,20 +355,24 @@ end
with_pkg_env(project_path) do
path = git_init_package(project_path, joinpath(@__DIR__, "test_packages", "ArtifactInstallation"))
add_this_pkg()
- Pkg.add(Pkg.Types.PackageSpec(
- name="ArtifactInstallation",
- uuid=Base.UUID("02111abe-2050-1119-117e-b30112b5bdc4"),
- path=path,
- ))
+ Pkg.add(
+ Pkg.Types.PackageSpec(
+ name = "ArtifactInstallation",
+ uuid = Base.UUID("02111abe-2050-1119-117e-b30112b5bdc4"),
+ path = path,
+ )
+ )
# Run test harness
Pkg.test("ArtifactInstallation")
# Also manually do it
- Core.eval(Module(:__anon__), quote
- using ArtifactInstallation
- do_test()
- end)
+ Core.eval(
+ Module(:__anon__), quote
+ using ArtifactInstallation
+ do_test()
+ end
+ )
end
end
@@ -362,7 +381,7 @@ end
copy_test_package(project_path, "ArtifactInstallation")
Pkg.activate(joinpath(project_path, "ArtifactInstallation"))
add_this_pkg()
- Pkg.instantiate(; verbose=true)
+ Pkg.instantiate(; verbose = true)
# Manual test that artifact is installed by instantiate()
artifacts_toml = joinpath(project_path, "ArtifactInstallation", "Artifacts.toml")
@@ -378,21 +397,21 @@ end
# Try to install all artifacts for the given platform, knowing full well that
# HelloWorldC will fail to match any artifact to this bogus platform
bogus_platform = Platform("bogus", "linux")
- artifacts = select_downloadable_artifacts(artifacts_toml; platform=bogus_platform)
+ artifacts = select_downloadable_artifacts(artifacts_toml; platform = bogus_platform)
for name in keys(artifacts)
- ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=bogus_platform)
+ ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform = bogus_platform)
end
# Test that HelloWorldC doesn't even show up
- hwc_hash = artifact_hash("HelloWorldC", artifacts_toml; platform=bogus_platform)
+ hwc_hash = artifact_hash("HelloWorldC", artifacts_toml; platform = bogus_platform)
@test hwc_hash === nothing
# Test that socrates shows up, but is not installed, because it's lazy
- socrates_hash = artifact_hash("socrates", artifacts_toml; platform=bogus_platform)
+ socrates_hash = artifact_hash("socrates", artifacts_toml; platform = bogus_platform)
@test !artifact_exists(socrates_hash)
# Test that collapse_the_symlink is installed
- cts_hash = artifact_hash("collapse_the_symlink", artifacts_toml; platform=bogus_platform)
+ cts_hash = artifact_hash("collapse_the_symlink", artifacts_toml; platform = bogus_platform)
@test artifact_exists(cts_hash)
end
@@ -419,11 +438,12 @@ end
)
disengaged_platform = HostPlatform()
disengaged_platform["flooblecrank"] = "disengaged"
+ disengaged_adi = ArtifactDownloadInfo(disengaged_url, disengaged_sha256)
Pkg.Artifacts.bind_artifact!(
artifacts_toml,
"gooblebox",
disengaged_hash;
- download_info = [(disengaged_url, disengaged_sha256)],
+ download_info = [disengaged_adi],
platform = disengaged_platform,
)
end
@@ -463,22 +483,24 @@ end
artifacts_toml = joinpath(ap_path, "Artifacts.toml")
p = HostPlatform()
p["flooblecrank"] = flooblecrank_status
- flooblecrank_hash = artifact_hash("gooblebox", artifacts_toml; platform=p)
+ flooblecrank_hash = artifact_hash("gooblebox", artifacts_toml; platform = p)
@test flooblecrank_hash == right_hash
@test artifact_exists(flooblecrank_hash)
# Test that if we load the package, it knows how to find its own artifact,
# because it feeds the right `Platform` object through to `@artifact_str()`
- cmd = addenv(`$(Base.julia_cmd()) --color=yes --project=$(ap_path) -e 'using AugmentedPlatform; print(get_artifact_dir("gooblebox"))'`,
- "JULIA_DEPOT_PATH" => join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":"),
- "FLOOBLECRANK" => flooblecrank_status)
+ cmd = addenv(
+ `$(Base.julia_cmd()) --color=yes --project=$(ap_path) -e 'using AugmentedPlatform; print(get_artifact_dir("gooblebox"))'`,
+ "JULIA_DEPOT_PATH" => join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":"),
+ "FLOOBLECRANK" => flooblecrank_status
+ )
using_output = chomp(String(read(cmd)))
@test success(cmd)
@test artifact_path(right_hash) == using_output
tmpdir = mktempdir()
mkpath("$tmpdir/foo/$(flooblecrank_status)")
- rm("$tmpdir/foo/$(flooblecrank_status)"; recursive=true, force=true)
+ rm("$tmpdir/foo/$(flooblecrank_status)"; recursive = true, force = true)
cp(project_path, "$tmpdir/foo/$(flooblecrank_status)")
cp(Base.DEPOT_PATH[1], "$tmpdir/foo/$(flooblecrank_status)/depot")
end
@@ -503,7 +525,7 @@ end
p = HostPlatform()
p["flooblecrank"] = "engaged"
- add_this_pkg(; platform=p)
+ add_this_pkg(; platform = p)
@test isdir(artifact_path(engaged_hash))
@test !isdir(artifact_path(disengaged_hash))
end
@@ -529,7 +551,7 @@ end
p = HostPlatform()
p["flooblecrank"] = "engaged"
- Pkg.API.instantiate(; platform=p)
+ Pkg.API.instantiate(; platform = p)
@test isdir(artifact_path(engaged_hash))
@test isdir(artifact_path(disengaged_hash))
@@ -581,34 +603,14 @@ end
# Test that unbinding the `die_hash` and running `gc()` again still doesn't
# remove it, but it does add it to the orphan list
unbind_artifact!(artifacts_toml, "die")
+ # Test that unbound artifacts are cleaned up
Pkg.gc()
@test artifact_exists(live_hash)
- @test artifact_exists(die_hash)
-
- orphaned_path = joinpath(Pkg.logdir(), "orphaned.toml")
- orphanage = TOML.parsefile(orphaned_path)
- @test any(x -> startswith(x, artifact_path(die_hash)), keys(orphanage))
-
- # Now, sleep for 0.2 seconds, then gc with a collect delay of 0.1 seconds
- # This should reap the `die_hash` immediately, as it has already been moved to
- # the orphaned list.
- sleep(0.2)
- Pkg.gc(;collect_delay=Millisecond(100))
- @test artifact_exists(live_hash)
@test !artifact_exists(die_hash)
- # die_hash should still be listed within the orphan list, but one more gc() will
- # remove it; this is intentional and allows for robust removal scheduling.
- orphanage = TOML.parsefile(orphaned_path)
- @test any(x -> startswith(x, artifact_path(die_hash)), keys(orphanage))
- Pkg.gc()
- orphanage = TOML.parsefile(orphaned_path)
- @test !any(x -> startswith(x, artifact_path(die_hash)), keys(orphanage))
-
- # Next, unbind the live_hash, then run with collect_delay=0, and ensure that
- # things are cleaned up immediately.
+ # Test cleanup of the remaining artifact
unbind_artifact!(artifacts_toml, "live")
- Pkg.gc(;collect_delay=Second(0))
+ Pkg.gc()
@test !artifact_exists(live_hash)
@test !artifact_exists(die_hash)
end
@@ -684,7 +686,7 @@ end
end
# Force Pkg to reload what it knows about artifact overrides
- @inferred Union{Nothing,Dict{Symbol,Any}} Pkg.Artifacts.load_overrides(;force=true)
+ @inferred Union{Nothing, Dict{Symbol, Any}} Pkg.Artifacts.load_overrides(; force = true)
# Verify that the hash-based override worked
@test artifact_path(baz_hash) == artifact_path(bar_hash)
@@ -694,17 +696,21 @@ end
# loads overridden package artifacts.
Pkg.activate(depot_container) do
copy_test_package(depot_container, "ArtifactOverrideLoading")
- Pkg.develop(Pkg.Types.PackageSpec(
- name="ArtifactOverrideLoading",
- uuid=aol_uuid,
- path=joinpath(depot_container, "ArtifactOverrideLoading"),
- ))
-
- (arty_path, barty_path) = Core.eval(Module(:__anon__), quote
- # TODO: This causes a loading.jl warning, probably Pkg is clashing because of a different UUID??
- using ArtifactOverrideLoading
- arty_path, barty_path
- end)
+ Pkg.develop(
+ Pkg.Types.PackageSpec(
+ name = "ArtifactOverrideLoading",
+ uuid = aol_uuid,
+ path = joinpath(depot_container, "ArtifactOverrideLoading"),
+ )
+ )
+
+ (arty_path, barty_path) = Core.eval(
+ Module(:__anon__), quote
+ # TODO: This causes a loading.jl warning, probably Pkg is clashing because of a different UUID??
+ using ArtifactOverrideLoading
+ arty_path, barty_path
+ end
+ )
@test arty_path == artifact_path(bar_hash)
@test barty_path == barty_override_path
@@ -727,7 +733,7 @@ end
end
# Force Pkg to reload what it knows about artifact overrides
- Pkg.Artifacts.load_overrides(;force=true)
+ Pkg.Artifacts.load_overrides(; force = true)
# Force Julia to re-load ArtifactOverrideLoading from scratch
pkgid = Base.PkgId(aol_uuid, "ArtifactOverrideLoading")
@@ -742,10 +748,12 @@ end
# loads overridden package artifacts.
Pkg.activate(depot_container) do
# TODO: This causes a loading.jl warning, probably Pkg is clashing because of a different UUID??
- (arty_path, barty_path) = Core.eval(Module(:__anon__), quote
- using ArtifactOverrideLoading
- arty_path, barty_path
- end)
+ (arty_path, barty_path) = Core.eval(
+ Module(:__anon__), quote
+ using ArtifactOverrideLoading
+ arty_path, barty_path
+ end
+ )
@test arty_path == barty_override_path
@test barty_path == barty_override_path
@@ -756,7 +764,7 @@ end
open(joinpath(depot1, "artifacts", "Overrides.toml"), "w") do io
TOML.print(io, overrides)
end
- @test_logs (:error, msg) match_mode=:any Pkg.Artifacts.load_overrides(;force=true)
+ @test_logs (:error, msg) match_mode = :any Pkg.Artifacts.load_overrides(; force = true)
end
# Mapping to a non-absolute path or SHA1 hash
@@ -781,7 +789,7 @@ end
empty!(DEPOT_PATH)
append!(DEPOT_PATH, old_depot_path)
Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.Artifacts.load_overrides(;force=true)
+ Pkg.Artifacts.load_overrides(; force = true)
end
end
@@ -800,22 +808,22 @@ end
@testset "installing artifacts when symlinks are copied" begin
# copy symlinks to simulate the typical Microsoft Windows user experience where
# developer mode is not enabled (no admin rights)
- withenv("BINARYPROVIDER_COPYDEREF"=>"true", "JULIA_PKG_IGNORE_HASHES"=>"true") do
+ withenv("BINARYPROVIDER_COPYDEREF" => "true", "JULIA_PKG_IGNORE_HASHES" => "true") do
temp_pkg_dir() do tmpdir
artifacts_toml = joinpath(tmpdir, "Artifacts.toml")
cp(joinpath(@__DIR__, "test_packages", "ArtifactInstallation", "Artifacts.toml"), artifacts_toml)
Pkg.activate(tmpdir)
cts_real_hash = create_artifact() do dir
- local meta = Artifacts.artifact_meta("collapse_the_symlink", artifacts_toml)
+ local meta = Pkg.Artifacts.artifact_meta("collapse_the_symlink", artifacts_toml)
local collapse_url = meta["download"][1]["url"]
local collapse_hash = meta["download"][1]["sha256"]
# Because "BINARYPROVIDER_COPYDEREF"=>"true", this will copy symlinks.
- download_verify_unpack(collapse_url, collapse_hash, dir; verbose=true, ignore_existence=true)
+ download_verify_unpack(collapse_url, collapse_hash, dir; verbose = true, ignore_existence = true)
end
cts_hash = artifact_hash("collapse_the_symlink", artifacts_toml)
@test !artifact_exists(cts_hash)
@test artifact_exists(cts_real_hash)
- @test_logs (:error, r"Tree Hash Mismatch!") match_mode=:any Pkg.instantiate()
+ @test_logs (:error, r"Tree Hash Mismatch!") match_mode = :any Pkg.instantiate()
@test artifact_exists(cts_hash)
# Make sure existing artifacts don't get deleted.
@test artifact_exists(cts_real_hash)
@@ -823,4 +831,57 @@ end
end
end
+@testset "count_artifacts and artifact_suffix" begin
+ artifacts_toml_dir = joinpath(@__DIR__, "test_packages", "ArtifactInstallation")
+ bogus_platform = Platform("bogus", "linux")
+
+ # No Artifacts.toml → nothing, no suffix
+ mktempdir() do empty_dir
+ @test count_artifacts(empty_dir) === nothing
+ @test artifact_suffix(nothing) == ""
+ end
+
+ # Platform with no matching artifacts → (0, 0), warning suffix
+ # Use a temp dir with a platform-specific-only Artifacts.toml (no platform-independent entries)
+ # so that the bogus platform genuinely matches nothing.
+ mktempdir() do platform_specific_dir
+ write(
+ joinpath(platform_specific_dir, "Artifacts.toml"), """
+ [[HelloWorldC]]
+ arch = "x86_64"
+ os = "linux"
+ git-tree-sha1 = "0000000000000000000000000000000000000000"
+
+ [[HelloWorldC.download]]
+ sha256 = "0000000000000000000000000000000000000000000000000000000000000000"
+ url = "https://example.com/HelloWorldC.tar.gz"
+ """
+ )
+ result = count_artifacts(platform_specific_dir; platform = bogus_platform)
+ @test result === (0, 0)
+ @test artifact_suffix(result) == " (no artifacts on this platform)"
+ end
+
+ # HostPlatform → at least one eager match (HelloWorldC) and one lazy (socrates)
+ host_result = count_artifacts(artifacts_toml_dir; platform = HostPlatform())
+ @test host_result !== nothing
+ n_eager, n_lazy = host_result
+ @test n_eager >= 1
+ @test n_lazy >= 1 # socrates is lazy = true
+ @test artifact_suffix(host_result) == ""
+end
+
+
+if Sys.iswindows()
+ @testset "filemode(dir) non-executable on windows" begin
+ mktempdir() do dir
+ touch(joinpath(dir, "foo"))
+ @test !isempty(readdir(dir))
+ # This technically should be true, the fact that it's not is
+ # a wrinkle of libuv, it would be nice to fix it and so if we
+ # do, this test will let us know.
+ @test filemode(dir) & 0o001 == 0
+ end
+ end
+end
end # module
diff --git a/test/binaryplatforms.jl b/test/binaryplatforms.jl
index 3400f7ff2f..22482e60c4 100644
--- a/test/binaryplatforms.jl
+++ b/test/binaryplatforms.jl
@@ -9,39 +9,39 @@ const platform = @inferred Platform platform_key_abi()
# This is a compatibility test; once we've fully migrated away from Pkg.BinaryPlatforms
# to the new Base.BinaryPlatforms module, we can throw away the shim definitions in
-# `BinaryPlatforms_compat.jl` and drop these tests.
+# `BinaryPlatformsCompat.jl` and drop these tests.
@testset "Compat - PlatformNames" begin
# Ensure the platform type constructors are well behaved
@testset "Platform constructors" begin
@test_throws ArgumentError Linux(:not_a_platform)
- @test_throws ArgumentError Linux(:x86_64; libc=:crazy_libc)
- @test_throws ArgumentError Linux(:x86_64; libc=:glibc, call_abi=:crazy_abi)
- @test_throws ArgumentError Linux(:x86_64; libc=:glibc, call_abi=:eabihf)
- @test_throws ArgumentError Linux(:armv7l; libc=:glibc, call_abi=:kekeke)
+ @test_throws ArgumentError Linux(:x86_64; libc = :crazy_libc)
+ @test_throws ArgumentError Linux(:x86_64; libc = :glibc, call_abi = :crazy_abi)
+ @test_throws ArgumentError Linux(:x86_64; libc = :glibc, call_abi = :eabihf)
+ @test_throws ArgumentError Linux(:armv7l; libc = :glibc, call_abi = :kekeke)
@test_throws ArgumentError MacOS(:i686)
- @test_throws ArgumentError MacOS(:x86_64; libc=:glibc)
- @test_throws ArgumentError MacOS(:x86_64; call_abi=:eabihf)
- @test_throws ArgumentError Windows(:x86_64; libc=:glibc)
- @test_throws ArgumentError Windows(:x86_64; call_abi=:eabihf)
+ @test_throws ArgumentError MacOS(:x86_64; libc = :glibc)
+ @test_throws ArgumentError MacOS(:x86_64; call_abi = :eabihf)
+ @test_throws ArgumentError Windows(:x86_64; libc = :glibc)
+ @test_throws ArgumentError Windows(:x86_64; call_abi = :eabihf)
@test_throws ArgumentError FreeBSD(:not_a_platform)
- @test_throws ArgumentError FreeBSD(:x86_64; libc=:crazy_libc)
- @test_throws ArgumentError FreeBSD(:x86_64; call_abi=:crazy_abi)
- @test_throws ArgumentError FreeBSD(:x86_64; call_abi=:eabihf)
+ @test_throws ArgumentError FreeBSD(:x86_64; libc = :crazy_libc)
+ @test_throws ArgumentError FreeBSD(:x86_64; call_abi = :crazy_abi)
+ @test_throws ArgumentError FreeBSD(:x86_64; call_abi = :eabihf)
# Test copy constructor
cabi = CompilerABI(;
- libgfortran_version=v"3",
- libstdcxx_version=v"3.4.18",
- cxxstring_abi=:cxx03,
+ libgfortran_version = v"3",
+ libstdcxx_version = v"3.4.18",
+ cxxstring_abi = :cxx03,
)
- cabi2 = CompilerABI(cabi; cxxstring_abi=:cxx11)
+ cabi2 = CompilerABI(cabi; cxxstring_abi = :cxx11)
@test libgfortran_version(cabi) == libgfortran_version(cabi2)
@test libstdcxx_version(cabi) == libstdcxx_version(cabi2)
@test cxxstring_abi(cabi) != cxxstring_abi(cabi2)
# Explicitly test that we can pass arguments to UnknownPlatform,
# and it doesn't do anything.
- @test UnknownPlatform(:riscv; libc=:fuschia_libc) == UnknownPlatform()
+ @test UnknownPlatform(:riscv; libc = :fuschia_libc) == UnknownPlatform()
end
@testset "Platform properties" begin
@@ -51,7 +51,7 @@ const platform = @inferred Platform platform_key_abi()
end
# Test that we can get the arch of various platforms
- @test arch(Linux(:aarch64; libc=:musl)) == :aarch64
+ @test arch(Linux(:aarch64; libc = :musl)) == :aarch64
@test arch(Windows(:i686)) == :i686
@test arch(FreeBSD(:amd64)) == :x86_64
@test arch(FreeBSD(:i386)) == :i686
@@ -70,13 +70,13 @@ const platform = @inferred Platform platform_key_abi()
@test call_abi(Linux(:x86_64)) === nothing
@test call_abi(Linux(:armv6l)) == :eabihf
- @test call_abi(Linux(:armv7l; call_abi=:eabihf)) == :eabihf
- @test call_abi(UnknownPlatform(;call_abi=:eabihf)) === nothing
+ @test call_abi(Linux(:armv7l; call_abi = :eabihf)) == :eabihf
+ @test call_abi(UnknownPlatform(; call_abi = :eabihf)) === nothing
@test triplet(Windows(:i686)) == "i686-w64-mingw32"
- @test triplet(Linux(:x86_64; libc=:musl)) == "x86_64-linux-musl"
- @test triplet(Linux(:armv7l; libc=:musl)) == "armv7l-linux-musleabihf"
- @test triplet(Linux(:armv6l; libc=:musl, call_abi=:eabihf)) == "armv6l-linux-musleabihf"
+ @test triplet(Linux(:x86_64; libc = :musl)) == "x86_64-linux-musl"
+ @test triplet(Linux(:armv7l; libc = :musl)) == "armv7l-linux-musleabihf"
+ @test triplet(Linux(:armv6l; libc = :musl, call_abi = :eabihf)) == "armv6l-linux-musleabihf"
@test triplet(Linux(:x86_64)) == "x86_64-linux-gnu"
@test triplet(Linux(:armv6l)) == "armv6l-linux-gnueabihf"
@test triplet(MacOS()) == "x86_64-apple-darwin14"
@@ -100,20 +100,20 @@ const platform = @inferred Platform platform_key_abi()
@testset "platforms_match()" begin
# Just do a quick combinatorial sweep for completeness' sake for platform matching
for libgfortran_version in (nothing, v"3", v"5"),
- libstdcxx_version in (nothing, v"3.4.18", v"3.4.26"),
- cxxstring_abi in (nothing, :cxx03, :cxx11)
+ libstdcxx_version in (nothing, v"3.4.18", v"3.4.26"),
+ cxxstring_abi in (nothing, :cxx03, :cxx11)
cabi = CompilerABI(;
- libgfortran_version=libgfortran_version,
- libstdcxx_version=libstdcxx_version,
- cxxstring_abi=cxxstring_abi,
+ libgfortran_version = libgfortran_version,
+ libstdcxx_version = libstdcxx_version,
+ cxxstring_abi = cxxstring_abi,
)
- @test platforms_match(Linux(:x86_64), Linux(:x86_64, compiler_abi=cabi))
- @test platforms_match(Linux(:x86_64, compiler_abi=cabi), Linux(:x86_64))
+ @test platforms_match(Linux(:x86_64), Linux(:x86_64, compiler_abi = cabi))
+ @test platforms_match(Linux(:x86_64, compiler_abi = cabi), Linux(:x86_64))
# Also test auto-string-parsing
- @test platforms_match(triplet(Linux(:x86_64)), Linux(:x86_64, compiler_abi=cabi))
- @test platforms_match(Linux(:x86_64), triplet(Linux(:x86_64, compiler_abi=cabi)))
+ @test platforms_match(triplet(Linux(:x86_64)), Linux(:x86_64, compiler_abi = cabi))
+ @test platforms_match(Linux(:x86_64), triplet(Linux(:x86_64, compiler_abi = cabi)))
end
# Ensure many of these things do NOT match
@@ -124,18 +124,18 @@ const platform = @inferred Platform platform_key_abi()
# Make some explicitly non-matching cabi's
base_cabi = CompilerABI(;
- libgfortran_version=v"5",
- cxxstring_abi=:cxx11,
+ libgfortran_version = v"5",
+ cxxstring_abi = :cxx11,
)
for arch in (:x86_64, :i686, :aarch64, :armv6l, :armv7l),
- cabi in (
- CompilerABI(libgfortran_version=v"3"),
- CompilerABI(cxxstring_abi=:cxx03),
- CompilerABI(libgfortran_version=v"4", cxxstring_abi=:cxx11),
- CompilerABI(libgfortran_version=v"3", cxxstring_abi=:cxx03),
- )
-
- @test !platforms_match(Linux(arch, compiler_abi=base_cabi), Linux(arch, compiler_abi=cabi))
+ cabi in (
+ CompilerABI(libgfortran_version = v"3"),
+ CompilerABI(cxxstring_abi = :cxx03),
+ CompilerABI(libgfortran_version = v"4", cxxstring_abi = :cxx11),
+ CompilerABI(libgfortran_version = v"3", cxxstring_abi = :cxx03),
+ )
+
+ @test !platforms_match(Linux(arch, compiler_abi = base_cabi), Linux(arch, compiler_abi = cabi))
end
end
@@ -149,7 +149,7 @@ const platform = @inferred Platform platform_key_abi()
@test !Sys.isapple(Linux(:powerpc64le))
@test Sys.isbsd(MacOS())
@test Sys.isbsd(FreeBSD(:x86_64))
- @test !Sys.isbsd(Linux(:powerpc64le; libc=:musl))
+ @test !Sys.isbsd(Linux(:powerpc64le; libc = :musl))
end
end
diff --git a/test/extensions.jl b/test/extensions.jl
index f7d7ab26b9..d82dfa020b 100644
--- a/test/extensions.jl
+++ b/test/extensions.jl
@@ -1,43 +1,43 @@
-using .Utils
+using .Utils
using Test
using UUIDs
@testset "weak deps" begin
he_root = joinpath(@__DIR__, "test_packages", "ExtensionExamples", "HasExtensions.jl")
hdwe_root = joinpath(@__DIR__, "test_packages", "ExtensionExamples", "HasDepWithExtensions.jl")
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# clean out any .cov files from previous test runs
recursive_rm_cov_files(he_root)
recursive_rm_cov_files(hdwe_root)
- Pkg.activate(; temp=true)
- Pkg.develop(path=he_root)
- Pkg.test("HasExtensions", julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.activate(; temp = true)
+ Pkg.develop(path = he_root)
+ Pkg.test("HasExtensions", julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
@test !any(endswith(".cov"), readdir(joinpath(he_root, "src")))
@test !any(endswith(".cov"), readdir(joinpath(he_root, "ext")))
- Pkg.test("HasExtensions", coverage=true, julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.test("HasExtensions", coverage = true, julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
@test any(endswith(".cov"), readdir(joinpath(he_root, "src")))
@test any(endswith(".cov"), readdir(joinpath(he_root, "ext")))
end
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# clean out any .cov files from previous test runs
recursive_rm_cov_files(he_root)
recursive_rm_cov_files(hdwe_root)
- Pkg.activate(; temp=true)
- Pkg.develop(path=hdwe_root)
- Pkg.test("HasDepWithExtensions", julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.activate(; temp = true)
+ Pkg.develop(path = hdwe_root)
+ Pkg.test("HasDepWithExtensions", julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
io = IOBuffer()
- Pkg.status(; extensions=true, mode=Pkg.PKGMODE_MANIFEST, io)
- # TODO: Test output when ext deps are loaded etc.
+ Pkg.status(; extensions = true, mode = Pkg.PKGMODE_MANIFEST, io)
+ # TODO: Test output when ext deps are loaded etc.
str = String(take!(io))
- @test contains(str, "└─ OffsetArraysExt [OffsetArrays]" )
+ @test contains(str, "└─ OffsetArraysExt [OffsetArrays]") || contains(str, "├─ OffsetArraysExt [OffsetArrays]")
@test !any(endswith(".cov"), readdir(joinpath(hdwe_root, "src")))
@test !any(endswith(".cov"), readdir(joinpath(he_root, "src")))
@test !any(endswith(".cov"), readdir(joinpath(he_root, "ext")))
- Pkg.test("HasDepWithExtensions", coverage=true, julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.test("HasDepWithExtensions", coverage = true, julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
@test any(endswith(".cov"), readdir(joinpath(hdwe_root, "src")))
# No coverage files should be in HasExtensions even though it's used because coverage
@@ -46,61 +46,60 @@ using UUIDs
@test !any(endswith(".cov"), readdir(joinpath(he_root, "ext")))
end
- isolate(loaded_depot=true) do
- Pkg.activate(; temp=true)
- Pkg.develop(path=he_root)
+ isolate(loaded_depot = true) do
+ Pkg.activate(; temp = true)
+ Pkg.develop(path = he_root)
@test_throws Pkg.Resolve.ResolverError Pkg.add(; name = "OffsetArrays", version = "0.9.0")
end
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
depot = mktempdir(); empty!(DEPOT_PATH); push!(DEPOT_PATH, depot); Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.activate(; temp=true)
- Pkg.Registry.add(path=joinpath(@__DIR__, "test_packages", "ExtensionExamples", "ExtensionRegistry"))
+ Pkg.activate(; temp = true)
+ Pkg.Registry.add(path = joinpath(@__DIR__, "test_packages", "ExtensionExamples", "ExtensionRegistry"))
Pkg.Registry.add("General")
Pkg.add("HasExtensions")
- Pkg.test("HasExtensions", julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.test("HasExtensions", julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
Pkg.add("HasDepWithExtensions")
- Pkg.test("HasDepWithExtensions", julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.test("HasDepWithExtensions", julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
@test_throws Pkg.Resolve.ResolverError Pkg.add(; name = "OffsetArrays", version = "0.9.0")
end
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
withenv("JULIA_PKG_PRECOMPILE_AUTO" => 0) do
depot = mktempdir(); empty!(DEPOT_PATH); push!(DEPOT_PATH, depot); Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.activate(; temp=true)
- Pkg.Registry.add(path=joinpath(@__DIR__, "test_packages", "ExtensionExamples", "ExtensionRegistry"))
+ Pkg.activate(; temp = true)
+ Pkg.Registry.add(path = joinpath(@__DIR__, "test_packages", "ExtensionExamples", "ExtensionRegistry"))
Pkg.Registry.add("General")
Pkg.add("HasDepWithExtensions")
end
iob = IOBuffer()
- Pkg.precompile("HasDepWithExtensions", io=iob)
+ Pkg.precompile("HasDepWithExtensions", io = iob)
out = String(take!(iob))
@test occursin("Precompiling", out)
@test occursin("OffsetArraysExt", out)
@test occursin("HasExtensions", out)
@test occursin("HasDepWithExtensions", out)
end
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
withenv("JULIA_PKG_PRECOMPILE_AUTO" => 0) do
- Pkg.activate(; temp=true)
- Pkg.add("Example", target=:weakdeps)
+ Pkg.activate(; temp = true)
+ Pkg.add("Example", target = :weakdeps)
proj = Pkg.Types.Context().env.project
@test isempty(proj.deps)
@test proj.weakdeps == Dict{String, Base.UUID}("Example" => Base.UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
- Pkg.activate(; temp=true)
- Pkg.add("Example", target=:extras)
+ Pkg.activate(; temp = true)
+ Pkg.add("Example", target = :extras)
proj = Pkg.Types.Context().env.project
@test isempty(proj.deps)
@test proj.extras == Dict{String, Base.UUID}("Example" => Base.UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
end
end
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
mktempdir() do dir
Pkg.Registry.add("General")
- path = joinpath(@__DIR__, "test_packages", "TestWeakDepProject")
- cp(path, joinpath(dir, "TestWeakDepProject"))
- Pkg.activate(joinpath(dir, "TestWeakDepProject"))
+ path = copy_test_package(dir, "TestWeakDepProject")
+ Pkg.activate(path)
Pkg.resolve()
@test Pkg.dependencies()[UUID("2ab3a3ac-af41-5b50-aa03-7779005ae688")].version == v"0.3.26"
@@ -114,4 +113,47 @@ using UUIDs
@test !("LogExpFunctions" in keys(ctx.env.project.weakdeps))
end
end
+
+ # Test for issue #3766: Weak dependencies should not be required to be in available registries
+ isolate(loaded_depot = false) do
+ mktempdir() do dir
+ # Create a minimal test package with a weak dependency to a non-existent UUID
+ test_pkg_path = joinpath(dir, "TestPkgWeakDepMissing")
+ mkpath(test_pkg_path)
+
+ # Write a Project.toml with a weak dependency that doesn't exist in any registry
+ fake_weak_dep_uuid = "00000000-0000-0000-0000-000000000001"
+ write(
+ joinpath(test_pkg_path, "Project.toml"), """
+ name = "TestPkgWeakDepMissing"
+ uuid = "10000000-0000-0000-0000-000000000001"
+ version = "0.1.0"
+
+ [weakdeps]
+ FakeWeakDep = "$fake_weak_dep_uuid"
+
+ [extensions]
+ FakeExt = "FakeWeakDep"
+ """
+ )
+
+ mkpath(joinpath(test_pkg_path, "src"))
+ write(
+ joinpath(test_pkg_path, "src", "TestPkgWeakDepMissing.jl"), """
+ module TestPkgWeakDepMissing
+ greet() = "Hello from TestPkgWeakDepMissing!"
+ end
+ """
+ )
+
+ depot = mktempdir(); empty!(DEPOT_PATH); push!(DEPOT_PATH, depot); Base.append_bundled_depot_path!(DEPOT_PATH)
+ Pkg.activate(; temp = true)
+ Pkg.Registry.add("General")
+
+ # This should succeed even though FakeWeakDep doesn't exist in any registry
+ # because it's only a weak dependency
+ Pkg.develop(path = test_pkg_path)
+ @test haskey(Pkg.dependencies(), UUID("10000000-0000-0000-0000-000000000001"))
+ end
+ end
end
diff --git a/test/force_latest_compatible_version.jl b/test/force_latest_compatible_version.jl
index 9547c06adc..bc5402b956 100644
--- a/test/force_latest_compatible_version.jl
+++ b/test/force_latest_compatible_version.jl
@@ -297,7 +297,7 @@ const test_package_parent_dir = joinpath(
)
@test_logs(
(:warn, message_2),
- match_mode=:any,
+ match_mode = :any,
Pkg.test(;
force_latest_compatible_version = true,
),
@@ -314,7 +314,7 @@ const test_package_parent_dir = joinpath(
)
@test_logs(
(:warn, message_2),
- match_mode=:any,
+ match_mode = :any,
Pkg.test(;
force_latest_compatible_version = true,
allow_earlier_backwards_compatible_versions,
diff --git a/test/historical_stdlib_version.jl b/test/historical_stdlib_version.jl
new file mode 100644
index 0000000000..c94164e338
--- /dev/null
+++ b/test/historical_stdlib_version.jl
@@ -0,0 +1,376 @@
+module HistoricalStdlibVersionsTests
+using ..Pkg
+using Pkg.Types: is_stdlib
+using Pkg.Artifacts: artifact_meta, artifact_path
+using Base.BinaryPlatforms: HostPlatform, Platform, platforms_match
+using Test
+using TOML
+
+ENV["HISTORICAL_STDLIB_VERSIONS_AUTO_REGISTER"] = "false"
+using HistoricalStdlibVersions
+
+include("utils.jl")
+using .Utils
+
+@testset "is_stdlib() across versions" begin
+ HistoricalStdlibVersions.register!()
+
+ networkoptions_uuid = Base.UUID("ca575930-c2e3-43a9-ace4-1e988b2c1908")
+ pkg_uuid = Base.UUID("44cfe95a-1eb2-52ea-b672-e2afdf69b78f")
+ mbedtls_jll_uuid = Base.UUID("c8ffd9c3-330d-5841-b78e-0817d7145fa1")
+
+ # Test NetworkOptions across multiple versions (It became an stdlib in v1.6+, and was registered)
+ @test is_stdlib(networkoptions_uuid)
+ @test is_stdlib(networkoptions_uuid, v"1.6")
+ @test !is_stdlib(networkoptions_uuid, v"1.5")
+ @test !is_stdlib(networkoptions_uuid, v"1.0.0")
+ @test !is_stdlib(networkoptions_uuid, v"0.7")
+ @test !is_stdlib(networkoptions_uuid, nothing)
+
+ # Pkg is an unregistered stdlib and has always been an stdlib
+ @test is_stdlib(pkg_uuid)
+ @test is_stdlib(pkg_uuid, v"1.0")
+ @test is_stdlib(pkg_uuid, v"1.6")
+ @test is_stdlib(pkg_uuid, v"0.7")
+ @test is_stdlib(pkg_uuid, nothing)
+
+ # We can't serve information for unknown major.minor versions (patches can not match)
+ @test_throws Pkg.Types.PkgError is_stdlib(pkg_uuid, v"999.999.999")
+ @test is_stdlib(pkg_uuid, v"1.10.999")
+
+ # MbedTLS_jll stopped being a stdlib in 1.12
+ @test !is_stdlib(mbedtls_jll_uuid)
+ @test !is_stdlib(mbedtls_jll_uuid, v"1.12")
+ @test is_stdlib(mbedtls_jll_uuid, v"1.11")
+ @test is_stdlib(mbedtls_jll_uuid, v"1.10")
+
+ HistoricalStdlibVersions.unregister!()
+ # Test that we can probe for stdlibs for the current version with no STDLIBS_BY_VERSION,
+ # but that we throw a PkgError if we ask for a particular julia version.
+ @test is_stdlib(networkoptions_uuid)
+ @test_throws Pkg.Types.PkgError is_stdlib(networkoptions_uuid, v"1.6")
+end
+
+
+@testset "Pkg.add() with julia_version" begin
+ HistoricalStdlibVersions.register!()
+
+ # A package with artifacts that went from normal package -> stdlib
+ gmp_jll_uuid = "781609d7-10c4-51f6-84f2-b8444358ff6d"
+ # A package that has always only ever been an stdlib
+ linalg_uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+ # A package that went from normal package - >stdlib
+ networkoptions_uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
+
+ function get_manifest_block(name)
+ manifest_path = joinpath(dirname(Base.active_project()), "Manifest.toml")
+ @test isfile(manifest_path)
+ deps = Base.get_deps(TOML.parsefile(manifest_path))
+ @test haskey(deps, name)
+ return only(deps[name])
+ end
+
+ isolate(loaded_depot = true) do
+ # Next, test that if we ask for `v1.5` it DOES have a version, and that GMP_jll installs v6.1.X
+ Pkg.add(["NetworkOptions", "GMP_jll"]; julia_version = v"1.5")
+ no_block = get_manifest_block("NetworkOptions")
+ @test haskey(no_block, "uuid")
+ @test no_block["uuid"] == networkoptions_uuid
+ @test haskey(no_block, "version")
+
+ gmp_block = get_manifest_block("GMP_jll")
+ @test haskey(gmp_block, "uuid")
+ @test gmp_block["uuid"] == gmp_jll_uuid
+ @test haskey(gmp_block, "version")
+ @test startswith(gmp_block["version"], "6.1.2")
+
+ # Test that the artifact of GMP_jll contains the right library
+ @test haskey(gmp_block, "git-tree-sha1")
+ gmp_jll_dir = Pkg.Operations.find_installed("GMP_jll", Base.UUID(gmp_jll_uuid), Base.SHA1(gmp_block["git-tree-sha1"]))
+ @test isdir(gmp_jll_dir)
+ artifacts_toml = joinpath(gmp_jll_dir, "Artifacts.toml")
+ @test isfile(artifacts_toml)
+ meta = artifact_meta("GMP", artifacts_toml)
+
+ # `meta` can be `nothing` on some of our newer platforms; we _know_ this should
+ # not be the case on the following platforms, so we check these explicitly to
+ # ensure that we haven't accidentally broken something, and then we gate some
+ # following tests on whether or not `meta` is `nothing`:
+ for arch in ("x86_64", "i686"), os in ("linux", "mac", "windows")
+ if platforms_match(HostPlatform(), Platform(arch, os))
+ @test meta !== nothing
+ end
+ end
+
+ # These tests require a matching platform artifact for this old version of GMP_jll,
+ # which is not the case on some of our newer platforms.
+ if meta !== nothing
+ gmp_artifact_path = artifact_path(Base.SHA1(meta["git-tree-sha1"]))
+ @test isdir(gmp_artifact_path)
+
+ # On linux, we can check the filename to ensure it's grabbing the correct library
+ if Sys.islinux()
+ libgmp_filename = joinpath(gmp_artifact_path, "lib", "libgmp.so.10.3.2")
+ @test isfile(libgmp_filename)
+ end
+ end
+ end
+
+ # Next, test that if we ask for `v1.6`, GMP_jll gets `v6.2.0`, and for `v1.7`, it gets `v6.2.1`
+ function do_gmp_test(julia_version, gmp_version)
+ isolate(loaded_depot = true) do
+ Pkg.add("GMP_jll"; julia_version)
+ gmp_block = get_manifest_block("GMP_jll")
+ @test haskey(gmp_block, "uuid")
+ @test gmp_block["uuid"] == gmp_jll_uuid
+ @test haskey(gmp_block, "version")
+ @test startswith(gmp_block["version"], string(gmp_version))
+ end
+ end
+ do_gmp_test(v"1.6", v"6.2.0")
+ do_gmp_test(v"1.7", v"6.2.1")
+
+ isolate(loaded_depot = true) do
+ # Next, test that if we ask for `nothing`, NetworkOptions has a `version` but `LinearAlgebra` does not.
+ Pkg.add(["LinearAlgebra", "NetworkOptions"]; julia_version = nothing)
+ no_block = get_manifest_block("NetworkOptions")
+ @test haskey(no_block, "uuid")
+ @test no_block["uuid"] == networkoptions_uuid
+ @test haskey(no_block, "version")
+ linalg_block = get_manifest_block("LinearAlgebra")
+ @test haskey(linalg_block, "uuid")
+ @test linalg_block["uuid"] == linalg_uuid
+ @test !haskey(linalg_block, "version")
+ end
+
+ isolate(loaded_depot = true) do
+ # Next, test that stdlibs do not get dependencies from the registry
+ # NOTE: this test depends on the fact that in Julia v1.6+ we added
+ # "fake" JLLs that do not depend on Pkg while the "normal" p7zip_jll does.
+ # A future p7zip_jll in the registry may not depend on Pkg, so be sure
+ # to verify your assumptions when updating this test.
+ Pkg.add("p7zip_jll")
+ p7zip_jll_uuid = Base.UUID("3f19e933-33d8-53b3-aaab-bd5110c3b7a0")
+ @test !("Pkg" in keys(Pkg.dependencies()[p7zip_jll_uuid].dependencies))
+ end
+
+ HistoricalStdlibVersions.unregister!()
+end
+
+@testset "Resolving for another version of Julia" begin
+ HistoricalStdlibVersions.register!()
+ temp_pkg_dir() do dir
+ function find_by_name(versions, name)
+ idx = findfirst(p -> p.name == name, versions)
+ if idx === nothing
+ return nothing
+ end
+ return versions[idx]
+ end
+
+ # First, we're going to resolve for specific versions of Julia, ensuring we get the right dep versions:
+ Pkg.Registry.download_default_registries(Pkg.stdout_f())
+ ctx = Pkg.Types.Context(; julia_version = v"1.5")
+ versions, deps = Pkg.Operations._resolve(
+ ctx.io, ctx.env, ctx.registries, [
+ Pkg.Types.PackageSpec(name = "MPFR_jll", uuid = Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3")),
+ ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version
+ )
+ gmp = find_by_name(versions, "GMP_jll")
+ @test gmp !== nothing
+ @test gmp.version.major == 6 && gmp.version.minor == 1
+ ctx = Pkg.Types.Context(; julia_version = v"1.6")
+ versions, deps = Pkg.Operations._resolve(
+ ctx.io, ctx.env, ctx.registries, [
+ Pkg.Types.PackageSpec(name = "MPFR_jll", uuid = Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3")),
+ ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version
+ )
+ gmp = find_by_name(versions, "GMP_jll")
+ @test gmp !== nothing
+ @test gmp.version.major == 6 && gmp.version.minor == 2
+
+ # We'll also test resolving an "impossible" manifest; one that requires two package versions that
+ # are not both loadable by the same Julia:
+ ctx = Pkg.Types.Context(; julia_version = nothing)
+ versions, deps = Pkg.Operations._resolve(
+ ctx.io, ctx.env, ctx.registries, [
+ # This version of GMP only works on Julia v1.6
+ Pkg.Types.PackageSpec(name = "GMP_jll", uuid = Base.UUID("781609d7-10c4-51f6-84f2-b8444358ff6d"), version = v"6.2.0"),
+ # This version of MPFR only works on Julia v1.5
+ Pkg.Types.PackageSpec(name = "MPFR_jll", uuid = Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3"), version = v"4.0.2"),
+ ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version
+ )
+ gmp = find_by_name(versions, "GMP_jll")
+ @test gmp !== nothing
+ @test gmp.version.major == 6 && gmp.version.minor == 2
+ mpfr = find_by_name(versions, "MPFR_jll")
+ @test mpfr !== nothing
+ @test mpfr.version.major == 4 && mpfr.version.minor == 0
+ end
+ HistoricalStdlibVersions.unregister!()
+end
+
+HelloWorldC_jll_UUID = Base.UUID("dca1746e-5efc-54fc-8249-22745bc95a49")
+GMP_jll_UUID = Base.UUID("781609d7-10c4-51f6-84f2-b8444358ff6d")
+OpenBLAS_jll_UUID = Base.UUID("4536629a-c528-5b80-bd46-f80d51c5b363")
+libcxxwrap_julia_jll_UUID = Base.UUID("3eaa8342-bff7-56a5-9981-c04077f7cee7")
+libblastrampoline_jll_UUID = Base.UUID("8e850b90-86db-534c-a0d3-1478176c7d93")
+
+isolate(loaded_depot = true) do
+ @testset "Elliot and Mosè's mini Pkg test suite" begin # https://github.com/JuliaPackaging/JLLPrefixes.jl/issues/6
+ HistoricalStdlibVersions.register!()
+ @testset "Standard add" begin
+ Pkg.activate(temp = true)
+ # Standard add (non-stdlib, flexible version)
+ Pkg.add(; name = "HelloWorldC_jll")
+ @test haskey(Pkg.dependencies(), HelloWorldC_jll_UUID)
+
+ Pkg.activate(temp = true)
+ # Standard add (non-stdlib, url and rev)
+ Pkg.add(; name = "HelloWorldC_jll", url = "https://github.com/JuliaBinaryWrappers/HelloWorldC_jll.jl", rev = "0b4959a49385d4bb00efd281447dc19348ebac08")
+ @test Pkg.dependencies()[Base.UUID("dca1746e-5efc-54fc-8249-22745bc95a49")].git_revision === "0b4959a49385d4bb00efd281447dc19348ebac08"
+
+ Pkg.activate(temp = true)
+ # Standard add (non-stdlib, specified version)
+ Pkg.add(; name = "HelloWorldC_jll", version = v"1.0.10+1")
+ @test Pkg.dependencies()[Base.UUID("dca1746e-5efc-54fc-8249-22745bc95a49")].version === v"1.0.10+1"
+
+ Pkg.activate(temp = true)
+ # Standard add (non-stdlib, versionspec)
+ Pkg.add(; name = "HelloWorldC_jll", version = Pkg.Types.VersionSpec("1.0.10"))
+ @test Pkg.dependencies()[Base.UUID("dca1746e-5efc-54fc-8249-22745bc95a49")].version === v"1.0.10+1"
+ end
+
+ @testset "Julia-version-dependent add" begin
+ Pkg.activate(temp = true)
+ # Julia-version-dependent add (non-stdlib, flexible version)
+ Pkg.add(; name = "libcxxwrap_julia_jll", julia_version = v"1.7")
+ @test Pkg.dependencies()[libcxxwrap_julia_jll_UUID].version >= v"0.14.0+0"
+
+ Pkg.activate(temp = true)
+ # Julia-version-dependent add (non-stdlib, specified version)
+ Pkg.add(; name = "libcxxwrap_julia_jll", version = v"0.9.4+0", julia_version = v"1.7")
+ @test Pkg.dependencies()[libcxxwrap_julia_jll_UUID].version === v"0.9.4+0"
+
+ Pkg.activate(temp = true)
+ Pkg.add(; name = "libcxxwrap_julia_jll", version = v"0.8.8+1", julia_version = v"1.9")
+ # FIXME? Pkg.dependencies() complains here that mbedtls_jll isn't installed so can't be used here.
+ # Perhaps Pkg.dependencies() should just return state and not error if source isn't installed?
+ @test_skip Pkg.dependencies()[libcxxwrap_julia_jll_UUID].version === v"0.9.4+0"
+ for pkgspec in Pkg.Operations.load_all_deps_loadable(Pkg.Types.Context().env)
+ if pkgspec.uuid == libcxxwrap_julia_jll_UUID
+ @test pkgspec.version === v"0.8.8+1"
+ end
+ end
+ end
+
+ @testset "Old Pkg add regression" begin
+ Pkg.activate(temp = true)
+ Pkg.add(; name = "Pkg", julia_version = v"1.11")
+ end
+
+ @testset "Stdlib add" begin
+ Pkg.activate(temp = true)
+ # Stdlib add (current julia version)
+ Pkg.add(; name = "GMP_jll")
+ @test Pkg.dependencies()[GMP_jll_UUID].version >= v"6.3.0+2" # v1.13.0-DEV
+
+ Pkg.activate(temp = true)
+ # Make sure the source of GMP_jll is installed
+ Pkg.add([PackageSpec("GMP_jll")]; julia_version = v"1.6")
+ src = Pkg.Operations.find_installed(
+ "GMP_jll",
+ Base.UUID("781609d7-10c4-51f6-84f2-b8444358ff6d"),
+ Base.SHA1("40388878122d491a2e55b0e730196098595d8a90")
+ )
+ @test src isa String
+ # issue https://github.com/JuliaLang/Pkg.jl/issues/2930
+ @test_broken isdir(src)
+ @test_broken isfile(joinpath(src, "Artifacts.toml"))
+
+ Pkg.activate(temp = true)
+ # Stdlib add (other julia version)
+ Pkg.add(; name = "GMP_jll", julia_version = v"1.7")
+ @test Pkg.dependencies()[GMP_jll_UUID].version === v"6.2.1+1"
+
+ # Stdlib add (other julia version, with specific version bound)
+ # Note, this doesn't work properly, it adds but doesn't install any artifacts.
+ # Technically speaking, this is probably okay from Pkg's perspective, since
+ # we're asking Pkg to resolve according to what Julia v1.7 would do.... and
+ # Julia v1.7 would not install anything because it's a stdlib! However, we
+ # would sometimes like to resolve the latest version of GMP_jll for Julia v1.7
+ # then install that. If we have to manually work around that and look up what
+ # GMP_jll for Julia v1.7 is, then ask for that version explicitly, that's ok.
+
+ Pkg.activate(temp = true)
+ Pkg.add(; name = "GMP_jll", julia_version = v"1.7")
+
+ # This is expected to fail, that version can't live with `julia_version = v"1.7"`
+ @test_throws Pkg.Resolve.ResolverError Pkg.add(; name = "GMP_jll", version = v"6.2.0+5", julia_version = v"1.7")
+
+ Pkg.activate(temp = true)
+ # Stdlib add (julia_version == nothing)
+ Pkg.add(; name = "GMP_jll", version = v"6.2.1+1", julia_version = nothing)
+ @test Pkg.dependencies()[GMP_jll_UUID].version === v"6.2.1+1"
+ end
+
+ @testset "julia_version = nothing" begin
+ @testset "stdlib add" begin
+ Pkg.activate(temp = true)
+ # Stdlib add (impossible constraints due to julia version compat, so
+ # must pass `julia_version=nothing`). In this case, we always fully
+ # specify versions, but if we don't, it's okay to just give us whatever
+ # the resolver prefers
+ Pkg.add(
+ [
+ PackageSpec(; name = "OpenBLAS_jll", version = v"0.3.13"),
+ PackageSpec(; name = "libblastrampoline_jll", version = v"5.1.1"),
+ ]; julia_version = nothing
+ )
+ @test v"0.3.14" > Pkg.dependencies()[OpenBLAS_jll_UUID].version >= v"0.3.13"
+ @test v"5.1.2" > Pkg.dependencies()[libblastrampoline_jll_UUID].version >= v"5.1.1"
+ end
+ @testset "non-stdlib JLL add" begin
+ platform = Platform("x86_64", "linux"; libc = "musl")
+ # specific version vs. compat spec
+ @testset for version in (v"3.24.3+0", "3.24.3")
+ dependencies = [PackageSpec(; name = "CMake_jll", version = version)]
+ @testset "with context (using private Pkg.add method)" begin
+ Pkg.activate(temp = true)
+ ctx = Pkg.Types.Context(; julia_version = nothing)
+ mydeps = deepcopy(dependencies)
+ foreach(Pkg.API.handle_package_input!, mydeps)
+ Pkg.add(ctx, mydeps; platform)
+ end
+ @testset "with julia_version" begin
+ Pkg.activate(temp = true)
+ Pkg.add(deepcopy(dependencies); platform, julia_version = nothing)
+ end
+ end
+ end
+
+ @testset "Artifacts stdlib never falls back to registry" begin
+ # Test that when resolving for Julia 1.10 (where Artifacts is a stdlib with version=nothing),
+ # Pkg never installs the external Artifacts v1.3.0 from the registry
+ Pkg.activate(temp = true)
+ # Add a package that depends on Artifacts with julia_version = v"1.10"
+ # Artifacts should remain a stdlib, not be resolved to v1.3.0 from registry
+ ctx = Pkg.Types.Context(; julia_version = v"1.10")
+ # GMP_jll for Julia 1.10 should bring in Artifacts as a dependency
+ Pkg.add(ctx, [PackageSpec(; name = "GMP_jll")])
+
+ # Check that Artifacts is not in the manifest as an external package
+ # (If it were incorrectly resolved from registry, it would appear with version v1.3.0)
+ artifacts_uuid = Base.UUID("56f22d72-fd6d-98f1-02f0-08ddc0907c33")
+ manifest_entry = get(ctx.env.manifest, artifacts_uuid, nothing)
+ if manifest_entry !== nothing
+ # Artifacts should not have v1.3.0 (the registry version)
+ @test manifest_entry.version != v"1.3.0"
+ end
+ end
+ end
+ HistoricalStdlibVersions.unregister!()
+ end
+end
+
+end # module
diff --git a/test/manifest/formats/v2.0/Manifest.toml b/test/manifest/formats/v2.0/Manifest.toml
index 1156d8f6cc..da4bec6355 100644
--- a/test/manifest/formats/v2.0/Manifest.toml
+++ b/test/manifest/formats/v2.0/Manifest.toml
@@ -14,4 +14,3 @@ uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[deps.Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
-
diff --git a/test/manifest/formats/v2.1/Manifest.toml b/test/manifest/formats/v2.1/Manifest.toml
new file mode 100644
index 0000000000..9586bfccac
--- /dev/null
+++ b/test/manifest/formats/v2.1/Manifest.toml
@@ -0,0 +1,26 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.13.0-DEV"
+manifest_format = "2.1"
+some_other_field = "other"
+some_other_data = [1,2,3,4]
+
+[registries.General]
+uuid = "23338594-aafe-5451-b93e-139f81909106"
+url = "https://github.com/JuliaRegistries/General.git"
+
+[[deps.Example]]
+git-tree-sha1 = "46e44e869b4d90b96bd8ed1fdcf32244fddfb6cc"
+uuid = "7876af07-990d-54b4-ab0e-23690620f79a"
+version = "0.5.3"
+registries = "General"
+
+[[deps.Logging]]
+uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
+
+[[deps.Random]]
+deps = ["Serialization"]
+uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+
+[[deps.Serialization]]
+uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
diff --git a/test/manifest/formats/v2.1/Project.toml b/test/manifest/formats/v2.1/Project.toml
new file mode 100644
index 0000000000..89f989f872
--- /dev/null
+++ b/test/manifest/formats/v2.1/Project.toml
@@ -0,0 +1,4 @@
+[deps]
+Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+Example = "7876af07-990d-54b4-ab0e-23690620f79a"
diff --git a/test/manifest/good/withversion.toml b/test/manifest/good/withversion.toml
new file mode 100644
index 0000000000..23fdf14c0d
--- /dev/null
+++ b/test/manifest/good/withversion.toml
@@ -0,0 +1,14 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.14.0-DEV"
+manifest_format = "2.1"
+
+[[deps.VersionedDep1]]
+path = "VersionedDep1"
+uuid = "f08855a0-36cb-4a32-8ae5-a227b709c612"
+syntax.julia_version = "1.13.0"
+
+[[deps.VersionedDep2]]
+path = "VersionedDep2"
+uuid = "e127e659-a899-4a00-b565-5b74face18ba"
+syntax.julia_version = "1.14.0"
diff --git a/test/manifest/yanked/Manifest.toml b/test/manifest/yanked/Manifest.toml
new file mode 100644
index 0000000000..39261c8e24
--- /dev/null
+++ b/test/manifest/yanked/Manifest.toml
@@ -0,0 +1,62 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.13.0-DEV"
+manifest_format = "2.0"
+project_hash = "8a91c3bdaf7537df6f842463e0505fb7c623875c"
+
+[[deps.Compat]]
+deps = ["TOML", "UUIDs"]
+git-tree-sha1 = "3a3dfb30697e96a440e4149c8c51bf32f818c0f3"
+uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
+version = "4.17.0"
+
+ [deps.Compat.extensions]
+ CompatLinearAlgebraExt = "LinearAlgebra"
+
+ [deps.Compat.weakdeps]
+ Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
+ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+
+[[deps.Dates]]
+deps = ["Printf"]
+uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
+version = "1.11.0"
+
+[[deps.ExprTools]]
+git-tree-sha1 = "27415f162e6028e81c72b82ef756bf321213b6ec"
+uuid = "e2ba6199-217a-4e67-a87a-7c52f15ade04"
+version = "0.1.10"
+
+[[deps.Mocking]]
+deps = ["Compat", "ExprTools"]
+git-tree-sha1 = "d5ca7901d59738132d6f9be9a18da50bc85c5115"
+uuid = "78c3b35d-d492-501b-9361-3d52fe80e533"
+version = "0.7.4"
+
+[[deps.Printf]]
+deps = ["Unicode"]
+uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
+version = "1.11.0"
+
+[[deps.Random]]
+deps = ["SHA"]
+uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+version = "1.11.0"
+
+[[deps.SHA]]
+uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
+version = "0.7.0"
+
+[[deps.TOML]]
+deps = ["Dates"]
+uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
+version = "1.0.3"
+
+[[deps.UUIDs]]
+deps = ["Random", "SHA"]
+uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
+version = "1.11.0"
+
+[[deps.Unicode]]
+uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
+version = "1.11.0"
diff --git a/test/manifest/yanked/Project.toml b/test/manifest/yanked/Project.toml
new file mode 100644
index 0000000000..f61c7f288a
--- /dev/null
+++ b/test/manifest/yanked/Project.toml
@@ -0,0 +1,2 @@
+[deps]
+Mocking = "78c3b35d-d492-501b-9361-3d52fe80e533"
diff --git a/test/manifests.jl b/test/manifests.jl
index a1780673d1..322be3ff8a 100644
--- a/test/manifests.jl
+++ b/test/manifests.jl
@@ -1,56 +1,59 @@
module ManifestTests
-using Test, UUIDs, Dates, TOML
+using Test, UUIDs, Dates, TOML
import ..Pkg, LibGit2
-using ..Utils
+using ..Utils
# used with the reference manifests in `test/manifest/formats`
# ensures the manifests are valid and restored after test
-function reference_manifest_isolated_test(f, dir::String; v1::Bool=false)
- env_dir = joinpath(@__DIR__, "manifest", "formats", dir)
- env_manifest = joinpath(env_dir, "Manifest.toml")
- env_project = joinpath(env_dir, "Project.toml")
- cp(env_manifest, string(env_manifest, "_backup"))
- cp(env_project, string(env_project, "_backup"))
- try
+function reference_manifest_isolated_test(f, dir::String; v1::Bool = false)
+ source_env_dir = joinpath(@__DIR__, "manifest", "formats", dir)
+
+ # Create a temporary directory for the test files
+ temp_base_dir = mktempdir()
+ return try
+ # Copy entire directory structure to preserve paths that tests expect
+ env_dir = joinpath(temp_base_dir, dir)
+ cp(source_env_dir, env_dir)
+
+ env_manifest = joinpath(env_dir, "Manifest.toml")
+
isfile(env_manifest) || error("Reference manifest is missing")
if Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == !v1
error("Reference manifest file at $(env_manifest) is invalid")
end
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
f(env_dir, env_manifest)
end
finally
- cp(string(env_manifest, "_backup"), env_manifest, force = true)
- rm(string(env_manifest, "_backup"))
- cp(string(env_project, "_backup"), env_project, force = true)
- rm(string(env_project, "_backup"))
+ # Clean up temporary directory
+ rm(temp_base_dir, recursive = true)
end
end
##
@testset "Manifest.toml formats" begin
- @testset "Default manifest format is v2" begin
- isolate(loaded_depot=true) do
+ @testset "Default manifest format is v2.1" begin
+ isolate(loaded_depot = true) do
io = IOBuffer()
- Pkg.activate(; io=io, temp=true)
+ Pkg.activate(; io = io, temp = true)
output = String(take!(io))
@test occursin(r"Activating.*project at.*", output)
Pkg.add("Profile")
env_manifest = Pkg.Types.Context().env.manifest_file
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
- @test Pkg.Types.Context().env.manifest.manifest_format == v"2.0.0"
+ @test Pkg.Types.Context().env.manifest.manifest_format == v"2.1.0"
end
end
@testset "Empty manifest file is automatically upgraded to v2" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
io = IOBuffer()
d = mktempdir()
manifest = joinpath(d, "Manifest.toml")
touch(manifest)
- Pkg.activate(d; io=io)
+ Pkg.activate(d; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*", output)
env_manifest = Pkg.Types.Context().env.manifest_file
@@ -59,53 +62,60 @@ end
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
@test Pkg.Types.Context().env.manifest.manifest_format == v"2.0.0"
- Pkg.add("Profile"; io=io)
+ Pkg.add("Profile"; io = io)
env_manifest = Pkg.Types.Context().env.manifest_file
@test samefile(env_manifest, manifest)
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
- @test Pkg.Types.Context().env.manifest.manifest_format == v"2.0.0"
+ @test Pkg.Types.Context().env.manifest.manifest_format == v"2.1.0"
# check that having a Project with deps, and an empty manifest file doesn't error
rm(manifest)
touch(manifest)
- Pkg.activate(d; io=io)
- Pkg.add("Example"; io=io)
+ Pkg.activate(d; io = io)
+ Pkg.add("Example"; io = io)
end
end
- @testset "v1.0: activate, change, maintain manifest format" begin
+ @testset "v1.0: activate and read, upgrade on write" begin
reference_manifest_isolated_test("v1.0", v1 = true) do env_dir, env_manifest
io = IOBuffer()
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*v1.0`", output)
+ # Can read v1.0 format
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
+ # Operations upgrade to v2.1
Pkg.add("Profile")
- @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
+ @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
+ @test Pkg.Types.Context().env.manifest.manifest_format == v"2.1.0"
Pkg.rm("Profile")
- @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
+ @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
+ @test Pkg.Types.Context().env.manifest.manifest_format == v"2.1.0"
end
end
- @testset "v2.0: activate, change, maintain manifest format" begin
+ @testset "v2.0: activate and read, upgrade on write" begin
reference_manifest_isolated_test("v2.0") do env_dir, env_manifest
io = IOBuffer()
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*v2.0`", output)
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
+ # Operations upgrade to v2.1
Pkg.add("Profile")
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
+ @test Pkg.Types.Context().env.manifest.manifest_format == v"2.1.0"
Pkg.rm("Profile")
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
+ @test Pkg.Types.Context().env.manifest.manifest_format == v"2.1.0"
m = Pkg.Types.read_manifest(env_manifest)
@test m.other["some_other_field"] == "other"
- @test m.other["some_other_data"] == [1,2,3,4]
+ @test m.other["some_other_data"] == [1, 2, 3, 4]
mktemp() do path, io
Pkg.Types.write_manifest(io, m)
@@ -121,55 +131,70 @@ end
m.julia_version = v"1.5.0"
msg = r"The active manifest file has dependencies that were resolved with a different julia version"
@test_logs (:warn, msg) Pkg.Types.check_manifest_julia_version_compat(m, env_manifest)
- @test_throws Pkg.Types.PkgError Pkg.Types.check_manifest_julia_version_compat(m, env_manifest, julia_version_strict=true)
+ @test_throws Pkg.Types.PkgError Pkg.Types.check_manifest_julia_version_compat(m, env_manifest, julia_version_strict = true)
m.julia_version = nothing
msg = r"The active manifest file is missing a julia version entry"
@test_logs (:warn, msg) Pkg.Types.check_manifest_julia_version_compat(m, env_manifest)
- @test_throws Pkg.Types.PkgError Pkg.Types.check_manifest_julia_version_compat(m, env_manifest, julia_version_strict=true)
+ @test_throws Pkg.Types.PkgError Pkg.Types.check_manifest_julia_version_compat(m, env_manifest, julia_version_strict = true)
end
end
- @testset "v3.0: unknown format, warn" begin
- # the reference file here is not actually v3.0. It just represents an unknown manifest format
- reference_manifest_isolated_test("v3.0_unknown") do env_dir, env_manifest
+ @testset "v2.1: activate, change, maintain manifest format with registries" begin
+ reference_manifest_isolated_test("v2.1") do env_dir, env_manifest
io = IOBuffer()
- @test_logs (:warn,) Pkg.activate(env_dir; io=io)
- end
- end
-
- @testset "Pkg.upgrade_manifest()" begin
- reference_manifest_isolated_test("v1.0", v1 = true) do env_dir, env_manifest
- io = IOBuffer()
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
- @test occursin(r"Activating.*project at.*`.*v1.0`", output)
- @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
+ @test occursin(r"Activating.*project at.*`.*v2.1`", output)
+ @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
+
+ m = Pkg.Types.read_manifest(env_manifest)
+ @test m.manifest_format == v"2.1.0"
+ @test m.other["some_other_field"] == "other"
+ @test m.other["some_other_data"] == [1, 2, 3, 4]
+
+ # Check that registries are present
+ @test !isempty(m.registries)
+ @test haskey(m.registries, "General")
+ @test m.registries["General"].uuid == UUID("23338594-aafe-5451-b93e-139f81909106")
+ @test m.registries["General"].url == "https://github.com/JuliaRegistries/General.git"
- Pkg.upgrade_manifest()
+ # Check that Example has registry field
+ example_uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")
+ @test haskey(m, example_uuid)
+ @test !isempty(m[example_uuid].registries)
+ @test "General" in m[example_uuid].registries
+
+ # Write and read back to verify round-trip
+ mktemp() do path, io
+ Pkg.Types.write_manifest(io, m)
+ close(io)
+ m2 = Pkg.Types.read_manifest(path)
+ @test m.deps == m2.deps
+ @test m.julia_version == m2.julia_version
+ @test m.manifest_format == m2.manifest_format
+ @test m.other == m2.other
+ @test m.registries == m2.registries
+ end
+
+ Pkg.add("Profile")
+ @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
+ # Manifest format should remain 2.1
+ @test Pkg.Types.read_manifest(env_manifest).manifest_format >= v"2.1.0"
+
+ Pkg.rm("Profile")
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
- Pkg.activate(env_dir; io=io)
- output = String(take!(io))
- @test occursin(r"Activating.*project at.*`.*v1.0`", output)
- @test Pkg.Types.Context().env.manifest.manifest_format == v"2.0.0"
end
end
- @testset "Pkg.upgrade_manifest(manifest_path)" begin
- reference_manifest_isolated_test("v1.0", v1 = true) do env_dir, env_manifest
- io = IOBuffer()
- Pkg.activate(env_dir; io=io)
- output = String(take!(io))
- @test occursin(r"Activating.*project at.*`.*v1.0`", output)
- @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
- Pkg.upgrade_manifest(env_manifest)
- @test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
- Pkg.activate(env_dir; io=io)
- output = String(take!(io))
- @test occursin(r"Activating.*project at.*`.*v1.0`", output)
- @test Pkg.Types.Context().env.manifest.manifest_format == v"2.0.0"
+ @testset "v3.0: unknown format, warn" begin
+ # the reference file here is not actually v3.0. It just represents an unknown manifest format
+ reference_manifest_isolated_test("v3.0_unknown") do env_dir, env_manifest
+ io = IOBuffer()
+ @test_logs (:warn,) Pkg.activate(env_dir; io = io)
end
end
+
end
@testset "Manifest metadata" begin
@@ -181,8 +206,8 @@ end
@test Pkg.Operations.dropbuild(v"1.2.3-rc1") == v"1.2.3-rc1"
end
@testset "new environment: value is `nothing`, then ~`VERSION` after resolve" begin
- isolate(loaded_depot=true) do
- Pkg.activate(; temp=true)
+ isolate(loaded_depot = true) do
+ Pkg.activate(; temp = true)
@test Pkg.Types.Context().env.manifest.julia_version == nothing
Pkg.add("Profile")
@test Pkg.Types.Context().env.manifest.julia_version == Pkg.Operations.dropbuild(VERSION)
@@ -212,10 +237,10 @@ end
end
end
@testset "project_hash for identifying out of sync manifest" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
iob = IOBuffer()
- Pkg.activate(; temp=true)
+ Pkg.activate(; temp = true)
Pkg.add("Example")
@test Pkg.is_manifest_current(Pkg.Types.Context()) === true
@@ -243,6 +268,391 @@ end
end
end
end
+ @testset "syntax julia_version" begin
+ @testset "dropbuild applied: dev build number dropped" begin
+ # syntax.julia_version should drop the DEV build number to avoid manifest churn
+ p = Pkg.Types.Project()
+ @test Pkg.Operations.get_project_syntax_version(p) == Pkg.Operations.dropbuild(VERSION)
+ end
+ end
+end
+
+@testset "Manifest registry tracking" begin
+ @testset "Manifest format upgraded to 2.1 when registries tracked" begin
+ isolate(loaded_depot = true) do
+ Pkg.activate(; temp = true)
+ Pkg.add("Example")
+ ctx = Pkg.Types.Context()
+
+ # Check that manifest format is 2.1 when registries are tracked
+ @test ctx.env.manifest.manifest_format >= v"2.1.0"
+
+ # Check that registries section exists and has General registry
+ @test !isempty(ctx.env.manifest.registries)
+ @test any(reg -> reg.uuid == UUID("23338594-aafe-5451-b93e-139f81909106"), values(ctx.env.manifest.registries))
+
+ # Check that Example package has registry field
+ example_uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")
+ example_entry = ctx.env.manifest[example_uuid]
+ @test !isempty(example_entry.registries)
+ @test "General" in example_entry.registries
+ end
+ end
+
+ @testset "Registries written and read from manifest" begin
+ isolate(loaded_depot = true) do
+ Pkg.activate(; temp = true)
+ Pkg.add("Example")
+
+ env_manifest = Pkg.Types.Context().env.manifest_file
+
+ # Read the TOML and check structure
+ manifest_toml = TOML.parsefile(env_manifest)
+ @test haskey(manifest_toml, "registries")
+ @test haskey(manifest_toml["registries"], "General")
+
+ general_entry = manifest_toml["registries"]["General"]
+ @test haskey(general_entry, "uuid")
+ @test general_entry["uuid"] == "23338594-aafe-5451-b93e-139f81909106"
+ @test haskey(general_entry, "url")
+
+ # Check that packages have registry field
+ @test haskey(manifest_toml, "deps")
+ @test haskey(manifest_toml["deps"], "Example")
+ example_entries = manifest_toml["deps"]["Example"]
+ @test example_entries isa Vector
+ @test length(example_entries) > 0
+ # Check that at least one entry has registries field
+ @test any(e -> haskey(e, "registries") || haskey(e, "registry"), example_entries)
+
+ # Read it back with Pkg API and verify
+ manifest = Pkg.Types.read_manifest(env_manifest)
+ @test !isempty(manifest.registries)
+ example_uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")
+ @test haskey(manifest, example_uuid)
+ @test !isempty(manifest[example_uuid].registries)
+ end
+ end
+
+ @testset "Instantiate with non-default registry from manifest" begin
+ isolate(loaded_depot = true) do
+ mktempdir() do test_dir
+ # Create a test package git repository
+ pkg_repo_path = joinpath(test_dir, "TestPkg.git")
+ mkpath(joinpath(pkg_repo_path, "src"))
+ pkg_uuid = uuid4()
+
+ write(
+ joinpath(pkg_repo_path, "Project.toml"), """
+ name = "TestPkg"
+ uuid = "$pkg_uuid"
+ version = "0.1.0"
+ """
+ )
+ write(
+ joinpath(pkg_repo_path, "src", "TestPkg.jl"), """
+ module TestPkg
+ greet() = "Hello from TestPkg!"
+ end
+ """
+ )
+ Utils.git_init_and_commit(pkg_repo_path)
+
+ # Get the git tree hash for the package
+ pkg_tree_hash = cd(pkg_repo_path) do
+ return LibGit2.with(LibGit2.GitRepo(pkg_repo_path)) do repo
+ return string(LibGit2.GitHash(LibGit2.peel(LibGit2.GitTree, LibGit2.head(repo))))
+ end
+ end
+
+ # Create a custom registry
+ regpath = joinpath(test_dir, "CustomReg")
+ reg_uuid = uuid4()
+
+ # Convert paths to forward slashes for TOML (works on Windows too)
+ regpath_toml = replace(regpath, "\\" => "/")
+ pkg_repo_path_toml = replace(pkg_repo_path, "\\" => "/")
+
+ mkpath(joinpath(regpath, "TestPkg"))
+ write(
+ joinpath(regpath, "Registry.toml"), """
+ name = "CustomReg"
+ uuid = "$reg_uuid"
+ repo = "$(regpath_toml)"
+ [packages]
+ $pkg_uuid = { name = "TestPkg", path = "TestPkg" }
+ """
+ )
+ write(
+ joinpath(regpath, "TestPkg", "Package.toml"), """
+ name = "TestPkg"
+ uuid = "$pkg_uuid"
+ repo = "$pkg_repo_path_toml"
+ """
+ )
+ write(
+ joinpath(regpath, "TestPkg", "Versions.toml"), """
+ ["0.1.0"]
+ git-tree-sha1 = "$pkg_tree_hash"
+ """
+ )
+ write(
+ joinpath(regpath, "TestPkg", "Compat.toml"), """
+ ["0.1"]
+ julia = "1.0-2"
+ """
+ )
+ Utils.git_init_and_commit(regpath)
+
+ # Add the registry and a package from it
+ Pkg.Registry.add(url = regpath)
+ Pkg.activate(; temp = true)
+ Pkg.add(Pkg.Types.PackageSpec(name = "TestPkg", uuid = pkg_uuid))
+
+ # Get the manifest content
+ manifest_file = Pkg.Types.Context().env.manifest_file
+ manifest_content = read(manifest_file, String)
+
+ # Now create a new isolated environment and copy the manifest
+ isolate(loaded_depot = true) do
+ # Verify the custom registry is not installed
+ @test !any(r -> r.uuid == reg_uuid, Pkg.Registry.reachable_registries())
+
+ # Create a new temp environment with the manifest
+ mktempdir() do env_dir
+ project_file = joinpath(env_dir, "Project.toml")
+ new_manifest_file = joinpath(env_dir, "Manifest.toml")
+
+ write(
+ project_file, """
+ [deps]
+ TestPkg = "$pkg_uuid"
+ """
+ )
+ write(new_manifest_file, manifest_content)
+
+ Pkg.activate(env_dir)
+
+ # Before instantiate, registry should not be installed
+ @test !any(r -> r.uuid == reg_uuid, Pkg.Registry.reachable_registries())
+
+ # Instantiate should automatically install the registry from manifest
+ Pkg.instantiate()
+
+ # After instantiate, registry should be installed
+ @test any(r -> r.uuid == reg_uuid, Pkg.Registry.reachable_registries())
+ end
+ end
+ end
+ end
+ end
+
+ @testset "Non-registry packages do not have registry field" begin
+ isolate(loaded_depot = true) do
+ mktempdir() do test_dir
+ # Create a simple package to develop
+ dev_pkg_dir = joinpath(test_dir, "DevPkg")
+ mkpath(joinpath(dev_pkg_dir, "src"))
+ dev_pkg_uuid = uuid4()
+
+ write(
+ joinpath(dev_pkg_dir, "Project.toml"), """
+ name = "DevPkg"
+ uuid = "$dev_pkg_uuid"
+ version = "0.1.0"
+ """
+ )
+ write(
+ joinpath(dev_pkg_dir, "src", "DevPkg.jl"), """
+ module DevPkg
+ greet() = "Hello from DevPkg!"
+ end
+ """
+ )
+
+ # Create a git package
+ git_pkg_dir = joinpath(test_dir, "GitPkg")
+ mkpath(joinpath(git_pkg_dir, "src"))
+ git_pkg_uuid = uuid4()
+
+ write(
+ joinpath(git_pkg_dir, "Project.toml"), """
+ name = "GitPkg"
+ uuid = "$git_pkg_uuid"
+ version = "0.1.0"
+ """
+ )
+ write(
+ joinpath(git_pkg_dir, "src", "GitPkg.jl"), """
+ module GitPkg
+ greet() = "Hello from GitPkg!"
+ end
+ """
+ )
+
+ Utils.git_init_and_commit(git_pkg_dir)
+
+ Pkg.activate(; temp = true)
+ Pkg.develop(path = dev_pkg_dir)
+ Pkg.add(url = git_pkg_dir)
+
+ ctx = Pkg.Types.Context()
+
+ # Developed package should not have registry field
+ @test haskey(ctx.env.manifest, dev_pkg_uuid)
+ dev_entry = ctx.env.manifest[dev_pkg_uuid]
+ @test isempty(dev_entry.registries)
+
+ # Git package should not have registry field
+ @test haskey(ctx.env.manifest, git_pkg_uuid)
+ git_entry = ctx.env.manifest[git_pkg_uuid]
+ @test isempty(git_entry.registries)
+
+ # Manifest format is always 2.1 now
+ @test ctx.env.manifest.manifest_format == v"2.1.0"
+ # Registries section should be empty since no registry packages
+ @test isempty(ctx.env.manifest.registries)
+ end
+ end
+ end
+
+ @testset "Package in multiple registries records all" begin
+ isolate(loaded_depot = true) do
+ mktempdir() do test_dir
+ # Create a test package git repository
+ pkg_repo_path = joinpath(test_dir, "SharedPkg.git")
+ mkpath(joinpath(pkg_repo_path, "src"))
+ pkg_uuid = uuid4()
+
+ write(
+ joinpath(pkg_repo_path, "Project.toml"), """
+ name = "SharedPkg"
+ uuid = "$pkg_uuid"
+ version = "1.0.0"
+ """
+ )
+ write(
+ joinpath(pkg_repo_path, "src", "SharedPkg.jl"), """
+ module SharedPkg
+ greet() = "Hello from SharedPkg!"
+ end
+ """
+ )
+ Utils.git_init_and_commit(pkg_repo_path)
+
+ # Get the git tree hash for the package
+ pkg_tree_hash = cd(pkg_repo_path) do
+ return LibGit2.with(LibGit2.GitRepo(pkg_repo_path)) do repo
+ return string(LibGit2.GitHash(LibGit2.peel(LibGit2.GitTree, LibGit2.head(repo))))
+ end
+ end
+
+ # Create two registries with the same package
+ reg1_uuid = uuid4()
+ reg1_path = joinpath(test_dir, "Registry1")
+ reg1_path_toml = replace(reg1_path, "\\" => "/")
+ pkg_repo_path_toml = replace(pkg_repo_path, "\\" => "/")
+
+ mkpath(joinpath(reg1_path, "SharedPkg"))
+ write(
+ joinpath(reg1_path, "Registry.toml"), """
+ name = "Registry1"
+ uuid = "$reg1_uuid"
+ repo = "$(reg1_path_toml)"
+ [packages]
+ $pkg_uuid = { name = "SharedPkg", path = "SharedPkg" }
+ """
+ )
+ write(
+ joinpath(reg1_path, "SharedPkg", "Package.toml"), """
+ name = "SharedPkg"
+ uuid = "$pkg_uuid"
+ repo = "$pkg_repo_path_toml"
+ """
+ )
+ write(
+ joinpath(reg1_path, "SharedPkg", "Versions.toml"), """
+ ["1.0.0"]
+ git-tree-sha1 = "$pkg_tree_hash"
+ """
+ )
+ write(
+ joinpath(reg1_path, "SharedPkg", "Compat.toml"), """
+ ["1"]
+ julia = "1.0-2"
+ """
+ )
+ Utils.git_init_and_commit(reg1_path)
+
+ reg2_uuid = uuid4()
+ reg2_path = joinpath(test_dir, "Registry2")
+ reg2_path_toml = replace(reg2_path, "\\" => "/")
+
+ mkpath(joinpath(reg2_path, "SharedPkg"))
+ write(
+ joinpath(reg2_path, "Registry.toml"), """
+ name = "Registry2"
+ uuid = "$reg2_uuid"
+ repo = "$(reg2_path_toml)"
+ [packages]
+ $pkg_uuid = { name = "SharedPkg", path = "SharedPkg" }
+ """
+ )
+ write(
+ joinpath(reg2_path, "SharedPkg", "Package.toml"), """
+ name = "SharedPkg"
+ uuid = "$pkg_uuid"
+ repo = "$pkg_repo_path_toml"
+ """
+ )
+ write(
+ joinpath(reg2_path, "SharedPkg", "Versions.toml"), """
+ ["1.0.0"]
+ git-tree-sha1 = "$pkg_tree_hash"
+ """
+ )
+ write(
+ joinpath(reg2_path, "SharedPkg", "Compat.toml"), """
+ ["1"]
+ julia = "1.0-2"
+ """
+ )
+ Utils.git_init_and_commit(reg2_path)
+
+ # Add both registries
+ Pkg.Registry.add(url = reg1_path)
+ Pkg.Registry.add(url = reg2_path)
+
+ # Add the package
+ Pkg.activate(; temp = true)
+ Pkg.add(Pkg.Types.PackageSpec(name = "SharedPkg", uuid = pkg_uuid))
+
+ ctx = Pkg.Types.Context()
+ @test haskey(ctx.env.manifest, pkg_uuid)
+ pkg_entry = ctx.env.manifest[pkg_uuid]
+
+ # Package should reference both registries
+ @test length(pkg_entry.registries) == 2
+ @test "Registry1" in pkg_entry.registries
+ @test "Registry2" in pkg_entry.registries
+
+ # Both registries should be in the manifest
+ @test haskey(ctx.env.manifest.registries, "Registry1")
+ @test haskey(ctx.env.manifest.registries, "Registry2")
+
+ # Check TOML output
+ manifest_toml = TOML.parsefile(ctx.env.manifest_file)
+ shared_pkg_entries = manifest_toml["deps"]["SharedPkg"]
+ @test shared_pkg_entries isa Vector
+ @test length(shared_pkg_entries) == 1
+ registries_field = shared_pkg_entries[1]["registries"]
+ @test registries_field isa Vector
+ @test length(registries_field) == 2
+ @test "Registry1" in registries_field
+ @test "Registry2" in registries_field
+ end
+ end
+ end
end
end # module
diff --git a/test/misc.jl b/test/misc.jl
index e9b3d00ff6..e2e3dc3ed0 100644
--- a/test/misc.jl
+++ b/test/misc.jl
@@ -12,19 +12,59 @@ end
@testset "hashing" begin
@test hash(Pkg.Types.Project()) == hash(Pkg.Types.Project())
@test hash(Pkg.Types.VersionBound()) == hash(Pkg.Types.VersionBound())
- @test hash(Pkg.Resolve.Fixed(VersionNumber(0,1,0))) == hash(Pkg.Resolve.Fixed(VersionNumber(0,1,0)))
+ @test hash(Pkg.Resolve.Fixed(VersionNumber(0, 1, 0))) == hash(Pkg.Resolve.Fixed(VersionNumber(0, 1, 0)))
hash(Pkg.Types.VersionSpec()) # hash isn't stable
hash(Pkg.Types.PackageEntry()) # hash isn't stable because the internal `repo` field is a mutable struct
end
@testset "safe_realpath" begin
+ @test realpath(Sys.BINDIR) == Pkg.safe_realpath(Sys.BINDIR)
# issue #3085
- for p in ("", "some-non-existing-path")
+ for p in ("", "some-non-existing-path", "some-non-existing-drive:")
@test p == Pkg.safe_realpath(p)
end
end
+@testset "normalize_path_for_toml" begin
+ # Test that relative paths with backslashes are normalized to forward slashes on Windows
+ # and left unchanged on other platforms
+ if Sys.iswindows()
+ @test Pkg.normalize_path_for_toml("foo\\bar\\baz") == "foo/bar/baz"
+ @test Pkg.normalize_path_for_toml("..\\parent\\dir") == "../parent/dir"
+ @test Pkg.normalize_path_for_toml(".\\current") == "./current"
+ # Absolute paths should not be normalized (they're platform-specific)
+ @test Pkg.normalize_path_for_toml("C:\\absolute\\path") == "C:\\absolute\\path"
+ @test Pkg.normalize_path_for_toml("\\\\network\\share") == "\\\\network\\share"
+ else
+ # On Unix-like systems, paths should be unchanged
+ @test Pkg.normalize_path_for_toml("foo/bar/baz") == "foo/bar/baz"
+ @test Pkg.normalize_path_for_toml("../parent/dir") == "../parent/dir"
+ @test Pkg.normalize_path_for_toml("./current") == "./current"
+ @test Pkg.normalize_path_for_toml("/absolute/path") == "/absolute/path"
+ end
+end
+
@test eltype([PackageSpec(a) for a in []]) == PackageSpec
+@testset "PackageSpec version default" begin
+ # Test that PackageSpec without explicit version gets set to VersionSpec("*")
+ # This behavior is relied upon by BinaryBuilderBase.jl for dependency filtering
+ # See: https://github.com/JuliaPackaging/BinaryBuilderBase.jl/blob/master/src/Prefix.jl
+ ps = PackageSpec(name = "Example")
+ @test ps.version == Pkg.Types.VersionSpec("*")
+
+ # Test with UUID as well
+ ps_uuid = PackageSpec(name = "Example", uuid = Base.UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
+ @test ps_uuid.version == Pkg.Types.VersionSpec("*")
+
+ # Test that explicitly set version is preserved
+ ps_versioned = PackageSpec(name = "Example", version = v"1.0.0")
+ @test ps_versioned.version == v"1.0.0"
+
+ # Test that explicitly set versionspec (string format) is preserved
+ ps_versioned = PackageSpec(name = "Example", version = "1.0.0")
+ @test ps_versioned.version == "1.0.0"
+end
+
end # module
diff --git a/test/new.jl b/test/new.jl
index 5b10b72965..a4ecd1a6fc 100644
--- a/test/new.jl
+++ b/test/new.jl
@@ -1,13 +1,12 @@
module NewTests
-using Test, UUIDs, Dates, TOML
+using Test, UUIDs, Dates, TOML
import ..Pkg, LibGit2
-using Pkg.Types: PkgError
-using Pkg.Resolve: ResolverError
+using Pkg.Types: PkgError
+using Pkg.Resolve: ResolverError
import Pkg.Artifacts: artifact_meta, artifact_path
import Base.BinaryPlatforms: HostPlatform, Platform, platforms_match
-using ..Utils
-import ..HistoricalStdlibVersions
+using ..Utils
using Logging
general_uuid = UUID("23338594-aafe-5451-b93e-139f81909106") # UUID for `General`
@@ -31,7 +30,7 @@ Pkg._auto_gc_enabled[] = false
@testset "Depot setup" begin
isolate() do
# Lets make sure we start with a clean slate.
- rm(LOADED_DEPOT; force=true, recursive=true)
+ rm(LOADED_DEPOT; force = true, recursive = true)
mkdir(LOADED_DEPOT)
# And set the loaded depot as our working depot.
empty!(DEPOT_PATH)
@@ -40,13 +39,16 @@ Pkg._auto_gc_enabled[] = false
# Now we double check we have a clean slate.
@test isempty(Pkg.dependencies())
# A simple `add` should set up some things for us:
- Pkg.add(name="Example", version="0.5.3")
+ Pkg.add(name = "Example", version = "0.5.3")
# - `General` should be initiated by default.
regs = Pkg.Registry.reachable_registries()
@test length(regs) == 1
reg = regs[1]
@test reg.name == "General"
@test reg.uuid == general_uuid
+ # - Check that CACHEDIR.TAG files exist in cache directories
+ @test isfile(joinpath(LOADED_DEPOT, "registries", "CACHEDIR.TAG"))
+ @test isfile(joinpath(LOADED_DEPOT, "packages", "CACHEDIR.TAG"))
# - The package should be installed correctly.
source053, source053_time = nothing, nothing
Pkg.dependencies(exuuid) do pkg
@@ -58,7 +60,7 @@ Pkg._auto_gc_enabled[] = false
@test haskey(Pkg.project().dependencies, "Example")
@test length(Pkg.project().dependencies) == 1
# Now we install the same package at a different version:
- Pkg.add(name="Example", version="0.5.1")
+ Pkg.add(name = "Example", version = "0.5.1")
# - Check that the package was installed correctly.
Pkg.dependencies(exuuid) do pkg
@test pkg.version == v"0.5.1"
@@ -67,10 +69,10 @@ Pkg._auto_gc_enabled[] = false
@test pkg.source != source053
end
# Now a few more versions:
- Pkg.add(name="Example", version="0.5.0")
- Pkg.add(name="Example")
- Pkg.add(name="Example", version="0.3.0")
- Pkg.add(name="Example", version="0.3.3")
+ Pkg.add(name = "Example", version = "0.5.0")
+ Pkg.add(name = "Example")
+ Pkg.add(name = "Example", version = "0.3.0")
+ Pkg.add(name = "Example", version = "0.3.3")
# With similar checks
Pkg.dependencies(exuuid) do pkg
@test pkg.version == v"0.3.3"
@@ -78,42 +80,43 @@ Pkg._auto_gc_enabled[] = false
end
# Now we try adding a second dependency.
# We repeat the same class of tests.
- Pkg.add(name="JSON", version="0.18.0")
+ Pkg.add(name = "JSON", version = "0.18.0")
sourcej018 = nothing
Pkg.dependencies(json_uuid) do pkg
@test pkg.version == v"0.18.0"
@test isdir(pkg.source)
end
- Pkg.add(name="JSON", version="0.20.0")
+ Pkg.add(name = "JSON", version = "0.20.0")
Pkg.dependencies(json_uuid) do pkg
@test isdir(pkg.source)
@test pkg.source != sourcej018
end
# Now check packages which track repos instead of registered versions
- Pkg.add(url="https://github.com/JuliaLang/Example.jl", rev="v0.5.3")
+ Pkg.add(url = "https://github.com/JuliaLang/Example.jl", rev = "v0.5.3")
+ @test isfile(joinpath(LOADED_DEPOT, "clones", "CACHEDIR.TAG"))
Pkg.dependencies(exuuid) do pkg
@test !pkg.is_tracking_registry
@test isdir(pkg.source)
@test isdir(Pkg.Types.add_repo_cache_path(pkg.git_source))
end
- Pkg.add(name="Example", rev="master")
+ Pkg.add(name = "Example", rev = "master")
Pkg.dependencies(exuuid) do pkg
@test !pkg.is_tracking_registry
@test isdir(pkg.source)
@test isdir(Pkg.Types.add_repo_cache_path(pkg.git_source))
end
# Also check that unregistered packages are installed properly.
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
Pkg.dependencies(unregistered_uuid) do pkg
@test isdir(pkg.source)
@test isdir(Pkg.Types.add_repo_cache_path(pkg.git_source))
end
# Check `develop`
- Pkg.develop(name="Example")
+ Pkg.develop(name = "Example")
Pkg.dependencies(exuuid) do pkg
@test isdir(pkg.source) # TODO check for full git clone, have to implement saving original URL first
end
- Pkg.develop(name="JSON")
+ Pkg.develop(name = "JSON")
Pkg.dependencies(json_uuid) do pkg
@test isdir(pkg.source) # TODO check for full git clone, have to implement saving original URL first
end
@@ -125,10 +128,10 @@ Pkg._auto_gc_enabled[] = false
@test reg.uuid == general_uuid
@test mtime(source053) == source053_time
# Now we clean up so that `isolate` can reuse the loaded depot properly
- rm(joinpath(LOADED_DEPOT, "environments"); force=true, recursive=true)
- rm(joinpath(LOADED_DEPOT, "clones"); force=true, recursive=true)
- rm(joinpath(LOADED_DEPOT, "logs"); force=true, recursive=true)
- rm(joinpath(LOADED_DEPOT, "dev"); force=true, recursive=true)
+ rm(joinpath(LOADED_DEPOT, "environments"); force = true, recursive = true)
+ rm(joinpath(LOADED_DEPOT, "clones"); force = true, recursive = true)
+ rm(joinpath(LOADED_DEPOT, "logs"); force = true, recursive = true)
+ rm(joinpath(LOADED_DEPOT, "dev"); force = true, recursive = true)
for (root, dirs, files) in walkdir(LOADED_DEPOT)
for file in files
filepath = joinpath(root, file)
@@ -140,19 +143,106 @@ Pkg._auto_gc_enabled[] = false
end
end
end
+ copy_this_pkg_cache(LOADED_DEPOT)
+end
+
+function kill_with_info(p)
+ if Sys.islinux()
+ SIGINFO = 10
+ elseif Sys.isbsd()
+ SIGINFO = 29
+ end
+ if @isdefined(SIGINFO)
+ kill(p, SIGINFO)
+ timedwait(() -> process_exited(p), 20; pollint = 1.0) # Allow time for profile to collect and print before killing
+ end
+ kill(p)
+ wait(p)
+ return nothing
+end
+
+# This test checks that multiple julia processes can concurrently install into the same depot
+# without corrupting it, and that the package can then be loaded. Only one process performs each of
+# these actions; the others wait on the specific action for the specific thing:
+# - Install the default registries
+# - Install source of package and deps
+# - Install artifacts
+# - Precompile package and deps
+# - Load & use package
+@testset "Concurrent setup/installation/precompilation across processes" begin
+ @testset for test in 1:1 # increase for stress testing
+ mktempdir() do tmp
+ pathsep = Sys.iswindows() ? ";" : ":"
+ Pkg_dir = dirname(@__DIR__)
+ script = """
+ using Dates
+ t = Timer(t->println(stderr, Dates.now()), 4*60; interval = 10)
+ import Pkg
+ samefile(pkgdir(Pkg), $(repr(Pkg_dir))) || error("Using wrong Pkg")
+ Pkg.activate(temp=true)
+ Pkg.add(name="FFMPEG", version="0.4") # a package with a lot of deps but fast to load
+ using FFMPEG
+ @showtime FFMPEG.exe("-version")
+ @showtime FFMPEG.exe("-f", "lavfi", "-i", "testsrc=duration=1:size=128x128:rate=10", "-f", "null", "-") # more complete quick test (~10ms)
+ close(t)
+ """
+ cmd = addenv(
+ `$(Base.julia_cmd()) --project=$(dirname(@__DIR__)) --startup-file=no --color=no -e $script`,
+ "JULIA_DEPOT_PATH" => join([tmp, LOADED_DEPOT, ""], pathsep)
+ )
+ did_install_package = Threads.Atomic{Int}(0)
+ did_install_artifact = Threads.Atomic{Int}(0)
+ any_failed = Threads.Atomic{Bool}(false)
+ outputs = fill("", 3)
+ t = @elapsed @sync begin
+ # All but 1 process should be waiting, so should be ok to run many
+ for i in 1:3
+ Threads.@spawn begin
+ iob = IOBuffer()
+ start = time()
+ p = run(pipeline(cmd, stdout = iob, stderr = iob), wait = false)
+ if timedwait(() -> process_exited(p), 5 * 60; pollint = 1.0) === :timed_out
+ kill_with_info(p)
+ end
+ if !success(p)
+ Threads.atomic_cas!(any_failed, false, true)
+ end
+ str = String(take!(iob))
+ if occursin(r"Installed FFMPEG ─", str)
+ Threads.atomic_add!(did_install_package, 1)
+ end
+ if occursin(r"Installed artifact FFMPEG ", str)
+ Threads.atomic_add!(did_install_artifact, 1)
+ end
+ outputs[i] = string("=== test $test, process $i. Took $(time() - start) seconds.\n", str)
+ end
+ end
+ end
+ if any_failed[] || did_install_package[] != 1 || did_install_artifact[] != 1
+ println("=== Concurrent Pkg.add test $test failed after $t seconds")
+ for i in 1:3
+ printstyled(stdout, outputs[i]; color = (:blue, :green, :yellow)[i])
+ end
+ end
+ # only 1 should have actually installed FFMPEG
+ @test !any_failed[]
+ @test did_install_package[] == 1
+ @test did_install_artifact[] == 1
+ end
+ end
end
#
# ## Sandboxing
#
-inside_test_sandbox(fn, name; kwargs...) = Pkg.test(name; test_fn=fn, kwargs...)
-inside_test_sandbox(fn; kwargs...) = Pkg.test(;test_fn=fn, kwargs...)
+inside_test_sandbox(fn, name; kwargs...) = Pkg.test(name; test_fn = fn, kwargs...)
+inside_test_sandbox(fn; kwargs...) = Pkg.test(; test_fn = fn, kwargs...)
@testset "test: printing" begin
- isolate(loaded_depot=true) do
- Pkg.add(name="Example")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example")
io = Base.BufferStream()
- Pkg.test("Example"; io=io)
+ Pkg.test("Example"; io = io)
closewrite(io)
output = read(io, String)
@test occursin(r"Testing Example", output)
@@ -165,133 +255,155 @@ end
@testset "test: sandboxing" begin
# explicit test dependencies and the tested project are available within the test sandbox
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- foo_uuid = UUID("02250abe-2050-11e9-017e-b301a2b5bcc4")
- path = copy_test_package(tempdir, "BasicSandbox")
- # we set readonly here to simulate the permissions in the `$DEPOT/packages` directory
- Pkg.Types.set_readonly(path)
- Pkg.develop(path=path)
- inside_test_sandbox("BasicSandbox") do
- Pkg.dependencies(foo_uuid) do pkg
- @test length(pkg.dependencies) == 1
- @test haskey(pkg.dependencies, "Random")
- end
- @test haskey(Pkg.project().dependencies, "Test")
- @test haskey(Pkg.project().dependencies, "BasicSandbox")
- end
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ foo_uuid = UUID("02250abe-2050-11e9-017e-b301a2b5bcc4")
+ path = copy_test_package(tempdir, "BasicSandbox")
+ # we set readonly here to simulate the permissions in the `$DEPOT/packages` directory
+ Pkg.Types.set_readonly(path)
+ Pkg.develop(path = path)
+ inside_test_sandbox("BasicSandbox") do
+ Pkg.dependencies(foo_uuid) do pkg
+ @test length(pkg.dependencies) == 1
+ @test haskey(pkg.dependencies, "Random")
+ end
+ @test haskey(Pkg.project().dependencies, "Test")
+ @test haskey(Pkg.project().dependencies, "BasicSandbox")
+ end
+ end
+ end
# the active dependency graph is transferred to the test sandbox
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TransferSubgraph")
- Pkg.activate(path)
- active_json_version = Pkg.dependencies()[json_uuid].version
- inside_test_sandbox("Unregistered") do
- @test Pkg.dependencies()[json_uuid].version == active_json_version
- end
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TransferSubgraph")
+ Pkg.activate(path)
+ active_json_version = Pkg.dependencies()[json_uuid].version
+ inside_test_sandbox("Unregistered") do
+ @test Pkg.dependencies()[json_uuid].version == active_json_version
+ end
+ end
+ end
# the active dep graph is transferred to test sandbox, even when tracking unregistered repos
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestSubgraphTrackingRepo")
- Pkg.activate(path)
- inside_test_sandbox() do
- Pkg.dependencies(unregistered_uuid) do pkg
- @test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
- @test !pkg.is_tracking_registry
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestSubgraphTrackingRepo")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ Pkg.dependencies(unregistered_uuid) do pkg
+ @test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
+ @test !pkg.is_tracking_registry
+ end
end
end
- end end
+ end
# a test dependency can track a path
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestDepTrackingPath")
- Pkg.activate(path)
- inside_test_sandbox() do
- @test Pkg.dependencies()[unregistered_uuid].is_tracking_path
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestDepTrackingPath")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ @test Pkg.dependencies()[unregistered_uuid].is_tracking_path
+ end
end
- end end
+ end
# a test dependency can track a repo
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestDepTrackingRepo")
- Pkg.activate(path)
- inside_test_sandbox() do
- Pkg.dependencies(unregistered_uuid) do pkg
- @test !pkg.is_tracking_registry
- @test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestDepTrackingRepo")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ Pkg.dependencies(unregistered_uuid) do pkg
+ @test !pkg.is_tracking_registry
+ @test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
+ end
end
end
- end end
+ end
# `compat` for test dependencies is honored
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestDepCompat")
- Pkg.activate(path)
- inside_test_sandbox() do
- deps = Pkg.dependencies()
- @test deps[exuuid].version == v"0.3.0"
- @test deps[UUID("9cb9b0df-a8d1-4a6c-a371-7d2ae60a2f25")].version == v"0.1.0"
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestDepCompat")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ deps = Pkg.dependencies()
+ @test deps[exuuid].version == v"0.3.0"
+ @test deps[UUID("9cb9b0df-a8d1-4a6c-a371-7d2ae60a2f25")].version == v"0.1.0"
+ end
end
- end end
+ end
end
# These tests cover the original "targets" API for specifying test dependencies
@testset "test: 'targets' based testing" begin
# `Pkg.test` should work on dependency graphs with nodes sharing the same name but not the same UUID
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- Pkg.activate(joinpath(@__DIR__, "test_packages", "SameNameDifferentUUID"))
- inside_test_sandbox("Example") do
- Pkg.dependencies(UUID("6876af07-990d-54b4-ab0e-23690620f79a")) do pkg
- @test pkg.name == "Example"
- @test realpath(pkg.source) == realpath(joinpath(@__DIR__, "test_packages", "SameNameDifferentUUID", "dev", "Example"))
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ Pkg.activate(joinpath(@__DIR__, "test_packages", "SameNameDifferentUUID"))
+ inside_test_sandbox("Example") do
+ Pkg.dependencies(UUID("6876af07-990d-54b4-ab0e-23690620f79a")) do pkg
+ @test pkg.name == "Example"
+ @test realpath(pkg.source) == realpath(joinpath(@__DIR__, "test_packages", "SameNameDifferentUUID", "dev", "Example"))
+ end
end
end
- end end
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- basic_test_target = UUID("50adb811-5a1f-4be4-8146-2725c7f5d900")
- path = copy_test_package(tempdir, "BasicTestTarget")
- # we set readonly here to simulate the permissions in the `$DEPOT/packages` directory
- Pkg.Types.set_readonly(path)
- Pkg.develop(path=path)
- inside_test_sandbox("BasicTestTarget") do
- @test haskey(Pkg.project().dependencies, "Markdown")
- @test haskey(Pkg.project().dependencies, "Test")
- @test haskey(Pkg.project().dependencies, "BasicTestTarget")
- Pkg.dependencies(basic_test_target) do pkg
- @test pkg.is_tracking_path == true
- @test haskey(pkg.dependencies, "UUIDs")
- @test !haskey(pkg.dependencies, "Markdown")
- @test !haskey(pkg.dependencies, "Test")
+ end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ basic_test_target = UUID("50adb811-5a1f-4be4-8146-2725c7f5d900")
+ path = copy_test_package(tempdir, "BasicTestTarget")
+ # we set readonly here to simulate the permissions in the `$DEPOT/packages` directory
+ Pkg.Types.set_readonly(path)
+ Pkg.develop(path = path)
+ inside_test_sandbox("BasicTestTarget") do
+ @test haskey(Pkg.project().dependencies, "Markdown")
+ @test haskey(Pkg.project().dependencies, "Test")
+ @test haskey(Pkg.project().dependencies, "BasicTestTarget")
+ Pkg.dependencies(basic_test_target) do pkg
+ @test pkg.is_tracking_path == true
+ @test haskey(pkg.dependencies, "UUIDs")
+ @test !haskey(pkg.dependencies, "Markdown")
+ @test !haskey(pkg.dependencies, "Test")
+ end
end
end
- end end
+ end
# dependency of test dependency (#567)
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- for x in ["x1", "x2", "x3"]
- path = copy_test_package(tempdir, x)
- Pkg.develop(Pkg.PackageSpec(path = path))
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ for x in ["x1", "x2", "x3"]
+ path = copy_test_package(tempdir, x)
+ Pkg.develop(Pkg.PackageSpec(path = path))
+ end
+ Pkg.test("x3")
end
- Pkg.test("x3")
- end end
+ end
# preserve root of active project if it is a dependency (#1423)
- isolate(loaded_depot=false) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "ActiveProjectInTestSubgraph")
- Pkg.activate(path)
- inside_test_sandbox("B") do
- deps = Pkg.dependencies()
- @test deps[UUID("c86f0f68-174e-41db-bd5e-b032223de205")].version == v"1.2.3"
- end
- end end
+ isolate(loaded_depot = false) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "ActiveProjectInTestSubgraph")
+ Pkg.activate(path)
+ inside_test_sandbox("B") do
+ deps = Pkg.dependencies()
+ @test deps[UUID("c86f0f68-174e-41db-bd5e-b032223de205")].version == v"1.2.3"
+ end
+ end
+ end
# test targets should also honor compat
- isolate(loaded_depot=false) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestTargetCompat")
- Pkg.activate(path)
- inside_test_sandbox() do
- deps = Pkg.dependencies()
- @test deps[exuuid].version == v"0.3.0"
+ isolate(loaded_depot = false) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestTargetCompat")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ deps = Pkg.dependencies()
+ @test deps[exuuid].version == v"0.3.0"
+ end
end
- end end
+ end
end
@testset "test: fallback when no project file exists" begin
- isolate(loaded_depot=true) do
- Pkg.add(name="Permutations", version="0.3.2")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Permutations", version = "0.3.2")
if Sys.WORD_SIZE == 32
# The Permutations.jl v0.3.2 tests are known to fail on 32-bit Julia
@test_skip Pkg.test("Permutations")
@@ -303,7 +415,7 @@ end
@testset "using a test/REQUIRE file" begin
isolate() do
- Pkg.add(name="EnglishText", version="0.6.0")
+ Pkg.add(name = "EnglishText", version = "0.6.0")
Pkg.test("EnglishText")
end
end
@@ -312,7 +424,7 @@ end
# # Activate
#
@testset "activate: repl" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.REPLMode.TEST_MODE[] = true
# - activate shared env
api, args, opts = first(Pkg.pkg"activate --shared Foo")
@@ -346,41 +458,52 @@ end
arg = args[1]
@test arg.url == "https://github.com/JuliaLang/Pkg.jl"
@test arg.rev == "aa/gitlab"
+
+ api, args, opts = first(Pkg.pkg"add https://github.com/JuliaPy/PythonCall.jl/pull/529")
+ arg = args[1]
+ @test arg.url == "https://github.com/JuliaPy/PythonCall.jl"
+ @test arg.rev == "pull/529/head"
+
+ api, args, opts = first(Pkg.pkg"add https://github.com/TimG1964/XLSX.jl#Bug-fixing-post-#289:subdir")
+ arg = args[1]
+ @test arg.url == "https://github.com/TimG1964/XLSX.jl"
+ @test arg.rev == "Bug-fixing-post-#289"
+ @test arg.subdir == "subdir"
end
end
@testset "activate" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
io = IOBuffer()
- Pkg.activate("Foo"; io=io)
+ Pkg.activate("Foo"; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*Foo`", output)
- Pkg.activate(; io=io, temp=true)
+ Pkg.activate(; io = io, temp = true)
output = String(take!(io))
@test occursin(r"Activating new project at `.*`", output)
prev_env = Base.active_project()
# - activating the previous project
- Pkg.activate(; temp=true)
+ Pkg.activate(; temp = true)
@test Base.active_project() != prev_env
- Pkg.activate(; prev=true)
+ Pkg.activate(; prev = true)
@test prev_env == Base.active_project()
- Pkg.activate(; temp=true)
+ Pkg.activate(; temp = true)
@test Base.active_project() != prev_env
- Pkg.activate(; prev=true)
+ Pkg.activate(; prev = true)
@test Base.active_project() == prev_env
Pkg.activate("")
@test Base.active_project() != prev_env
- Pkg.activate(; prev=true)
+ Pkg.activate(; prev = true)
@test Base.active_project() == prev_env
load_path_before = copy(LOAD_PATH)
try
empty!(LOAD_PATH) # unset active env
Pkg.activate() # shouldn't error
- Pkg.activate(; prev=true) # shouldn't error
+ Pkg.activate(; prev = true) # shouldn't error
finally
append!(empty!(LOAD_PATH), load_path_before)
end
@@ -397,41 +520,62 @@ end
# Here we check against invalid input.
@testset "add: input checking" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# Julia is not a valid package name.
- @test_throws PkgError("`julia` is not a valid package name") Pkg.add(name="julia")
+ @test_throws PkgError("`julia` is not a valid package name") Pkg.add(name = "julia")
# Package names must be valid Julia identifiers.
- @test_throws PkgError("`***` is not a valid package name") Pkg.add(name="***")
- @test_throws PkgError("`Foo Bar` is not a valid package name") Pkg.add(name="Foo Bar")
+ @test_throws PkgError("`***` is not a valid package name") Pkg.add(name = "***")
+ @test_throws PkgError("`Foo Bar` is not a valid package name") Pkg.add(name = "Foo Bar")
# Names which are invalid and are probably URLs or paths.
- @test_throws PkgError("""
- `https://github.com` is not a valid package name
- The argument appears to be a URL or path, perhaps you meant `Pkg.add(url="...")` or `Pkg.add(path="...")`.""") Pkg.add("https://github.com")
- @test_throws PkgError("""
- `./Foobar` is not a valid package name
- The argument appears to be a URL or path, perhaps you meant `Pkg.add(url="...")` or `Pkg.add(path="...")`.""") Pkg.add("./Foobar")
+ @test_throws PkgError(
+ """
+ `https://github.com` is not a valid package name
+ The argument appears to be a URL or path, perhaps you meant `Pkg.add(url="...")` or `Pkg.add(path="...")`."""
+ ) Pkg.add("https://github.com")
+ @test_throws PkgError(
+ """
+ `./Foobar` is not a valid package name
+ The argument appears to be a URL or path, perhaps you meant `Pkg.add(url="...")` or `Pkg.add(path="...")`."""
+ ) Pkg.add("./Foobar")
# An empty spec is invalid.
@test_throws PkgError(
"name, UUID, URL, or filesystem path specification required when calling `add`"
- ) Pkg.add(Pkg.PackageSpec())
+ ) Pkg.add(Pkg.PackageSpec())
# Versions imply that we are tracking a registered version.
@test_throws PkgError(
"version specification invalid when tracking a repository: `0.5.0` specified for package `Example`"
- ) Pkg.add(name="Example", rev="master", version="0.5.0")
+ ) Pkg.add(name = "Example", rev = "master", version = "0.5.0")
# Adding with a slight typo gives suggestions
try
- Pkg.add("Examplle")
+ io = IOBuffer()
+ Pkg.add("Examplle"; io)
@test false # to fail if add doesn't error
- catch err
+ catch err
@test err isa PkgError
@test occursin("The following package names could not be resolved:", err.msg)
@test occursin("Examplle (not found in project, manifest or registry)", err.msg)
- @test occursin("Suggestions:", err.msg)
- # @test occursin("Example", err.msg) # can't test this as each char in "Example" is individually colorized
+ @test occursin("Suggestions: Example", err.msg)
+ end
+ # Adding with lowercase suggests uppercase
+ try
+ io = IOBuffer()
+ Pkg.add("http"; io)
+ @test false # to fail if add doesn't error
+ catch err
+ @test err isa PkgError
+ @test occursin("Suggestions: HTTP", err.msg)
+ end
+ try
+ io = IOBuffer()
+ Pkg.add("Flix"; io)
+ @test false # to fail if add doesn't error
+ catch err
+ @test err isa PkgError
+ @test occursin("Suggestions: Flux", err.msg)
end
@test_throws PkgError(
"name, UUID, URL, or filesystem path specification required when calling `add`"
- ) Pkg.add(Pkg.PackageSpec())
+ ) Pkg.add(Pkg.PackageSpec())
# Adding an unregistered package
@test_throws PkgError Pkg.add("ThisIsHopefullyRandom012856014925701382")
# Wrong UUID
@@ -441,24 +585,29 @@ end
# Two packages with the same name
@test_throws PkgError(
"it is invalid to specify multiple packages with the same name: `Example`"
- ) Pkg.add([(;name="Example"), (;name="Example",version="0.5.0")])
+ ) Pkg.add([(; name = "Example"), (; name = "Example", version = "0.5.0")])
end
# Unregistered UUID in manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "UnregisteredUUID")
- Pkg.activate(package_path)
- @test_throws PkgError("expected package `Example [142fd7e7]` to be registered") Pkg.add("JSON")
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "UnregisteredUUID")
+ Pkg.activate(package_path)
+ @test_throws PkgError Pkg.add("JSON")
+ end
+ end
# empty git repo (no commits)
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- close(LibGit2.init(tempdir))
- try Pkg.add(path=tempdir)
- @test false # to fail if add doesn't error
- catch err
- @test err isa PkgError
- @test match(r"^invalid git HEAD", err.msg) !== nothing
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ close(LibGit2.init(tempdir))
+ try
+ Pkg.add(path = tempdir)
+ @test false # to fail if add doesn't error
+ catch err
+ @test err isa PkgError
+ @test match(r"^invalid git HEAD", err.msg) !== nothing
+ end
end
- end end
+ end
end
#
@@ -470,7 +619,7 @@ end
# The package should be added as a direct dependency.
@testset "add: changes to the active project" begin
# Basic add
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add(Pkg.PackageSpec("Example"))
Pkg.dependencies(exuuid) do ex
@test ex.is_tracking_registry
@@ -478,8 +627,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# Basic add by version
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.5.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.5.0")
Pkg.dependencies(exuuid) do ex
@test ex.is_tracking_registry
@test ex.version == v"0.5.0"
@@ -500,8 +649,8 @@ end
end
=#
# Basic add by URL
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/JuliaLang/Example.jl", rev="v0.5.3")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/JuliaLang/Example.jl", rev = "v0.5.3")
Pkg.dependencies(exuuid) do ex
@test !ex.is_tracking_registry
@test ex.git_source == "https://github.com/JuliaLang/Example.jl"
@@ -510,8 +659,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# Basic add by git revision
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="master")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
Pkg.dependencies(exuuid) do ex
@test !ex.is_tracking_registry
@test ex.git_source == "https://github.com/JuliaLang/Example.jl.git"
@@ -520,7 +669,7 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# Adding stdlibs should work.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
profile_uuid = UUID("9abbd945-dff8-562f-b5e8-e1ebf5ef1b79")
# - Adding a stdlib by name.
Pkg.add("Markdown")
@@ -528,48 +677,52 @@ end
@test pkg.name == "Markdown"
end
# - Adding a stdlib by UUID.
- Pkg.add(uuid=profile_uuid)
+ Pkg.add(uuid = profile_uuid)
Pkg.dependencies(profile_uuid) do pkg
@test pkg.name == "Profile"
end
# - Adding a stdlib by name/UUID.
- Pkg.add(name="Markdown", uuid=markdown_uuid)
+ Pkg.add(name = "Markdown", uuid = markdown_uuid)
Pkg.dependencies(markdown_uuid) do pkg
@test pkg.name == "Markdown"
end
end
# Basic add by local path.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
- Pkg.add(path=path)
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.git_source == realpath(path)
- # We take care to check that the project file has been parsed correctly.
- @test pkg.name == "SimplePackage"
- @test pkg.version == v"0.2.0"
- @test haskey(pkg.dependencies, "Example")
- @test haskey(pkg.dependencies, "Markdown")
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
+ Pkg.add(path = path)
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.git_source == realpath(path)
+ # We take care to check that the project file has been parsed correctly.
+ @test pkg.name == "SimplePackage"
+ @test pkg.version == v"0.2.0"
+ @test haskey(pkg.dependencies, "Example")
+ @test haskey(pkg.dependencies, "Markdown")
+ end
+ @test haskey(Pkg.project().dependencies, "SimplePackage")
+ @test length(Pkg.project().dependencies) == 1
end
- @test haskey(Pkg.project().dependencies, "SimplePackage")
- @test length(Pkg.project().dependencies) == 1
- end end
+ end
# add when depot does not exist should create the default project in the correct location
- isolate() do; mktempdir() do tempdir
- empty!(DEPOT_PATH)
- push!(DEPOT_PATH, tempdir)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- rm(tempdir; force=true, recursive=true)
- @test !isdir(first(DEPOT_PATH))
- Pkg.add("JSON")
- @test dirname(dirname(Pkg.project().path)) == realpath(joinpath(tempdir, "environments"))
- end end
+ isolate() do;
+ mktempdir() do tempdir
+ empty!(DEPOT_PATH)
+ push!(DEPOT_PATH, tempdir)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ rm(tempdir; force = true, recursive = true)
+ @test !isdir(first(DEPOT_PATH))
+ Pkg.add("JSON")
+ @test dirname(dirname(Pkg.project().path)) == realpath(joinpath(tempdir, "environments"))
+ end
+ end
end
# Here we can use a loaded depot because we are only checking changes to the active project.
@testset "add: package state changes" begin
# Check that `add` on an already added stdlib works.
# Stdlibs are special cased throughout the codebase.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Markdown")
Pkg.add("Markdown")
Pkg.dependencies(markdown_uuid) do pkg
@@ -578,9 +731,9 @@ end
@test haskey(Pkg.project().dependencies, "Markdown")
end
# Double add should not change state, this would be an unnecessary change.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test !haskey(Pkg.Types.Context().env.project.compat, "Example")
- Pkg.add(name="Example", version="0.3.0")
+ Pkg.add(name = "Example", version = "0.3.0")
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
@test !haskey(Pkg.Types.Context().env.project.compat, "Example")
Pkg.add("Example")
@@ -588,22 +741,22 @@ end
@test !haskey(Pkg.Types.Context().env.project.compat, "Example")
end
# Adding a new package should not alter the version of existing packages.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
Pkg.add("Test")
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
end
# Add by version should not override pinned version.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
Pkg.pin("Example")
Pkg.dependencies(exuuid) do ex
@test ex.version == v"0.3.0"
@test ex.is_tracking_registry
@test ex.is_pinned
end
- Pkg.add(name="Example", version="0.5.0")
+ Pkg.add(name = "Example", version = "0.5.0")
# We check that the package state is left unchanged.
Pkg.dependencies(exuuid) do ex
@test ex.version == v"0.3.0"
@@ -612,14 +765,14 @@ end
end
end
# Add by version should override add by repo.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="master")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
# First we check that we are not tracking a registered version.
Pkg.dependencies(exuuid) do ex
@test ex.git_revision == "master"
@test !ex.is_tracking_registry
end
- Pkg.add(name="Example", version="0.3.0")
+ Pkg.add(name = "Example", version = "0.3.0")
# We should now be tracking a registered version.
Pkg.dependencies(exuuid) do ex
@test ex.version == v"0.3.0"
@@ -628,31 +781,33 @@ end
end
end
# Add by version should override add by repo, even for indirect dependencies.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "DependsOnExample"))
- Pkg.add(path=path)
- Pkg.add(name="Example", rev="master")
- @test !Pkg.dependencies()[exuuid].is_tracking_registry
- # Now we remove the package as a direct dependency.
- # The package should still exist as an indirect dependency because `DependsOnExample` depends on it.
- Pkg.rm("Example")
- Pkg.add(name="Example", version="0.3.0")
- # Now we check that we are tracking a registered version.
- Pkg.dependencies(exuuid) do ex
- @test ex.version == v"0.3.0"
- @test ex.is_tracking_registry
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "DependsOnExample"))
+ Pkg.add(path = path)
+ Pkg.add(name = "Example", rev = "master")
+ @test !Pkg.dependencies()[exuuid].is_tracking_registry
+ # Now we remove the package as a direct dependency.
+ # The package should still exist as an indirect dependency because `DependsOnExample` depends on it.
+ Pkg.rm("Example")
+ Pkg.add(name = "Example", version = "0.3.0")
+ # Now we check that we are tracking a registered version.
+ Pkg.dependencies(exuuid) do ex
+ @test ex.version == v"0.3.0"
+ @test ex.is_tracking_registry
+ end
end
- end end
+ end
# Add by URL should not override pin.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
- Pkg.pin(name="Example")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
+ Pkg.pin(name = "Example")
Pkg.dependencies(exuuid) do ex
@test ex.is_pinned
@test ex.is_tracking_registry
@test ex.version == v"0.3.0"
end
- Pkg.add(url="https://github.com/JuliaLang/Example.jl")
+ Pkg.add(url = "https://github.com/JuliaLang/Example.jl")
Pkg.dependencies(exuuid) do ex
@test ex.is_pinned
@test ex.is_tracking_registry
@@ -660,8 +815,8 @@ end
end
end
# It should be possible to switch branches by reusing the URL.
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl", rev="0.2.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl", rev = "0.2.0")
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
@test !pkg.is_tracking_registry
@@ -670,7 +825,7 @@ end
@test haskey(pkg.dependencies, "Example")
end
# Now we refer to it by name so to check that we reuse the URL.
- Pkg.add(name="Unregistered", rev="0.1.0")
+ Pkg.add(name = "Unregistered", rev = "0.1.0")
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
@test !pkg.is_tracking_registry
@@ -680,168 +835,170 @@ end
end
end
# add should resolve the correct versions even when the manifest is out of sync with the project compat
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- Pkg.activate(copy_test_package(tempdir, "CompatOutOfSync"))
- Pkg.add("Libdl")
- Pkg.dependencies(exuuid) do pkg
- @test pkg.version == v"0.3.0"
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ Pkg.activate(copy_test_package(tempdir, "CompatOutOfSync"))
+ Pkg.add("Libdl")
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.version == v"0.3.0"
+ end
end
- end end
+ end
# Preserve syntax
# These tests mostly check the REPL side correctness.
# make sure the default behavior is invoked
withenv("JULIA_PKG_PRESERVE_TIERED_INSTALLED" => false) do
- # - Normal add should not change the existing version.
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(name="JSON", version="0.18.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- end
- # - `tiered_installed`.
- isolate(loaded_depot=false) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
-
- @test_logs(
- (:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
- (:debug, "tiered_resolve: trying PRESERVE_ALL"),
- min_level=Logging.Debug,
- match_mode=:any,
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_TIERED_INSTALLED)
- )
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
-
- Pkg.activate(temp=true)
- @test_logs(
- (:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
- min_level=Logging.Debug,
- match_mode=:any,
- Pkg.add("Example"; preserve=Pkg.PRESERVE_TIERED_INSTALLED) # should only add v0.3.0 as it was installed earlier
- )
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ # - Normal add should not change the existing version.
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(name = "JSON", version = "0.18.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ end
+ # - `tiered_installed`.
+ isolate(loaded_depot = false) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- withenv("JULIA_PKG_PRESERVE_TIERED_INSTALLED" => true) do
- Pkg.activate(temp=true)
@test_logs(
(:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
- min_level=Logging.Debug,
- match_mode=:any,
- Pkg.add(name="Example")
+ (:debug, "tiered_resolve: trying PRESERVE_ALL"),
+ min_level = Logging.Debug,
+ match_mode = :any,
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_TIERED_INSTALLED)
)
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
- end
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.activate(temp=true)
- @test_logs(
- (:debug, "tiered_resolve: trying PRESERVE_ALL"),
- min_level=Logging.Debug,
- match_mode=:any,
- Pkg.add(name="Example") # default 'add' should serve a newer version
- )
- @test Pkg.dependencies()[exuuid].version > v"0.3.0"
- end
- # - `tiered` is the default option.
- isolate(loaded_depot=false) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_TIERED)
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- end
- # - `installed`.
- isolate(loaded_depot=false) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- @test_throws Pkg.Resolve.ResolverError Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_ALL_INSTALLED) # no installed version
- end
- # - `all` should succeed in the same way as `tiered`.
- isolate(loaded_depot=false) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_ALL)
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.activate(temp = true)
+ @test_logs(
+ (:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
+ min_level = Logging.Debug,
+ match_mode = :any,
+ Pkg.add("Example"; preserve = Pkg.PRESERVE_TIERED_INSTALLED) # should only add v0.3.0 as it was installed earlier
+ )
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- Pkg.rm("JSON")
- Pkg.add(Pkg.PackageSpec(;name="JSON"); preserve=Pkg.PRESERVE_ALL_INSTALLED)
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- end
- # - `direct` should also succeed in the same way.
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_DIRECT)
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- end
- # - `semver` should update `Example` and the jll to the highest semver compatible version.
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_SEMVER)
- @test Pkg.dependencies()[exuuid].version == v"0.3.3"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version > v"1.6.37+4"
- end
- #- `none` should update `Example` and the jll to the highest compatible version.
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_NONE)
- @test Pkg.dependencies()[exuuid].version > v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version > v"1.6.37+4"
- end
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+5")
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+5"
- end
- # Adding a new package to a package should add compat entries
- isolate(loaded_depot=true) do
- mktempdir() do tempdir
- Pkg.activate(tempdir)
- mkpath(joinpath(tempdir, "src"))
- touch(joinpath(tempdir, "src", "Foo.jl"))
- ctx = Pkg.Types.Context()
- ctx.env.project.name = "Foo"
- ctx.env.project.uuid = UUIDs.UUID(0)
- Pkg.Types.write_project(ctx.env)
- Pkg.add(name="Example", version="0.3.0")
+ withenv("JULIA_PKG_PRESERVE_TIERED_INSTALLED" => true) do
+ Pkg.activate(temp = true)
+ @test_logs(
+ (:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
+ min_level = Logging.Debug,
+ match_mode = :any,
+ Pkg.add(name = "Example")
+ )
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ end
+
+ Pkg.activate(temp = true)
+ @test_logs(
+ (:debug, "tiered_resolve: trying PRESERVE_ALL"),
+ min_level = Logging.Debug,
+ match_mode = :any,
+ Pkg.add(name = "Example") # default 'add' should serve a newer version
+ )
+ @test Pkg.dependencies()[exuuid].version > v"0.3.0"
+ end
+ # - `tiered` is the default option.
+ isolate(loaded_depot = false) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_TIERED)
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.Types.Context().env.project.compat["Example"] == Pkg.Types.Compat(Pkg.Types.VersionSpec("0.3"), "0.3.0")
- Pkg.add(name="Example", version="0.3.1")
- @test Pkg.Types.Context().env.project.compat["Example"] == Pkg.Types.Compat(Pkg.Types.VersionSpec("0.3"), "0.3.0")
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ end
+ # - `installed`.
+ isolate(loaded_depot = false) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ @test_throws Pkg.Resolve.ResolverError Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_ALL_INSTALLED) # no installed version
+ end
+ # - `all` should succeed in the same way as `tiered`.
+ isolate(loaded_depot = false) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_ALL)
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+
+ Pkg.rm("JSON")
+ Pkg.add(Pkg.PackageSpec(; name = "JSON"); preserve = Pkg.PRESERVE_ALL_INSTALLED)
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ end
+ # - `direct` should also succeed in the same way.
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_DIRECT)
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ end
+ # - `semver` should update `Example` and the jll to the highest semver compatible version.
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_SEMVER)
+ @test Pkg.dependencies()[exuuid].version == v"0.3.3"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version > v"1.6.37+4"
+ end
+    # - `none` should update `Example` and the jll to the highest compatible version.
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_NONE)
+ @test Pkg.dependencies()[exuuid].version > v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version > v"1.6.37+4"
+ end
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+5")
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+5"
+ end
+ # Adding a new package to a package should add compat entries
+ isolate(loaded_depot = true) do
+ mktempdir() do tempdir
+ Pkg.activate(tempdir)
+ mkpath(joinpath(tempdir, "src"))
+ touch(joinpath(tempdir, "src", "Foo.jl"))
+ ctx = Pkg.Types.Context()
+ ctx.env.project.name = "Foo"
+ ctx.env.project.uuid = UUIDs.UUID(0)
+ Pkg.Types.write_project(ctx.env)
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.Types.Context().env.project.compat["Example"] == Pkg.Types.Compat(Pkg.Types.VersionSpec("0.3"), "0.3.0")
+ Pkg.add(name = "Example", version = "0.3.1")
+ @test Pkg.Types.Context().env.project.compat["Example"] == Pkg.Types.Compat(Pkg.Types.VersionSpec("0.3"), "0.3.0")
+ end
end
- end
end # withenv
end
@@ -853,60 +1010,64 @@ end
# This tests shows that, packages added with an absolute path will not break
# if the project is moved to a new position.
# We can use the loaded depot here, it will help us avoid the original clone.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- empty_package = UUID("26187899-7657-4a90-a2f6-e79e0214bedc")
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "EmptyPackage"))
- path = abspath(path)
- Pkg.add(path=path)
- # Now we try to find the package.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
- @test !isdir(Pkg.dependencies()[empty_package].source)
- Pkg.instantiate()
- @test isdir(Pkg.dependencies()[empty_package].source)
- # Now we move the project and should still be able to find the package.
- mktempdir() do other_dir
- cp(dirname(Base.active_project()), other_dir; force=true)
- Pkg.activate(other_dir)
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ empty_package = UUID("26187899-7657-4a90-a2f6-e79e0214bedc")
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "EmptyPackage"))
+ path = abspath(path)
+ Pkg.add(path = path)
+ # Now we try to find the package.
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
@test !isdir(Pkg.dependencies()[empty_package].source)
Pkg.instantiate()
+ @test isdir(Pkg.dependencies()[empty_package].source)
+ # Now we move the project and should still be able to find the package.
+ mktempdir() do other_dir
+ cp(dirname(Base.active_project()), other_dir; force = true)
+ Pkg.activate(other_dir)
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
+ @test !isdir(Pkg.dependencies()[empty_package].source)
+ Pkg.instantiate()
+ end
end
- end end
+ end
# Dependencies added with relative paths should be stored relative to the active project.
# This test shows that packages added with a relative path will not break
# as long as they maintain the same relative position to the project.
# We can use the loaded depot here, it will help us avoid the original clone.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- empty_package = UUID("26187899-7657-4a90-a2f6-e79e0214bedc")
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "EmptyPackage"))
- # We add the package using a relative path.
- cd(path) do
- Pkg.add(path=".")
- manifest = Pkg.Types.read_manifest(joinpath(dirname(Base.active_project()), "Manifest.toml"))
- # Test that the relative path is canonicalized.
- repo = string("../../../", basename(tempdir), "/EmptyPackage")
- @test manifest[empty_package].repo.source == repo
- end
- # Now we try to find the package.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
- rm(joinpath(DEPOT_PATH[1], "clones"); recursive=true)
- Pkg.instantiate()
- # Test that Operations.is_instantiated works with relative path
- @test Pkg.Operations.is_instantiated(Pkg.Types.EnvCache())
- # Now we destroy the relative position and should not be able to find the package.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
- # Test that Operations.is_instantiated works with relative path
- @test !Pkg.Operations.is_instantiated(Pkg.Types.EnvCache())
- mktempdir() do other_dir
- cp(dirname(Base.active_project()), other_dir; force=true)
- Pkg.activate(other_dir)
- @test_throws PkgError Pkg.instantiate() # TODO is there a way to pattern match on just part of the err message?
- end
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ empty_package = UUID("26187899-7657-4a90-a2f6-e79e0214bedc")
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "EmptyPackage"))
+ # We add the package using a relative path.
+ cd(path) do
+ Pkg.add(path = ".")
+ manifest = Pkg.Types.read_manifest(joinpath(dirname(Base.active_project()), "Manifest.toml"))
+ # Test that the relative path is canonicalized.
+ repo = string("../../../", basename(tempdir), "/EmptyPackage")
+ @test manifest[empty_package].repo.source == repo
+ end
+ # Now we try to find the package.
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "clones"); recursive = true)
+ Pkg.instantiate()
+ # Test that Operations.is_instantiated works with relative path
+ @test Pkg.Operations.is_instantiated(Pkg.Types.EnvCache())
+ # Now we destroy the relative position and should not be able to find the package.
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
+ # Test that Operations.is_instantiated works with relative path
+ @test !Pkg.Operations.is_instantiated(Pkg.Types.EnvCache())
+ mktempdir() do other_dir
+ cp(dirname(Base.active_project()), other_dir; force = true)
+ Pkg.activate(other_dir)
+ @test_throws PkgError Pkg.instantiate() # TODO is there a way to pattern match on just part of the err message?
+ end
+ end
+ end
# Now we test packages added by URL.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# Details: `master` is past `0.1.0`
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl", rev="0.1.0")
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl", rev = "0.1.0")
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.name == "Unregistered"
@test isdir(pkg.source)
@@ -914,7 +1075,7 @@ end
@test haskey(Pkg.project().dependencies, "Unregistered")
# Now we remove the source so that we have to load it again.
# We should reuse the existing clone in this case.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
Pkg.instantiate()
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.name == "Unregistered"
@@ -923,8 +1084,8 @@ end
@test haskey(Pkg.project().dependencies, "Unregistered")
# Now we remove the source _and_ our cache, we have no choice to re-clone the remote.
# We should still be able to find the source.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
- rm(joinpath(DEPOT_PATH[1], "clones"); recursive=true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "clones"); recursive = true)
Pkg.instantiate()
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.name == "Unregistered"
@@ -944,45 +1105,57 @@ end
# these tests.
registry_url = "https://github.com/JuliaRegistries/General.git"
registry_commit = "030d6dae0df2ad6c3b2f90d41749df3eedb8d1b1"
- Utils.isolate_and_pin_registry(; registry_url, registry_commit) do; mktempdir() do tmp
- # All
- copy_test_package(tmp, "ShouldPreserveAll"; use_pkg=false)
- Pkg.activate(joinpath(tmp, "ShouldPreserveAll"))
- parsers_uuid = UUID("69de0a69-1ddd-5017-9359-2bf0b02dc9f0")
- original_parsers_version = Pkg.dependencies()[parsers_uuid].version
- Pkg.add(name="Example", version="0.5.0")
- @test Pkg.dependencies()[parsers_uuid].version == original_parsers_version
- # Direct
- copy_test_package(tmp, "ShouldPreserveDirect"; use_pkg=false)
- Pkg.activate(joinpath(tmp, "ShouldPreserveDirect"))
- ordered_collections = UUID("bac558e1-5e72-5ebc-8fee-abe8a469f55d")
- Pkg.add(uuid=ordered_collections, version="1.0.1")
- lazy_json = UUID("fc18253b-5e1b-504c-a4a2-9ece4944c004")
- data_structures = UUID("864edb3b-99cc-5e75-8d2d-829cb0a9cfe8")
- @test Pkg.dependencies()[lazy_json].version == v"0.1.0" # stayed the same
- @test Pkg.dependencies()[data_structures].version == v"0.16.1" # forced to change
- @test Pkg.dependencies()[ordered_collections].version == v"1.0.1" # sanity check
- # SEMVER
- copy_test_package(tmp, "ShouldPreserveSemver"; use_pkg=false)
- Pkg.activate(joinpath(tmp, "ShouldPreserveSemver"))
- light_graphs = UUID("093fc24a-ae57-5d10-9952-331d41423f4d")
- meta_graphs = UUID("626554b9-1ddb-594c-aa3c-2596fe9399a5")
- light_graphs_version = Pkg.dependencies()[light_graphs].version
- Pkg.add(uuid=meta_graphs, version="0.6.4")
- @test Pkg.dependencies()[meta_graphs].version == v"0.6.4" # sanity check
- # did not break semver
- @test Pkg.dependencies()[light_graphs].version in Pkg.Types.semver_spec("$(light_graphs_version)")
- # did change version
- @test Pkg.dependencies()[light_graphs].version != light_graphs_version
- # NONE
- copy_test_package(tmp, "ShouldPreserveNone"; use_pkg=false)
- Pkg.activate(joinpath(tmp, "ShouldPreserveNone"))
- array_interface = UUID("4fba245c-0d91-5ea0-9b3e-6abc04ee57a9")
- diff_eq_diff_tools = UUID("01453d9d-ee7c-5054-8395-0335cb756afa")
- Pkg.add(uuid=diff_eq_diff_tools, version="1.0.0")
- @test Pkg.dependencies()[diff_eq_diff_tools].version == v"1.0.0" # sanity check
- @test Pkg.dependencies()[array_interface].version in Pkg.Types.semver_spec("1") # had to make breaking change
- end end
+ Utils.isolate_and_pin_registry(; registry_url, registry_commit) do;
+ mktempdir() do tmp
+ # All
+ copy_test_package(tmp, "ShouldPreserveAll"; use_pkg = false)
+ Pkg.activate(joinpath(tmp, "ShouldPreserveAll"))
+ parsers_uuid = UUID("69de0a69-1ddd-5017-9359-2bf0b02dc9f0")
+ original_parsers_version = Pkg.dependencies()[parsers_uuid].version
+ Pkg.add(name = "Example", version = "0.5.0")
+ @test Pkg.dependencies()[parsers_uuid].version == original_parsers_version
+ # Direct
+ copy_test_package(tmp, "ShouldPreserveDirect"; use_pkg = false)
+ Pkg.activate(joinpath(tmp, "ShouldPreserveDirect"))
+ ordered_collections = UUID("bac558e1-5e72-5ebc-8fee-abe8a469f55d")
+ Pkg.add(uuid = ordered_collections, version = "1.0.1")
+ lazy_json = UUID("fc18253b-5e1b-504c-a4a2-9ece4944c004")
+ data_structures = UUID("864edb3b-99cc-5e75-8d2d-829cb0a9cfe8")
+ @test Pkg.dependencies()[lazy_json].version == v"0.1.0" # stayed the same
+ @test Pkg.dependencies()[data_structures].version == v"0.16.1" # forced to change
+ @test Pkg.dependencies()[ordered_collections].version == v"1.0.1" # sanity check
+ # SEMVER
+ copy_test_package(tmp, "ShouldPreserveSemver"; use_pkg = false)
+
+ # Support julia versions before & after the MbedTLS > OpenSSL switch
+ OpenSSL_pkgid = Base.PkgId(Base.UUID("458c3c95-2e84-50aa-8efc-19380b2a3a95"), "OpenSSL_jll")
+ manifest_to_use = if Base.is_stdlib(OpenSSL_pkgid)
+ joinpath(tmp, "ShouldPreserveSemver", "Manifest_OpenSSL.toml")
+ else
+ joinpath(tmp, "ShouldPreserveSemver", "Manifest_MbedTLS.toml")
+ end
+ mv(manifest_to_use, joinpath(tmp, "ShouldPreserveSemver", "Manifest.toml"))
+
+ Pkg.activate(joinpath(tmp, "ShouldPreserveSemver"))
+ light_graphs = UUID("093fc24a-ae57-5d10-9952-331d41423f4d")
+ meta_graphs = UUID("626554b9-1ddb-594c-aa3c-2596fe9399a5")
+ light_graphs_version = Pkg.dependencies()[light_graphs].version
+ Pkg.add(uuid = meta_graphs, version = "0.6.4")
+ @test Pkg.dependencies()[meta_graphs].version == v"0.6.4" # sanity check
+ # did not break semver
+ @test Pkg.dependencies()[light_graphs].version in Pkg.Types.semver_spec("$(light_graphs_version)")
+ # did change version
+ @test Pkg.dependencies()[light_graphs].version != light_graphs_version
+ # NONE
+ copy_test_package(tmp, "ShouldPreserveNone"; use_pkg = false)
+ Pkg.activate(joinpath(tmp, "ShouldPreserveNone"))
+ array_interface = UUID("4fba245c-0d91-5ea0-9b3e-6abc04ee57a9")
+ diff_eq_diff_tools = UUID("01453d9d-ee7c-5054-8395-0335cb756afa")
+ Pkg.add(uuid = diff_eq_diff_tools, version = "1.0.0")
+ @test Pkg.dependencies()[diff_eq_diff_tools].version == v"1.0.0" # sanity check
+ @test Pkg.dependencies()[array_interface].version in Pkg.Types.semver_spec("1") # had to make breaking change
+ end
+ end
end
#
@@ -994,96 +1167,343 @@ end
# Add using UUID syntax
api, args, opts = first(Pkg.pkg"add 7876af07-990d-54b4-ab0e-23690620f79a")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;uuid=UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
+ @test args == [Pkg.PackageSpec(; uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
@test isempty(opts)
# Add using `name=UUID` syntax.
api, args, opts = first(Pkg.pkg"add Example=7876af07-990d-54b4-ab0e-23690620f79a")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example", uuid=UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
+ @test args == [Pkg.PackageSpec(; name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
@test isempty(opts)
# Add using git revision syntax.
api, args, opts = first(Pkg.pkg"add Example#master")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example", rev="master")]
+ @test args == [Pkg.PackageSpec(; name = "Example", rev = "master")]
@test isempty(opts)
# Add using git revision syntax.
- api,args, opt = first(Pkg.pkg"add Example#v0.5.3")
+ api, args, opt = first(Pkg.pkg"add Example#v0.5.3")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example", rev="v0.5.3")]
+ @test args == [Pkg.PackageSpec(; name = "Example", rev = "v0.5.3")]
@test isempty(opts)
# Add using registered version syntax.
api, args, opts = first(Pkg.pkg"add Example@0.5.0")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example", version="0.5.0")]
+ @test args == [Pkg.PackageSpec(; name = "Example", version = "0.5.0")]
+ @test isempty(opts)
+ # Add multiple packages with version specifier
+ api, args, opts = first(Pkg.pkg"add Example@0.5.5 Test")
+ @test api == Pkg.add
+ @test length(args) == 2
+ @test args[1].name == "Example"
+ @test args[1].version == "0.5.5"
+ @test args[2].name == "Test"
@test isempty(opts)
# Add as a weakdep.
api, args, opts = first(Pkg.pkg"add --weak Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:target => :weakdeps)
# Add as an extra.
api, args, opts = first(Pkg.pkg"add --extra Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:target => :extras)
# Add using direct URL syntax.
api, args, opts = first(Pkg.pkg"add https://github.com/00vareladavid/Unregistered.jl#0.1.0")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;url="https://github.com/00vareladavid/Unregistered.jl", rev="0.1.0")]
+ @test args == [Pkg.PackageSpec(; url = "https://github.com/00vareladavid/Unregistered.jl", rev = "0.1.0")]
@test isempty(opts)
+
+ api, args, opts = first(Pkg.pkg"add a/path/with/@/deal/with/it")
+ @test normpath(args[1].path) == normpath("a/path/with/@/deal/with/it")
+
+ # Test GitHub URLs with tree/commit paths
+ @testset "GitHub tree/commit URLs" begin
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/repo/tree/feature-branch")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/repo"
+ @test args[1].rev == "feature-branch"
+
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/repo/commit/abc123def")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/repo"
+ @test args[1].rev == "abc123def"
+ end
+
+ # Test Git URLs with branch specifiers
+ @testset "Git URLs with branch specifiers" begin
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/repo.git#main")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/repo.git"
+ @test args[1].rev == "main"
+
+ api, args, opts = first(Pkg.pkg"add https://bitbucket.org/user/repo.git#develop")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://bitbucket.org/user/repo.git"
+ @test args[1].rev == "develop"
+
+ api, args, opts = first(Pkg.pkg"add git@github.com:user/repo.git#feature")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "git@github.com:user/repo.git"
+ @test args[1].rev == "feature"
+
+ api, args, opts = first(Pkg.pkg"add ssh://git@server.com/path/repo.git#branch-name")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "ssh://git@server.com/path/repo.git"
+ @test args[1].rev == "branch-name"
+ end
+
+ # Test SSH URLs with IP addresses (issue #1822)
+ @testset "SSH URLs with IP addresses" begin
+ # Test that user@host:path URLs with IP addresses are parsed correctly as complete URLs
+ api, args, opts = first(Pkg.pkg"add user@10.20.30.40:PackageName.jl")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "user@10.20.30.40:PackageName.jl"
+ @test args[1].subdir === nothing
+
+ api, args, opts = first(Pkg.pkg"add git@192.168.1.100:path/to/repo.jl")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "git@192.168.1.100:path/to/repo.jl"
+ @test args[1].subdir === nothing
+ end
+
+ # Test Git URLs with subdir specifiers
+ @testset "Git URLs with subdir specifiers" begin
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/monorepo.git:packages/MyPackage")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/monorepo.git"
+ @test args[1].subdir == "packages/MyPackage"
+
+ api, args, opts = first(Pkg.pkg"add ssh://git@server.com/repo.git:subdir/nested")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "ssh://git@server.com/repo.git"
+ @test args[1].subdir == "subdir/nested"
+ end
+
+ # Test complex URLs (with username in URL + branch/tag/subdir)
+ @testset "Complex Git URLs" begin
+ api, args, opts = first(Pkg.pkg"add https://username@bitbucket.org/org/repo.git#dev")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://username@bitbucket.org/org/repo.git"
+ @test args[1].rev == "dev"
+
+ api, args, opts = first(Pkg.pkg"add https://user:token@gitlab.company.com/group/project.git")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://user:token@gitlab.company.com/group/project.git"
+
+ api, args, opts = first(Pkg.pkg"add https://example.com:8080/git/repo.git:packages/core")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://example.com:8080/git/repo.git"
+ @test args[1].subdir == "packages/core"
+
+ # Test URLs with complex authentication and branch names containing #
+ api, args, opts = first(Pkg.pkg"add https://user:pass123@gitlab.example.com:8443/group/project.git#feature/fix-#42")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://user:pass123@gitlab.example.com:8443/group/project.git"
+ @test args[1].rev == "feature/fix-#42"
+
+ # Test URLs with complex authentication and subdirs
+ api, args, opts = first(Pkg.pkg"add https://api_key:secret@company.git.server.com/team/monorepo.git:libs/julia/pkg")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://api_key:secret@company.git.server.com/team/monorepo.git"
+ @test args[1].subdir == "libs/julia/pkg"
+
+ # Test URLs with authentication, branch with #, and subdir
+ api, args, opts = first(Pkg.pkg"add https://deploy:token123@internal.git.company.com/product/backend.git#hotfix/issue-#789:packages/core")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://deploy:token123@internal.git.company.com/product/backend.git"
+ @test args[1].rev == "hotfix/issue-#789"
+ @test args[1].subdir == "packages/core"
+
+ # Test SSH URLs with port numbers and subdirs
+ api, args, opts = first(Pkg.pkg"add ssh://git@custom.server.com:2222/path/to/repo.git:src/package")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "ssh://git@custom.server.com:2222/path/to/repo.git"
+ @test args[1].subdir == "src/package"
+
+ # Test URL with username in URL and multiple # in branch name
+ api, args, opts = first(Pkg.pkg"add https://ci_user@build.company.net/team/project.git#release/v2.0-#123-#456")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://ci_user@build.company.net/team/project.git"
+ @test args[1].rev == "release/v2.0-#123-#456"
+
+ # Test complex case: auth + port + branch with # + subdir
+ api, args, opts = first(Pkg.pkg"add https://robot:abc123@git.enterprise.com:9443/division/platform.git#bugfix/handle-#special-chars:modules/julia-pkg")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://robot:abc123@git.enterprise.com:9443/division/platform.git"
+ @test args[1].rev == "bugfix/handle-#special-chars"
+ @test args[1].subdir == "modules/julia-pkg"
+
+ # Test local paths with branch specifiers (paths can be repos)
+ api, args, opts = first(Pkg.pkg"add ./local/repo#feature-branch")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test normpath(args[1].path) == normpath("local/repo") # normpath removes "./"
+ @test args[1].rev == "feature-branch"
+
+ # Test local paths with subdir specifiers
+ api, args, opts = first(Pkg.pkg"add ./monorepo:packages/subpkg")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == "monorepo" # normpath removes "./"
+ @test args[1].subdir == "packages/subpkg"
+
+ # Test local paths with both branch and subdir
+ api, args, opts = first(Pkg.pkg"add ./project#develop:src/package")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == "project" # normpath removes "./"
+ @test args[1].rev == "develop"
+ @test args[1].subdir == "src/package"
+
+ # Test local paths with branch containing # characters
+ api, args, opts = first(Pkg.pkg"add ../workspace/repo#bugfix/issue-#123")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test normpath(args[1].path) == normpath("../workspace/repo")
+ @test args[1].rev == "bugfix/issue-#123"
+
+ # Test complex local path case: relative path + branch with # + subdir
+ if !Sys.iswindows()
+ api, args, opts = first(Pkg.pkg"add ~/projects/myrepo#feature/fix-#456:libs/core")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test startswith(args[1].path, "/") # ~ gets expanded to absolute path
+ @test endswith(normpath(args[1].path), normpath("/projects/myrepo"))
+ @test args[1].rev == "feature/fix-#456"
+ @test args[1].subdir == "libs/core"
+ end
+
+ # Test quoted URL with separate revision specifier (regression test)
+ api, args, opts = first(Pkg.pkg"add \"https://username@bitbucket.org/orgname/reponame.git\"#dev")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://username@bitbucket.org/orgname/reponame.git"
+ @test args[1].rev == "dev"
+
+ # Test quoted URL with separate version specifier
+ api, args, opts = first(Pkg.pkg"add \"https://company.git.server.com/project.git\"@v2.1.0")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://company.git.server.com/project.git"
+ @test args[1].version == "v2.1.0"
+
+ # Test quoted URL with separate subdir specifier
+ api, args, opts = first(Pkg.pkg"add \"https://gitlab.example.com/monorepo.git\":packages/core")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://gitlab.example.com/monorepo.git"
+ @test args[1].subdir == "packages/core"
+ end
+
+ # Test that regular URLs without .git still work
+ @testset "Non-.git URLs (unchanged behavior)" begin
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/repo")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/repo"
+ @test args[1].rev === nothing
+ @test args[1].subdir === nothing
+ end
+
+ @testset "Windows path handling" begin
+ # Test that Windows drive letters are not treated as subdir separators
+ api, args, opts = first(Pkg.pkg"add C:\\Users\\test\\project")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == normpath("C:\\\\Users\\\\test\\\\project")
+ @test args[1].subdir === nothing
+
+ # Test with forward slashes too
+ api, args, opts = first(Pkg.pkg"add C:/Users/test/project")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == normpath("C:/Users/test/project")
+ @test args[1].subdir === nothing
+
+ # Test that actual subdir syntax still works with Windows paths
+ api, args, opts = first(Pkg.pkg"add C:\\Users\\test\\project:subdir")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == normpath("C:\\\\Users\\\\test\\\\project")
+ @test args[1].subdir == "subdir"
+ end
+
# Add using preserve option
api, args, opts = first(Pkg.pkg"add --preserve=none Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_NONE)
api, args, opts = first(Pkg.pkg"add --preserve=semver Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_SEMVER)
api, args, opts = first(Pkg.pkg"add --preserve=tiered Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_TIERED)
api, args, opts = first(Pkg.pkg"add --preserve=all Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_ALL)
api, args, opts = first(Pkg.pkg"add --preserve=direct Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_DIRECT)
end
# check casesensitive resolution of paths
- isolate() do; cd_tempdir() do dir
- Pkg.REPLMode.TEST_MODE[] = true
- mkdir("example")
- api, args, opts = first(Pkg.pkg"add Example")
- @test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
- @test isempty(opts)
- api, args, opts = first(Pkg.pkg"add example")
- @test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="example")]
- @test isempty(opts)
- @test_throws PkgError Pkg.pkg"add ./Example"
- api, args, opts = first(Pkg.pkg"add ./example")
- @test api == Pkg.add
- @test args == [Pkg.PackageSpec(;path="example")]
- @test isempty(opts)
- cd("example")
- api, args, opts = first(Pkg.pkg"add .")
- @test api == Pkg.add
- @test args == [Pkg.PackageSpec(;path=".")]
- @test isempty(opts)
- end end
- isolate() do; cd_tempdir() do dir
- # adding a nonexistent directory
- @test_throws PkgError("`some/really/random/Dir` appears to be a local path, but directory does not exist"
- ) Pkg.pkg"add some/really/random/Dir"
- # warn if not explicit about adding directory
- mkdir("Example")
- @test_logs (:info, r"Use `./Example` to add or develop the local directory at `.*`.") match_mode=:any Pkg.pkg"add Example"
- end end
+ isolate() do;
+ cd_tempdir() do dir
+ Pkg.REPLMode.TEST_MODE[] = true
+ mkdir("example")
+ api, args, opts = first(Pkg.pkg"add Example")
+ @test api == Pkg.add
+ @test args == [Pkg.PackageSpec(; name = "Example")]
+ @test isempty(opts)
+ api, args, opts = first(Pkg.pkg"add example")
+ @test api == Pkg.add
+ @test args == [Pkg.PackageSpec(; name = "example")]
+ @test isempty(opts)
+ api, args, opts = first(Pkg.pkg"add ./example")
+ @test api == Pkg.add
+ @test args == [Pkg.PackageSpec(; path = "example")]
+ @test isempty(opts)
+ cd("example")
+ api, args, opts = first(Pkg.pkg"add .")
+ @test api == Pkg.add
+ @test args == [Pkg.PackageSpec(; path = ".")]
+ @test isempty(opts)
+ end
+ end
+ isolate() do;
+ cd_tempdir() do dir
+ # adding a nonexistent directory
+ @test_throws PkgError(
+ "Path `$(abspath("some/really/random/Dir"))` does not exist."
+ ) Pkg.pkg"add some/really/random/Dir"
+ # warn if not explicit about adding directory
+ mkdir("Example")
+ @test_logs (:info, r"Use `./Example` to add or develop the local directory at `.*`.") match_mode = :any Pkg.pkg"add Example"
+ end
+ end
end
#
@@ -1094,27 +1514,31 @@ end
# ## Input Checking
#
@testset "develop: input checking" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# Julia is not a valid package name.
- @test_throws PkgError("`julia` is not a valid package name") Pkg.develop(name="julia")
+ @test_throws PkgError("`julia` is not a valid package name") Pkg.develop(name = "julia")
# Package names must be valid Julia identifiers.
- @test_throws PkgError("`***` is not a valid package name") Pkg.develop(name="***")
- @test_throws PkgError("`Foo Bar` is not a valid package name") Pkg.develop(name="Foo Bar")
+ @test_throws PkgError("`***` is not a valid package name") Pkg.develop(name = "***")
+ @test_throws PkgError("`Foo Bar` is not a valid package name") Pkg.develop(name = "Foo Bar")
# Names which are invalid and are probably URLs or paths.
- @test_throws PkgError("""
- `https://github.com` is not a valid package name
- The argument appears to be a URL or path, perhaps you meant `Pkg.develop(url="...")` or `Pkg.develop(path="...")`.""") Pkg.develop("https://github.com")
- @test_throws PkgError("""
- `./Foobar` is not a valid package name
- The argument appears to be a URL or path, perhaps you meant `Pkg.develop(url="...")` or `Pkg.develop(path="...")`.""") Pkg.develop("./Foobar")
+ @test_throws PkgError(
+ """
+ `https://github.com` is not a valid package name
+ The argument appears to be a URL or path, perhaps you meant `Pkg.develop(url="...")` or `Pkg.develop(path="...")`."""
+ ) Pkg.develop("https://github.com")
+ @test_throws PkgError(
+ """
+ `./Foobar` is not a valid package name
+ The argument appears to be a URL or path, perhaps you meant `Pkg.develop(url="...")` or `Pkg.develop(path="...")`."""
+ ) Pkg.develop("./Foobar")
# An empty spec is invalid.
@test_throws PkgError(
"name, UUID, URL, or filesystem path specification required when calling `develop`"
- ) Pkg.develop(Pkg.PackageSpec())
+ ) Pkg.develop(Pkg.PackageSpec())
# git revisions imply that `develop` tracks a git repo.
@test_throws PkgError(
"rev argument not supported by `develop`; consider using `add` instead"
- ) Pkg.develop(name="Example", rev="master")
+ ) Pkg.develop(name = "Example", rev = "master")
# Adding an unregistered package by name.
@test_throws PkgError Pkg.develop("ThisIsHopefullyRandom012856014925701382")
# Wrong UUID
@@ -1124,7 +1548,7 @@ end
# Two packages with the same name
@test_throws PkgError(
"it is invalid to specify multiple packages with the same UUID: `Example [7876af07]`"
- ) Pkg.develop([(;name="Example"), (;uuid=exuuid)])
+ ) Pkg.develop([(; name = "Example"), (; uuid = exuuid)])
end
end
@@ -1133,7 +1557,7 @@ end
#
@testset "develop: changes to the active project" begin
# It is possible to `develop` by specifying a registered name.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.develop("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1143,8 +1567,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# Develop with shared=false
- isolate(loaded_depot=true) do
- Pkg.develop("Example"; shared=false)
+ isolate(loaded_depot = true) do
+ Pkg.develop("Example"; shared = false)
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test Base.samefile(pkg.source, joinpath(dirname(Pkg.project().path), "dev", "Example"))
@@ -1153,8 +1577,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# It is possible to develop by specifying a registered UUID.
- isolate(loaded_depot=true) do
- Pkg.develop(uuid=exuuid)
+ isolate(loaded_depot = true) do
+ Pkg.develop(uuid = exuuid)
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test Base.samefile(pkg.source, joinpath(DEPOT_PATH[1], "dev", "Example"))
@@ -1163,8 +1587,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# It is possible to develop by specifying a URL.
- isolate(loaded_depot=true) do
- Pkg.develop(url="https://github.com/JuliaLang/Example.jl")
+ isolate(loaded_depot = true) do
+ Pkg.develop(url = "https://github.com/JuliaLang/Example.jl")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test Base.samefile(pkg.source, joinpath(DEPOT_PATH[1], "dev", "Example"))
@@ -1173,22 +1597,24 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# It is possible to develop by directly specifying a path.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "SimplePackage")
- path = joinpath(tempdir, "SimplePackage")
- Pkg.develop(path=path)
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test realpath(pkg.source) == realpath(path)
- @test !pkg.is_tracking_registry
- @test haskey(pkg.dependencies, "Example")
- @test haskey(pkg.dependencies, "Markdown")
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "SimplePackage")
+ path = joinpath(tempdir, "SimplePackage")
+ Pkg.develop(path = path)
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test realpath(pkg.source) == realpath(path)
+ @test !pkg.is_tracking_registry
+ @test haskey(pkg.dependencies, "Example")
+ @test haskey(pkg.dependencies, "Markdown")
+ end
+ @test haskey(Pkg.project().dependencies, "SimplePackage")
end
- @test haskey(Pkg.project().dependencies, "SimplePackage")
- end end
+ end
# recursive `dev`
- isolate(loaded_depot=true) do
- Pkg.develop(path=joinpath(@__DIR__, "test_packages", "A"))
+ isolate(loaded_depot = true) do
+ Pkg.develop(path = joinpath(@__DIR__, "test_packages", "A"))
Pkg.dependencies(UUID("0829fd7c-1e7e-4927-9afa-b8c61d5e0e42")) do pkg # dep A
@test haskey(pkg.dependencies, "B")
@test haskey(pkg.dependencies, "C")
@@ -1206,53 +1632,59 @@ end
end
end
# primary depot is a relative path
- isolate() do; cd_tempdir() do dir
- empty!(DEPOT_PATH)
- push!(DEPOT_PATH, "temp")
- Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.develop("JSON")
- Pkg.dependencies(json_uuid) do pkg
- @test Base.samefile(pkg.source, abspath(joinpath("temp", "dev", "JSON")))
+ isolate() do;
+ cd_tempdir() do dir
+ empty!(DEPOT_PATH)
+ push!(DEPOT_PATH, "temp")
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ Pkg.develop("JSON")
+ Pkg.dependencies(json_uuid) do pkg
+ @test Base.samefile(pkg.source, abspath(joinpath("temp", "dev", "JSON")))
+ end
end
- end end
+ end
end
@testset "develop: interaction with `JULIA_PKG_DEVDIR`" begin
# A shared `develop` should obey `JULIA_PKG_DEVDIR`.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- withenv("JULIA_PKG_DEVDIR" => tempdir) do
- Pkg.develop("Example")
- end
- Pkg.dependencies(exuuid) do pkg
- @test pkg.name == "Example"
- @test Base.samefile(pkg.source, joinpath(tempdir, "Example"))
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ withenv("JULIA_PKG_DEVDIR" => tempdir) do
+ Pkg.develop("Example")
+ end
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.name == "Example"
+ @test Base.samefile(pkg.source, joinpath(tempdir, "Example"))
+ end
+ @test haskey(Pkg.project().dependencies, "Example")
end
- @test haskey(Pkg.project().dependencies, "Example")
- end end
+ end
# A local `develop` should not be affected by `JULIA_PKG_DEVDIR`
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- withenv("JULIA_PKG_DEVDIR" => tempdir) do
- Pkg.develop("Example"; shared=false)
- end
- Pkg.dependencies(exuuid) do pkg
- @test pkg.name == "Example"
- @test Base.samefile(pkg.source, joinpath(dirname(Pkg.project().path), "dev", "Example"))
- @test !pkg.is_tracking_registry
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ withenv("JULIA_PKG_DEVDIR" => tempdir) do
+ Pkg.develop("Example"; shared = false)
+ end
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.name == "Example"
+ @test Base.samefile(pkg.source, joinpath(dirname(Pkg.project().path), "dev", "Example"))
+ @test !pkg.is_tracking_registry
+ end
+ @test haskey(Pkg.project().dependencies, "Example")
end
- @test haskey(Pkg.project().dependencies, "Example")
- end end
+ end
end
@testset "develop: path handling" begin
# Relative paths
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
project_path = dirname(Pkg.project().path)
mkpath(project_path)
copy_test_package(project_path, "SimplePackage")
package_path = joinpath(project_path, "SimplePackage")
# Now we `develop` using a relative path.
cd(project_path) do
- Pkg.develop(Pkg.PackageSpec(path="SimplePackage"))
+ Pkg.develop(Pkg.PackageSpec(path = "SimplePackage"))
end
# Check that everything went ok.
original_source = nothing
@@ -1264,7 +1696,7 @@ end
end
# Now we move the project, but preserve the relative structure.
mktempdir() do tempdir
- cp(project_path, tempdir; force=true)
+ cp(project_path, tempdir; force = true)
Pkg.activate(tempdir)
# We check that we can still find the source.
Pkg.dependencies(simple_package_uuid) do pkg
@@ -1274,70 +1706,78 @@ end
end
end
# Absolute paths
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "SimplePackage")
- package_path = joinpath(tempdir, "SimplePackage")
- Pkg.activate(tempdir)
- Pkg.develop(path=package_path)
- original_source = nothing
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test isdir(pkg.source)
- @test realpath(pkg.source) == realpath(package_path)
- original_source = pkg.source
- end
- mktempdir() do tempdir2
- cp(joinpath(tempdir, "Project.toml"), joinpath(tempdir2, "Project.toml"))
- cp(joinpath(tempdir, "Manifest.toml"), joinpath(tempdir2, "Manifest.toml"))
- Pkg.activate(tempdir2)
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "SimplePackage")
+ package_path = joinpath(tempdir, "SimplePackage")
+ Pkg.activate(tempdir)
+ Pkg.develop(path = package_path)
+ original_source = nothing
Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
@test isdir(pkg.source)
- @test Base.samefile(pkg.source, original_source)
+ @test realpath(pkg.source) == realpath(package_path)
+ original_source = pkg.source
+ end
+ mktempdir() do tempdir2
+ cp(joinpath(tempdir, "Project.toml"), joinpath(tempdir2, "Project.toml"))
+ cp(joinpath(tempdir, "Manifest.toml"), joinpath(tempdir2, "Manifest.toml"))
+ Pkg.activate(tempdir2)
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test isdir(pkg.source)
+ @test Base.samefile(pkg.source, original_source)
+ end
end
end
- end end
+ end
# ### Special casing on path handling
# "." style path
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- cd(path) do
- Pkg.pkg"develop ."
- end
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test isdir(pkg.source)
- @test pkg.is_tracking_path
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ cd(path) do
+ Pkg.pkg"develop ."
+ end
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test isdir(pkg.source)
+ @test pkg.is_tracking_path
+ end
end
- end end
+ end
# ".." style path
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- cd(joinpath(path, "src")) do
- Pkg.pkg"develop .."
- end
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test isdir(pkg.source)
- @test pkg.is_tracking_path
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ cd(joinpath(path, "src")) do
+ Pkg.pkg"develop .."
+ end
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test isdir(pkg.source)
+ @test pkg.is_tracking_path
+ end
end
- end end
+ end
# Local directory name. This must be prepended by "./".
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- cd(dirname(path)) do
- Pkg.pkg"develop ./SimplePackage"
- end
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test isdir(pkg.source)
- @test pkg.is_tracking_path
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ cd(dirname(path)) do
+ Pkg.pkg"develop ./SimplePackage"
+ end
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test isdir(pkg.source)
+ @test pkg.is_tracking_path
+ end
end
- end end
+ end
end
@testset "develop: package state changes" begin
# Developing an existing package which is tracking the registry should just override.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
Pkg.develop("Example")
Pkg.dependencies(exuuid) do pkg
@@ -1349,8 +1789,8 @@ end
@test length(Pkg.project().dependencies) == 1
end
# Developing an existing package which is tracking a repo should just override.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="master")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
Pkg.develop("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1361,9 +1801,9 @@ end
@test length(Pkg.project().dependencies) == 1
end
# Develop with different target path should override old path with target path.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.develop("Example")
- Pkg.develop("Example"; shared=false)
+ Pkg.develop("Example"; shared = false)
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test Base.samefile(pkg.source, joinpath(dirname(Pkg.project().path), "dev", "Example"))
@@ -1373,9 +1813,9 @@ end
@test length(Pkg.project().dependencies) == 1
end
# develop tries to resolve from the manifest
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
remote_url = "https://github.com/00vareladavid/Unregistered.jl"
- Pkg.add(Pkg.PackageSpec(url=remote_url))
+ Pkg.add(Pkg.PackageSpec(url = remote_url))
Pkg.develop("Unregistered")
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.name == "Unregistered"
@@ -1392,37 +1832,37 @@ end
# registered name
api, args, opts = first(Pkg.pkg"develop Example")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test isempty(opts)
# registered uuid
api, args, opts = first(Pkg.pkg"develop 7876af07-990d-54b4-ab0e-23690620f79a")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;uuid=UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
+ @test args == [Pkg.PackageSpec(; uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
@test isempty(opts)
# name=uuid
api, args, opts = first(Pkg.pkg"develop Example=7876af07-990d-54b4-ab0e-23690620f79a")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example", uuid=UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
+ @test args == [Pkg.PackageSpec(; name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
@test isempty(opts)
# local flag
api, args, opts = first(Pkg.pkg"develop --local Example")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:shared => false)
# shared flag
api, args, opts = first(Pkg.pkg"develop --shared Example")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:shared => true)
# URL
api, args, opts = first(Pkg.pkg"develop https://github.com/JuliaLang/Example.jl")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;url="https://github.com/JuliaLang/Example.jl")]
+ @test args == [Pkg.PackageSpec(; url = "https://github.com/JuliaLang/Example.jl")]
@test isempty(opts)
# develop using preserve option
api, args, opts = first(Pkg.pkg"dev --preserve=none Example")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_NONE)
end
end
@@ -1432,17 +1872,19 @@ end
#
@testset "instantiate: input checking" begin
# Unregistered UUID in manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "UnregisteredUUID")
- Pkg.activate(package_path)
- @test_throws PkgError("expected package `Example [142fd7e7]` to be registered") Pkg.update()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "UnregisteredUUID")
+ Pkg.activate(package_path)
+ @test_throws PkgError Pkg.update()
+ end
+ end
end
@testset "instantiate: changes to the active project" begin
# Instantiate should preserve tree hash for regularly versioned packages.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
th = nothing
Pkg.dependencies(exuuid) do pkg
th = pkg.tree_hash
@@ -1450,8 +1892,8 @@ end
@test pkg.version == v"0.3.0"
@test isdir(pkg.source)
end
- rm(joinpath(DEPOT_PATH[1], "packages"); force=true, recursive=true)
- rm(joinpath(DEPOT_PATH[1], "clones"); force=true, recursive=true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); force = true, recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "clones"); force = true, recursive = true)
Pkg.instantiate()
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1461,16 +1903,16 @@ end
end
end
# `instantiate` should preserve tree hash for packages tracking repos.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="v0.5.3")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "v0.5.3")
th = nothing
Pkg.dependencies(exuuid) do pkg
th = pkg.tree_hash
@test pkg.name == "Example"
@test isdir(pkg.source)
end
- rm(joinpath(DEPOT_PATH[1], "packages"); force=true, recursive=true)
- rm(joinpath(DEPOT_PATH[1], "clones"); force=true, recursive=true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); force = true, recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "clones"); force = true, recursive = true)
Pkg.instantiate()
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1479,21 +1921,25 @@ end
end
# `instantiate` should check for a consistent dependency graph.
# Otherwise it is not clear what to instantiate.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "ExtraDirectDep")
- Pkg.activate(joinpath(tempdir, "ExtraDirectDep"))
- @test_throws PkgError Pkg.instantiate()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "ExtraDirectDep")
+ Pkg.activate(joinpath(tempdir, "ExtraDirectDep"))
+ @test_throws PkgError Pkg.instantiate()
+ end
+ end
# However, if `manifest=false`, we know to instantiate from the direct dependencies.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "ExtraDirectDep")
- Pkg.activate(joinpath(tempdir, "ExtraDirectDep"))
- Pkg.instantiate(;manifest=false)
- @test haskey(Pkg.project().dependencies, "Example")
- @test haskey(Pkg.project().dependencies, "Unicode")
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "ExtraDirectDep")
+ Pkg.activate(joinpath(tempdir, "ExtraDirectDep"))
+ Pkg.instantiate(; manifest = false)
+ @test haskey(Pkg.project().dependencies, "Example")
+ @test haskey(Pkg.project().dependencies, "Unicode")
+ end
+ end
# `instantiate` lonely manifest
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
manifest_dir = joinpath(@__DIR__, "manifest", "noproject")
cd(manifest_dir) do
try
@@ -1503,12 +1949,12 @@ end
@test isinstalled("Example")
@test isinstalled("x1")
finally
- rm("Project.toml"; force=true)
+ rm("Project.toml"; force = true)
end
end
end
# instantiate old manifest
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
manifest_dir = joinpath(@__DIR__, "manifest", "old")
cd(manifest_dir) do
Pkg.activate(".")
@@ -1517,24 +1963,27 @@ end
end
end
# `instantiate` on a lonely manifest should detect duplicate names
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- simple_package_path = copy_test_package(tempdir, "SimplePackage")
- unregistered_example_path = copy_test_package(tempdir, "Example")
- Pkg.develop(path=simple_package_path)
- Pkg.develop(path=unregistered_example_path)
- rm(Pkg.project().path)
- @test_throws PkgError Pkg.instantiate()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ simple_package_path = copy_test_package(tempdir, "SimplePackage")
+ unregistered_example_path = copy_test_package(tempdir, "Example")
+ Pkg.develop(path = simple_package_path)
+ Pkg.develop(path = unregistered_example_path)
+ rm(Pkg.project().path)
+ # Broken, likely by a change in julia Base
+ # @test_throws PkgError Pkg.instantiate()
+ end
+ end
# verbose smoke test
- isolate(loaded_depot=true) do
- Pkg.instantiate(;verbose=true)
+ isolate(loaded_depot = true) do
+ Pkg.instantiate(; verbose = true)
end
end
@testset "instantiate: caching" begin
# Instantiate should not override existing source.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
th, t1 = nothing, nothing
Pkg.dependencies(exuuid) do pkg
th = pkg.tree_hash
@@ -1585,7 +2034,7 @@ end
@testset "why" begin
isolate() do
- Pkg.add(name = "StaticArrays", version = "1.5.0")
+ Pkg.add(name = "StaticArrays", version = "1.5.20")
io = IOBuffer()
Pkg.why("StaticArrays"; io)
@@ -1594,14 +2043,14 @@ end
Pkg.why("StaticArraysCore"; io)
str = String(take!(io))
- @test str == " StaticArrays → StaticArraysCore\n"
+ @test str == " StaticArrays → StaticArraysCore\n"
Pkg.why("LinearAlgebra"; io)
str = String(take!(io))
@test str ==
- """ StaticArrays → LinearAlgebra
- StaticArrays → Statistics → LinearAlgebra
- """
+ """ StaticArrays → LinearAlgebra
+ StaticArrays → Statistics → LinearAlgebra
+ """
end
end
@@ -1610,21 +2059,23 @@ end
#
@testset "update: input checking" begin
# Unregistered UUID in manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "UnregisteredUUID")
- Pkg.activate(package_path)
- @test_throws PkgError("expected package `Example [142fd7e7]` to be registered") Pkg.update()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "UnregisteredUUID")
+ Pkg.activate(package_path)
+ @test_throws PkgError Pkg.update()
+ end
+ end
# package does not exist in the manifest
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test_throws PkgError Pkg.update("Example")
end
end
@testset "update: changes to the active project" begin
# Basic testing of UPLEVEL
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
Pkg.update(; level = Pkg.UPLEVEL_FIXED)
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
@@ -1634,34 +2085,38 @@ end
@test Pkg.dependencies()[exuuid].version.minor != 3
end
# `update` should prune manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "Unpruned")
- Pkg.activate(joinpath(tempdir, "Unpruned"))
- Pkg.update()
- @test haskey(Pkg.project().dependencies, "Example")
- Pkg.dependencies(exuuid) do pkg
- @test pkg.version > v"0.4.0"
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "Unpruned")
+ Pkg.activate(joinpath(tempdir, "Unpruned"))
+ Pkg.update()
+ @test haskey(Pkg.project().dependencies, "Example")
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.version > v"0.4.0"
+ end
+ @test !haskey(Pkg.dependencies(), unicode_uuid)
end
- @test !haskey(Pkg.dependencies(), unicode_uuid)
- end end
+ end
# `up` should work without a manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "SimplePackage")
- Pkg.activate(joinpath(tempdir, "SimplePackage"))
- Pkg.update()
- @test haskey(Pkg.project().dependencies, "Example")
- @test haskey(Pkg.project().dependencies, "Markdown")
- Pkg.dependencies(exuuid) do pkg
- @test pkg.name == "Example"
- @test pkg.is_tracking_registry
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "SimplePackage")
+ Pkg.activate(joinpath(tempdir, "SimplePackage"))
+ Pkg.update()
+ @test haskey(Pkg.project().dependencies, "Example")
+ @test haskey(Pkg.project().dependencies, "Markdown")
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.name == "Example"
+ @test pkg.is_tracking_registry
+ end
end
- end end
+ end
end
@testset "update: package state changes" begin
# basic update on old registered package
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
Pkg.update()
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1669,8 +2124,8 @@ end
end
end
# `update` should not update `pin`ed packages
- isolate(loaded_depot=true) do
- Pkg.add(name="Example",version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
Pkg.pin("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1685,7 +2140,7 @@ end
end
end
# stdlib special casing
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Markdown")
Pkg.update()
Pkg.dependencies(markdown_uuid) do pkg
@@ -1693,62 +2148,66 @@ end
end
end
# up should not affect `dev` packages
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- Pkg.develop(path=path)
- state = Pkg.dependencies()[simple_package_uuid]
- Pkg.update()
- @test Pkg.dependencies()[simple_package_uuid] == state
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ Pkg.develop(path = path)
+ state = Pkg.dependencies()[simple_package_uuid]
+ Pkg.update()
+ @test Pkg.dependencies()[simple_package_uuid] == state
+ end
+ end
# up and packages tracking repos
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
- Pkg.add(path=path)
- # test everything went ok
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test pkg.version == v"0.2.0"
- @test haskey(pkg.dependencies, "Example")
- @test haskey(pkg.dependencies, "Markdown")
- @test !haskey(pkg.dependencies, "Unicode")
- end
- simple_package_node = Pkg.dependencies()[simple_package_uuid]
- # now we bump the remote version
- mv(joinpath(path, "Project2.toml"), joinpath(path, "Project.toml"); force=true)
- new_commit = nothing
- LibGit2.with(LibGit2.GitRepo(path)) do repo
- LibGit2.add!(repo, "*")
- new_commit = string(LibGit2.commit(repo, "bump version"; author=TEST_SIG, committer=TEST_SIG))
- end
- # update with UPLEVEL != UPLEVEL_MAJOR should not update packages tracking repos
- Pkg.update(; level=Pkg.UPLEVEL_MINOR)
- @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
- Pkg.update(; level=Pkg.UPLEVEL_PATCH)
- @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
- Pkg.update(; level=Pkg.UPLEVEL_FIXED)
- @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
- # Update should not modify pinned packages which are tracking repos
- Pkg.pin("SimplePackage")
- Pkg.update()
- Pkg.free("SimplePackage")
- @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
- # update should update packages tracking repos if UPLEVEL_MAJOR
- Pkg.update()
- if !Sys.iswindows() # this test is very flaky on Windows, why?
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
+ Pkg.add(path = path)
+ # test everything went ok
Pkg.dependencies(simple_package_uuid) do pkg
@test pkg.name == "SimplePackage"
- @test pkg.version == v"0.3.0"
- @test !haskey(pkg.dependencies, "Example")
+ @test pkg.version == v"0.2.0"
+ @test haskey(pkg.dependencies, "Example")
@test haskey(pkg.dependencies, "Markdown")
- @test haskey(pkg.dependencies, "Unicode")
+ @test !haskey(pkg.dependencies, "Unicode")
+ end
+ simple_package_node = Pkg.dependencies()[simple_package_uuid]
+ # now we bump the remote version
+ mv(joinpath(path, "Project2.toml"), joinpath(path, "Project.toml"); force = true)
+ new_commit = nothing
+ LibGit2.with(LibGit2.GitRepo(path)) do repo
+ LibGit2.add!(repo, "*")
+ new_commit = string(LibGit2.commit(repo, "bump version"; author = TEST_SIG, committer = TEST_SIG))
+ end
+ # update with UPLEVEL != UPLEVEL_MAJOR should not update packages tracking repos
+ Pkg.update(; level = Pkg.UPLEVEL_MINOR)
+ @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
+ Pkg.update(; level = Pkg.UPLEVEL_PATCH)
+ @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
+ Pkg.update(; level = Pkg.UPLEVEL_FIXED)
+ @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
+ # Update should not modify pinned packages which are tracking repos
+ Pkg.pin("SimplePackage")
+ Pkg.update()
+ Pkg.free("SimplePackage")
+ @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
+ # update should update packages tracking repos if UPLEVEL_MAJOR
+ Pkg.update()
+ if !Sys.iswindows() # this test is very flaky on Windows, why?
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test pkg.version == v"0.3.0"
+ @test !haskey(pkg.dependencies, "Example")
+ @test haskey(pkg.dependencies, "Markdown")
+ @test haskey(pkg.dependencies, "Unicode")
+ end
end
end
- end end
+ end
# make sure that we preserve the state of packages which are not the target
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
Pkg.develop("Example")
- Pkg.add(name="JSON", version="0.18.0")
+ Pkg.add(name = "JSON", version = "0.18.0")
Pkg.add("Markdown")
Pkg.add("Unicode")
Pkg.update("Unicode")
@@ -1768,8 +2227,8 @@ end
@test haskey(Pkg.project().dependencies, "Markdown")
@test haskey(Pkg.project().dependencies, "Unicode")
end
- isolate(loaded_depot=true) do
- Pkg.add([(;name="Example", version="0.3.0"), (;name="JSON", version="0.21.0"), (;name="Parsers", version="1.1.2")])
+ isolate(loaded_depot = true) do
+ Pkg.add([(; name = "Example", version = "0.3.0"), (; name = "JSON", version = "0.21.0"), (; name = "Parsers", version = "1.1.2")])
Pkg.update("JSON")
Pkg.dependencies(json_uuid) do pkg
@test pkg.version > v"0.21.0"
@@ -1781,8 +2240,8 @@ end
@test pkg.version == v"1.1.2"
end
- Pkg.add(name="JSON", version="0.21.0")
- Pkg.update("JSON"; preserve=Pkg.PRESERVE_DIRECT)
+ Pkg.add(name = "JSON", version = "0.21.0")
+ Pkg.update("JSON"; preserve = Pkg.PRESERVE_DIRECT)
Pkg.dependencies(json_uuid) do pkg
@test pkg.version > v"0.21.0"
end
@@ -1793,10 +2252,10 @@ end
@test pkg.version == v"1.1.2"
end
- Pkg.add(name="JSON", version="0.21.0")
+ Pkg.add(name = "JSON", version = "0.21.0")
Pkg.rm("Parsers")
- Pkg.update("JSON"; preserve=Pkg.PRESERVE_DIRECT)
+ Pkg.update("JSON"; preserve = Pkg.PRESERVE_DIRECT)
Pkg.dependencies(json_uuid) do pkg
@test pkg.version > v"0.21.0"
end
@@ -1807,8 +2266,8 @@ end
@test pkg.version > v"1.1.2"
end
- Pkg.add([(;name="Example", version="0.3.0"), (;name="JSON", version="0.21.0"), (;name="Parsers", version="1.1.2")])
- Pkg.update("JSON"; preserve=Pkg.PRESERVE_NONE)
+ Pkg.add([(; name = "Example", version = "0.3.0"), (; name = "JSON", version = "0.21.0"), (; name = "Parsers", version = "1.1.2")])
+ Pkg.update("JSON"; preserve = Pkg.PRESERVE_NONE)
Pkg.dependencies(json_uuid) do pkg
@test pkg.version > v"0.21.0"
end
@@ -1822,6 +2281,8 @@ end
Pkg.dependencies(exuuid) do pkg
@test pkg.version > v"0.3.0"
end
+
+ @test_throws PkgError("`repo` is a private field of PackageSpec and should not be set directly") Pkg.add([Pkg.PackageSpec(; repo = Pkg.Types.GitRepo(source = "someurl"))])
end
end
@@ -1836,12 +2297,14 @@ end
@testset "update: caching" begin
# `up` should detect broken local packages
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
- Pkg.add(path=path)
- rm(joinpath(path, ".git"); force=true, recursive=true)
- @test_throws PkgError Pkg.update()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
+ Pkg.add(path = path)
+ rm(joinpath(path, ".git"); force = true, recursive = true)
+ @test_throws PkgError Pkg.update()
+ end
+ end
end
#
@@ -1849,26 +2312,27 @@ end
#
@testset "pin: input checking" begin
# a package must exist in the dep graph in order to be pinned
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test_throws PkgError Pkg.pin("Example")
end
# pinning to an arbitrary version should check for unregistered packages
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
- @test_throws PkgError("unable to pin unregistered package `Unregistered [dcb67f36]` to an arbitrary version"
- ) Pkg.pin(name="Unregistered", version="0.1.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
+ @test_throws PkgError(
+ "unable to pin unregistered package `Unregistered [dcb67f36]` to an arbitrary version"
+ ) Pkg.pin(name = "Unregistered", version = "0.1.0")
end
# pinning to an arbitrary version should check version exists
- isolate(loaded_depot=true) do
- Pkg.add(name="Example",rev="master")
- @test_throws ResolverError Pkg.pin(name="Example",version="100.0.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
+ @test_throws ResolverError Pkg.pin(name = "Example", version = "100.0.0")
end
end
@testset "pin: package state changes" begin
# regular registered package
- isolate(loaded_depot=true) do
- Pkg.add( name="Example", version="0.3.3")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.3")
Pkg.pin("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1876,8 +2340,8 @@ end
end
end
# package tracking repo
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
Pkg.pin("Unregistered")
Pkg.dependencies(unregistered_uuid) do pkg
@test !pkg.is_tracking_registry
@@ -1885,18 +2349,18 @@ end
end
end
# versioned pin
- isolate(loaded_depot=true) do
- Pkg.add( name="Example", version="0.3.3")
- Pkg.pin( name="Example", version="0.5.1")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.3")
+ Pkg.pin(name = "Example", version = "0.5.1")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test pkg.is_pinned
end
end
# pin should check for a valid version number
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="master")
- @test_throws ResolverError Pkg.pin(name="Example",version="100.0.0") # TODO maybe make a PkgError
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
+ @test_throws ResolverError Pkg.pin(name = "Example", version = "100.0.0") # TODO maybe make a PkgError
end
end
@@ -1905,21 +2369,24 @@ end
#
@testset "free: input checking" begin
# free checks for existing package
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test_throws PkgError Pkg.free("Example")
end
# free checks for unpinned package
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Unicode")
- @test_throws PkgError(string("expected package `Unicode [4ec0a83e]` to be",
- " pinned, tracking a path, or tracking a repository"
- )) Pkg.free("Unicode")
+ @test_throws PkgError(
+ string(
+ "expected package `Unicode [4ec0a83e]` to be",
+ " pinned, tracking a path, or tracking a repository"
+ )
+ ) Pkg.free("Unicode")
end
end
@testset "free: package state changes" begin
# free pinned package
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
Pkg.pin("Example")
Pkg.free("Example")
@@ -1929,8 +2396,8 @@ end
end
end
# free package tracking repo
- isolate(loaded_depot=true) do
- Pkg.add( name="Example", rev="master")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
Pkg.free("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1938,7 +2405,7 @@ end
end
end
# free developed package
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.develop("Example")
Pkg.free("Example")
Pkg.dependencies(exuuid) do pkg
@@ -1947,12 +2414,12 @@ end
end
end
# free should error when called on packages tracking unregistered packages
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
@test_throws PkgError("unable to free unregistered package `Unregistered [dcb67f36]`") Pkg.free("Unregistered")
end
- isolate(loaded_depot=true) do
- Pkg.develop(url="https://github.com/00vareladavid/Unregistered.jl")
+ isolate(loaded_depot = true) do
+ Pkg.develop(url = "https://github.com/00vareladavid/Unregistered.jl")
@test_throws PkgError("unable to free unregistered package `Unregistered [dcb67f36]`") Pkg.free("Unregistered")
end
end
@@ -1965,7 +2432,7 @@ end
Pkg.REPLMode.TEST_MODE[] = true
api, args, opts = first(Pkg.pkg"free Example")
@test api == Pkg.free
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test isempty(opts)
end
end
@@ -1975,13 +2442,29 @@ end
#
@testset "resolve" begin
# resolve should ignore `extras`
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "TestTarget")
- Pkg.activate(package_path)
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "TestTarget")
+ Pkg.activate(package_path)
+ Pkg.resolve()
+ @test !haskey(Pkg.dependencies(), markdown_uuid)
+ @test !haskey(Pkg.dependencies(), test_stdlib_uuid)
+ end
+ end
+ # resolve with repo-tracked package that has tree_hash in manifest (issue #4561)
+ # This tests that startswith/endswith correctly handle SHA1 tree_hash types
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/JuliaLang/Example.jl", rev = "v0.5.3")
+ # Remove both clones and packages so resolve needs to re-clone
+ rm(joinpath(DEPOT_PATH[1], "clones"); force = true, recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); force = true, recursive = true)
+ # This should not throw "MethodError: no method matching startswith(::Base.SHA1, ::String)"
Pkg.resolve()
- @test !haskey(Pkg.dependencies(), markdown_uuid)
- @test !haskey(Pkg.dependencies(), test_stdlib_uuid)
- end end
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.name == "Example"
+ @test isdir(pkg.source)
+ end
+ end
end
#
@@ -1989,22 +2472,99 @@ end
#
@testset "test" begin
# stdlib special casing
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("UUIDs")
Pkg.test("UUIDs")
end
# test args smoketest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "TestArguments")
- Pkg.activate(joinpath(tempdir, "TestArguments"))
- # test the old code path (no test/Project.toml)
- Pkg.test("TestArguments"; test_args=`a b`, julia_args=`--quiet --check-bounds=no`)
- Pkg.test("TestArguments"; test_args=["a", "b"], julia_args=["--quiet", "--check-bounds=no"])
- # test new code path
- touch(joinpath(tempdir, "TestArguments", "test", "Project.toml"))
- Pkg.test("TestArguments"; test_args=`a b`, julia_args=`--quiet --check-bounds=no`)
- Pkg.test("TestArguments"; test_args=["a", "b"], julia_args=["--quiet", "--check-bounds=no"])
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "TestArguments")
+ Pkg.activate(joinpath(tempdir, "TestArguments"))
+ # test the old code path (no test/Project.toml)
+ Pkg.test("TestArguments"; test_args = `a b`, julia_args = `--quiet --check-bounds=no`)
+ Pkg.test("TestArguments"; test_args = ["a", "b"], julia_args = ["--quiet", "--check-bounds=no"])
+ # test new code path
+ touch(joinpath(tempdir, "TestArguments", "test", "Project.toml"))
+ Pkg.test("TestArguments"; test_args = `a b`, julia_args = `--quiet --check-bounds=no`)
+ Pkg.test("TestArguments"; test_args = ["a", "b"], julia_args = ["--quiet", "--check-bounds=no"])
+ end
+ end
+
+ @testset "threads" begin
+ isolate(loaded_depot = true) do;
+ mktempdir() do dir
+ path = copy_test_package(dir, "TestThreads")
+ cd(path) do
+ # Do this all in a subprocess to protect against the parent having non-default threadpool sizes.
+ script = """
+ using Pkg, Test
+ @testset "JULIA_NUM_THREADS=1" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "1",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "0", # https://github.com/JuliaLang/julia/pull/57454
+ "JULIA_NUM_THREADS" => "1",
+ ) do
+ Pkg.test("TestThreads")
+ end
+ end
+ @testset "JULIA_NUM_THREADS=2" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "2",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "1",
+ "JULIA_NUM_THREADS" => "2",
+ ) do
+ Pkg.test("TestThreads")
+ end
+ end
+ @testset "JULIA_NUM_THREADS=2,0" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "2",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "0",
+ "JULIA_NUM_THREADS" => "2,0",
+ ) do
+ Pkg.test("TestThreads")
+ end
+ end
+
+ @testset "--threads=1" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "1",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "0", # https://github.com/JuliaLang/julia/pull/57454
+ "JULIA_NUM_THREADS" => nothing,
+ ) do
+ Pkg.test("TestThreads"; julia_args=`--threads=1`)
+ end
+ end
+ @testset "--threads=2" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "2",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "1",
+ "JULIA_NUM_THREADS" => nothing,
+ ) do
+ Pkg.test("TestThreads"; julia_args=`--threads=2`)
+ end
+ end
+ @testset "--threads=2,0" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "2",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "0",
+ "JULIA_NUM_THREADS" => nothing,
+ ) do
+ Pkg.test("TestThreads"; julia_args=`--threads=2,0`)
+ end
+ end
+ """
+ @test Utils.show_output_if_command_errors(
+ addenv(
+ `$(Base.julia_cmd()) --project=$(path) --startup-file=no -e "$script"`,
+ "JULIA_DEPOT_PATH" => join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":")
+ )
+ )
+ end
+ end
+ end
+ end
end
#
@@ -2012,66 +2572,76 @@ end
#
@testset "rm" begin
# simple rm
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
Pkg.rm("Example")
@test isempty(Pkg.project().dependencies)
@test isempty(Pkg.dependencies())
end
# remove should not alter other dependencies
- isolate(loaded_depot=true) do
- Pkg.add([(;name="Example"),
- (;name="JSON", version="0.18.0"),])
+ isolate(loaded_depot = true) do
+ Pkg.add(
+ [
+ (; name = "Example"),
+ (; name = "JSON", version = "0.18.0"),
+ ]
+ )
json = Pkg.dependencies()[json_uuid]
Pkg.rm("Example")
@test Pkg.dependencies()[json_uuid] == json
@test haskey(Pkg.project().dependencies, "JSON")
end
# rm should remove unused compat entries
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "BasicCompat")
- Pkg.activate(path)
- # TODO interface for `compat`
- @test haskey(Pkg.Types.Context().env.project.compat, "Example")
- @test haskey(Pkg.Types.Context().env.project.compat, "julia")
- Pkg.rm("Example")
- @test !haskey(Pkg.Types.Context().env.project.compat, "Example")
- @test haskey(Pkg.Types.Context().env.project.compat, "julia")
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "BasicCompat")
+ Pkg.activate(path)
+ # TODO interface for `compat`
+ @test haskey(Pkg.Types.Context().env.project.compat, "Example")
+ @test haskey(Pkg.Types.Context().env.project.compat, "julia")
+ Pkg.rm("Example")
+ @test !haskey(Pkg.Types.Context().env.project.compat, "Example")
+ @test haskey(Pkg.Types.Context().env.project.compat, "julia")
+ end
+ end
# rm should not unnecessarily remove compat entries
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "CompatExtras")
- Pkg.activate(path)
- @test haskey(Pkg.Types.Context().env.project.compat, "Aqua")
- @test haskey(Pkg.Types.Context().env.project.compat, "DataFrames")
- Pkg.rm("DataFrames")
- @test !haskey(Pkg.Types.Context().env.project.compat, "DataFrames")
- @test haskey(Pkg.Types.Context().env.project.compat, "Aqua")
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "CompatExtras")
+ Pkg.activate(path)
+ @test haskey(Pkg.Types.Context().env.project.compat, "Aqua")
+ @test haskey(Pkg.Types.Context().env.project.compat, "DataFrames")
+ Pkg.rm("DataFrames")
+ @test !haskey(Pkg.Types.Context().env.project.compat, "DataFrames")
+ @test haskey(Pkg.Types.Context().env.project.compat, "Aqua")
+ end
+ end
# rm removes unused recursive dependencies
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- Pkg.develop(path=path)
- Pkg.add(name="JSON", version="0.18.0")
- Pkg.rm("SimplePackage")
- @test haskey(Pkg.dependencies(), markdown_uuid)
- @test !haskey(Pkg.dependencies(), simple_package_uuid)
- @test !haskey(Pkg.dependencies(), exuuid)
- @test haskey(Pkg.dependencies(), json_uuid)
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ Pkg.develop(path = path)
+ Pkg.add(name = "JSON", version = "0.18.0")
+ Pkg.rm("SimplePackage")
+ @test haskey(Pkg.dependencies(), markdown_uuid)
+ @test !haskey(Pkg.dependencies(), simple_package_uuid)
+ @test !haskey(Pkg.dependencies(), exuuid)
+ @test haskey(Pkg.dependencies(), json_uuid)
+ end
+ end
# rm manifest mode
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
- Pkg.add(name="JSON", version="0.18.0")
- Pkg.rm("Random"; mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.add(name = "JSON", version = "0.18.0")
+ Pkg.rm("Random"; mode = Pkg.PKGMODE_MANIFEST)
@test haskey(Pkg.dependencies(), exuuid)
@test !haskey(Pkg.dependencies(), json_uuid)
end
# rm nonexistent packages warns but does not error
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
- @test_logs (:warn, r"not in project, ignoring") Pkg.rm(name="FooBar", uuid=UUIDs.UUID(0))
- @test_logs (:warn, r"not in manifest, ignoring") Pkg.rm(name="FooBar", uuid=UUIDs.UUID(0); mode=Pkg.PKGMODE_MANIFEST)
+ @test_logs (:warn, r"not in project, ignoring") Pkg.rm(name = "FooBar", uuid = UUIDs.UUID(0))
+ @test_logs (:warn, r"not in manifest, ignoring") Pkg.rm(name = "FooBar", uuid = UUIDs.UUID(0); mode = Pkg.PKGMODE_MANIFEST)
end
end
@@ -2080,15 +2650,15 @@ end
Pkg.REPLMode.TEST_MODE[] = true
api, args, opts = first(Pkg.pkg"rm Example")
@test api == Pkg.rm
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test isempty(opts)
api, args, opts = first(Pkg.pkg"rm --project Example")
@test api == Pkg.rm
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:mode => Pkg.PKGMODE_PROJECT)
api, args, opts = first(Pkg.pkg"rm --manifest Example")
@test api == Pkg.rm
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:mode => Pkg.PKGMODE_MANIFEST)
end
end
@@ -2098,7 +2668,7 @@ end
#
@testset "all" begin
# pin all, free all, rm all packages
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add(["Example", "JSON"])
Pkg.pin(all_pkgs = true)
@@ -2158,7 +2728,7 @@ end
@test isempty(opts)
api, args, opts = first(Pkg.pkg"build Example")
@test api == Pkg.build
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test isempty(opts)
api, args, opts = first(Pkg.pkg"build --verbose")
@test api == Pkg.build
@@ -2166,37 +2736,44 @@ end
@test opts == Dict(:verbose => true)
api, args, opts = first(Pkg.pkg"build -v Foo Bar")
@test api == Pkg.build
- @test args == [Pkg.PackageSpec(;name="Foo"), Pkg.PackageSpec(;name="Bar")]
+ @test args == [Pkg.PackageSpec(; name = "Foo"), Pkg.PackageSpec(; name = "Bar")]
@test opts == Dict(:verbose => true)
end
# Test package that fails build
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "FailBuild")
- Pkg.activate(package_path)
- @test_throws PkgError Pkg.build()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "FailBuild")
+ Pkg.activate(package_path)
+ @test_throws PkgError Pkg.build()
+ end
+ end
# Build log location
- isolate(loaded_depot=true) do; mktempdir() do tmp
- path = git_init_package(tmp, joinpath(@__DIR__, "test_packages", "FailBuild"))
- # Log file in the directory when it is deved
- Pkg.develop(path=path; io=devnull)
- log_file_dev = joinpath(path, "deps", "build.log")
- @test !isfile(log_file_dev)
- @test_throws PkgError Pkg.build("FailBuild"; io=devnull)
- @test isfile(log_file_dev)
- @test occursin("oops", read(log_file_dev, String))
- # Log file in scratchspace when added
- addpath = dirname(dirname(Base.find_package("FailBuild")))
- log_file_add = joinpath(path, "deps", "build.log")
- @test_throws PkgError Pkg.add(path=path; io=devnull)
- @test !isfile(joinpath(Base.find_package("FailBuild"), "..", "..", "deps", "build.log"))
- log_file_add = joinpath(DEPOT_PATH[1], "scratchspaces",
- "44cfe95a-1eb2-52ea-b672-e2afdf69b78f", "f99d57aad0e5eb2434491b47bac92bb88d463001", "build.log")
- @test isfile(log_file_add)
- @test occursin("oops", read(log_file_add, String))
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tmp
+ path = git_init_package(tmp, joinpath(@__DIR__, "test_packages", "FailBuild"))
+ # Log file is written into the package directory when the package is dev'ed (tracked by path)
+ Pkg.develop(path = path; io = devnull)
+ log_file_dev = joinpath(path, "deps", "build.log")
+ @test !isfile(log_file_dev)
+ @test_throws PkgError Pkg.build("FailBuild"; io = devnull)
+ @test isfile(log_file_dev)
+ @test occursin("oops", read(log_file_dev, String))
+ # Log file in scratchspace when added
+ addpath = dirname(dirname(Base.find_package("FailBuild")))
+ log_file_add = joinpath(path, "deps", "build.log")
+ @test_throws PkgError Pkg.add(path = path; io = devnull)
+ @test !isfile(joinpath(Base.find_package("FailBuild"), "..", "..", "deps", "build.log"))
+ log_file_add = joinpath(
+ DEPOT_PATH[1], "scratchspaces",
+ "44cfe95a-1eb2-52ea-b672-e2afdf69b78f", "f99d57aad0e5eb2434491b47bac92bb88d463001", "build.log"
+ )
+ @test isfile(log_file_add)
+ @test isfile(joinpath(DEPOT_PATH[1], "scratchspaces", "CACHEDIR.TAG"))
+ @test occursin("oops", read(log_file_add, String))
+ end
+ end
end
#
@@ -2211,7 +2788,7 @@ end
@test isempty(opts)
api, opts = first(Pkg.pkg"gc --all")
@test api == Pkg.gc
- @test opts[:collect_delay] == Hour(0)
+ # N.B.: `--all` is now a no-op, but is retained for now for compatibility.
end
end
@@ -2267,6 +2844,24 @@ end
end
end
end
+ # Test generate . (issue #2821)
+ isolate(loaded_depot = true) do
+ cd_tempdir() do dir
+ mkdir("MyNewPkg")
+ cd("MyNewPkg") do
+ Pkg.generate(".")
+ @test isfile("Project.toml")
+ @test isfile("src/MyNewPkg.jl")
+ @test Pkg.Types.read_project("Project.toml").name == "MyNewPkg"
+ end
+
+ mkdir("NonEmpty")
+ write("NonEmpty/existing.txt", "content")
+ cd("NonEmpty") do
+ @test_throws Pkg.Types.PkgError Pkg.generate(".")
+ end
+ end
+ end
end
#
@@ -2274,55 +2869,58 @@ end
#
@testset "Pkg.status" begin
# other
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
+ # An explicit IO is required even though we discard the output: Pkg short-circuits
+ # when the IO is devnull, and we also want to keep this noise out of the test logs
+ io = PipeBuffer()
@test_deprecated Pkg.status(Pkg.PKGMODE_MANIFEST)
- @test_logs (:warn, r"diff option only available") match_mode=:any Pkg.status(diff=true)
+ @test_logs (:warn, r"diff option only available") match_mode = :any Pkg.status(diff = true; io)
end
# State changes
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
io = IOBuffer()
# Basic Add
- Pkg.add(Pkg.PackageSpec(; name="Example", version="0.3.0"); io=io)
+ Pkg.add(Pkg.PackageSpec(; name = "Example", version = "0.3.0"); io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] \+ Example v0\.3\.0", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] \+ Example v0\.3\.0", output)
# Double add should not claim "Updating"
- Pkg.add(Pkg.PackageSpec(; name="Example", version="0.3.0"); io=io)
+ Pkg.add(Pkg.PackageSpec(; name = "Example", version = "0.3.0"); io = io)
output = String(take!(io))
@test occursin(r"No packages added to or removed from `.+Project\.toml`", output)
@test occursin(r"No packages added to or removed from `.+Manifest\.toml`", output)
# From tracking registry to tracking repo
- Pkg.add(Pkg.PackageSpec(; name="Example", rev="master"); io=io)
+ Pkg.add(Pkg.PackageSpec(; name = "Example", rev = "master"); io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v0\.3\.0 ⇒ v\d\.\d\.\d `https://github\.com/JuliaLang/Example\.jl\.git#master`", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v0\.3\.0 ⇒ v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master`", output)
# From tracking repo to tracking path
- Pkg.develop("Example"; io=io)
+ Pkg.develop("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `https://github\.com/JuliaLang/Example\.jl\.git#master` ⇒ v\d\.\d\.\d `.+`", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `https://github\.com/JuliaLang/Example\.jl\.git#master` ⇒ v\d\.\d\.\d `.+`", output)
# From tracking path to tracking repo
- Pkg.add(Pkg.PackageSpec(; name="Example", rev="master"); io=io)
+ Pkg.add(Pkg.PackageSpec(; name = "Example", rev = "master"); io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `.+` ⇒ v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master`", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `.+` ⇒ v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master`", output)
# From tracking repo to tracking registered version
- Pkg.free("Example"; io=io)
+ Pkg.free("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v\d\.\d\.\d", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v\d\.\d\.\d", output)
# Removing registered version
- Pkg.rm("Example"; io=io)
+ Pkg.rm("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project.toml`", output)
@test occursin(r"\[7876af07\] - Example v\d\.\d\.\d", output)
@@ -2331,31 +2929,31 @@ end
# Pinning a registered package
Pkg.add("Example")
- Pkg.pin("Example"; io=io)
+ Pkg.pin("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d ⇒ v\d\.\d\.\d ⚲", output)
@test occursin(r"Updating `.+Manifest.toml`", output)
# Free a pinned package
- Pkg.free("Example"; io=io)
+ Pkg.free("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d ⚲ ⇒ v\d\.\d\.\d", output)
@test occursin(r"Updating `.+Manifest.toml`", output)
end
# Project Status API
- isolate(loaded_depot=true) do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
io = PipeBuffer()
## empty project
- Pkg.status(;io=io)
+ Pkg.status(; io = io)
@test occursin(r"Status `.+Project.toml` \(empty project\)", readline(io))
## loaded project
Pkg.add("Markdown")
- Pkg.add( name="JSON", version="0.18.0")
+ Pkg.add(name = "JSON", version = "0.18.0")
Pkg.develop("Example")
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
Pkg.status(; io = io)
@test occursin(r"Status `.+Project\.toml`", readline(io))
@test occursin(r"\[7876af07\] Example\s*v\d\.\d\.\d\s*`.+`", readline(io))
@@ -2365,24 +2963,24 @@ end
end
## status warns when package not installed
isolate() do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
Pkg.activate(joinpath(@__DIR__, "test_packages", "Status"))
io = PipeBuffer()
- Pkg.status(; io=io)
+ Pkg.status(; io = io)
@test occursin(r"Status `.+Project.toml`", readline(io))
@test occursin(r"^→⌃ \[7876af07\] Example\s*v\d\.\d\.\d", readline(io))
@test occursin(r"^ \[d6f4376e\] Markdown", readline(io))
@test "Info Packages marked with → are not downloaded, use `instantiate` to download" == strip(readline(io))
@test "Info Packages marked with ⌃ have new versions available and may be upgradable." == strip(readline(io))
- Pkg.status(;io=io, mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST)
@test occursin(r"Status `.+Manifest.toml`", readline(io))
@test occursin(r"^→⌃ \[7876af07\] Example\s*v\d\.\d\.\d", readline(io))
@test occursin(r"^ \[2a0f44e3\] Base64", readline(io))
@test occursin(r"^ \[d6f4376e\] Markdown", readline(io))
@test "Info Packages marked with → are not downloaded, use `instantiate` to download" == strip(readline(io))
@test "Info Packages marked with ⌃ have new versions available and may be upgradable." == strip(readline(io))
- Pkg.instantiate(;io=devnull) # download Example
- Pkg.status(;io=io, mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.instantiate(; io = devnull) # download Example
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST)
@test occursin(r"Status `.+Manifest.toml`", readline(io))
@test occursin(r"^⌃ \[7876af07\] Example\s*v\d\.\d\.\d", readline(io))
@test occursin(r"^ \[2a0f44e3\] Base64", readline(io))
@@ -2390,58 +2988,69 @@ end
@test "Info Packages marked with ⌃ have new versions available and may be upgradable." == strip(readline(io))
end
# Manifest Status API
- isolate(loaded_depot=true) do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
io = PipeBuffer()
## empty manifest
- Pkg.status(;io=io, mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST)
@test occursin(r"Status `.+Manifest\.toml` \(empty manifest\)", readline(io))
# loaded manifest
- Pkg.add( name="Example", version="0.3.0")
+ Pkg.add(name = "Example", version = "0.3.0")
Pkg.add("Markdown")
- Pkg.status(; io=io, mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST)
statuslines = readlines(io)
@test occursin(r"Status `.+Manifest.toml`", first(statuslines))
@test any(l -> occursin(r"\[7876af07\] Example\s*v0\.3\.0", l), statuslines)
@test any(l -> occursin(r"\[2a0f44e3\] Base64", l), statuslines)
@test any(l -> occursin(r"\[d6f4376e\] Markdown", l), statuslines)
+ # Test that manifest status with filter shows package and its dependencies (issue #1989)
+ Pkg.add(name = "JSON", version = "0.21.0") # JSON has dependencies
+ Pkg.status("JSON"; io = io, mode = Pkg.PKGMODE_MANIFEST)
+ statuslines = readlines(io)
+ @test occursin(r"Status `.+Manifest.toml`", first(statuslines))
+ @test any(l -> occursin(r"\[682c06a0\] JSON\s*v0\.21\.0", l), statuslines)
+ # JSON's dependencies (Parsers, Dates, Mmap, Unicode) should also be shown
+ @test any(l -> occursin(r"Parsers", l), statuslines)
+ # But Example and Markdown (not dependencies of JSON) should not be shown
+ @test !any(l -> occursin(r"\[7876af07\] Example", l), statuslines)
+ @test !any(l -> occursin(r"\[d6f4376e\] Markdown", l), statuslines)
end
# Diff API
- isolate(loaded_depot=true) do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
io = PipeBuffer()
projdir = dirname(Pkg.project().path)
mkpath(projdir)
git_init_and_commit(projdir)
## empty project + empty diff
- Pkg.status(; io=io, diff=true)
+ Pkg.status(; io = io, diff = true)
@test occursin(r"No packages added to or removed from `.+Project\.toml`", readline(io))
- Pkg.status(; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
@test occursin(r"No packages added to or removed from `.+Manifest\.toml`", readline(io))
### empty diff + filter
- Pkg.status("Example"; io=io, diff=true)
+ Pkg.status("Example"; io = io, diff = true)
@test occursin(r"No packages added to or removed from `.+Project\.toml`", readline(io))
## non-empty project but empty diff
Pkg.add("Markdown")
git_init_and_commit(dirname(Pkg.project().path))
- Pkg.status(; io=io, diff=true)
+ Pkg.status(; io = io, diff = true)
@test occursin(r"No packages added to or removed from `.+Project\.toml`", readline(io))
- Pkg.status(; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
@test occursin(r"No packages added to or removed from `.+Manifest\.toml`", readline(io))
### filter should still show "empty diff"
- Pkg.status("Example"; io=io, diff=true)
+ Pkg.status("Example"; io = io, diff = true)
@test occursin(r"No packages added to or removed from `.+Project\.toml`", readline(io))
## non-empty project + non-empty diff
Pkg.rm("Markdown")
- Pkg.add(name="Example", version="0.3.0")
+ Pkg.add(name = "Example", version = "0.3.0")
## diff project
- Pkg.status(; io=io, diff=true)
+ Pkg.status(; io = io, diff = true)
@test occursin(r"Diff `.+Project\.toml`", readline(io))
@test occursin(r"\[7876af07\] \+ Example\s*v0\.3\.0", readline(io))
@test occursin(r"\[d6f4376e\] - Markdown", readline(io))
@test occursin("Info Packages marked with ⌃ have new versions available and may be upgradable.", readline(io))
## diff manifest
- Pkg.status(; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
statuslines = readlines(io)
@test occursin(r"Diff `.+Manifest.toml`", first(statuslines))
@test any(l -> occursin(r"\[7876af07\] \+ Example\s*v0\.3\.0", l), statuslines)
@@ -2449,32 +3058,32 @@ end
@test any(l -> occursin(r"\[d6f4376e\] - Markdown", l), statuslines)
@test any(l -> occursin("Info Packages marked with ⌃ have new versions available and may be upgradable.", l), statuslines)
## diff project with filtering
- Pkg.status("Markdown"; io=io, diff=true)
+ Pkg.status("Markdown"; io = io, diff = true)
@test occursin(r"Diff `.+Project\.toml`", readline(io))
@test occursin(r"\[d6f4376e\] - Markdown", readline(io))
## empty diff + filter
- Pkg.status("Base64"; io=io, diff=true)
+ Pkg.status("Base64"; io = io, diff = true)
@test occursin(r"No Matches in diff for `.+Project\.toml`", readline(io))
## diff manifest with filtering
- Pkg.status("Base64"; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status("Base64"; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
@test occursin(r"Diff `.+Manifest.toml`", readline(io))
@test occursin(r"\[2a0f44e3\] - Base64", readline(io))
## manifest diff + empty filter
- Pkg.status("FooBar"; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status("FooBar"; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
@test occursin(r"No Matches in diff for `.+Manifest.toml`", readline(io))
end
# Outdated API
- isolate(loaded_depot=true) do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
- Pkg.add("Example"; io=devnull)
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
+ Pkg.add("Example"; io = devnull)
v = Pkg.dependencies()[exuuid].version
io = IOBuffer()
- Pkg.add(Pkg.PackageSpec(name="Example", version="0.4.0"); io=devnull)
- Pkg.status(; outdated=true, io=io)
+ Pkg.add(Pkg.PackageSpec(name = "Example", version = "0.4.0"); io = devnull)
+ Pkg.status(; outdated = true, io = io)
str = String(take!(io))
@test occursin(Regex("⌃\\s*\\[7876af07\\] Example\\s*v0.4.0\\s*\\( tmp) do
+ withenv("JULIA_DEPOT_PATH" => tmp * (Sys.iswindows() ? ";" : ":")) do
Base.init_depot_path()
cp(joinpath(@__DIR__, "test_packages", "BasicSandbox"), joinpath(tmp, "BasicSandbox"))
git_init_and_commit(joinpath(tmp, "BasicSandbox"))
cd(tmp) do
- Pkg.add(path="BasicSandbox")
+ Pkg.add(path = "BasicSandbox")
end
end
end
end
end
-using Pkg.Types: is_stdlib
-@testset "is_stdlib() across versions" begin
- HistoricalStdlibVersions.register!()
-
- networkoptions_uuid = UUID("ca575930-c2e3-43a9-ace4-1e988b2c1908")
- pkg_uuid = UUID("44cfe95a-1eb2-52ea-b672-e2afdf69b78f")
-
- # Test NetworkOptions across multiple versions (It became an stdlib in v1.6+, and was registered)
- @test is_stdlib(networkoptions_uuid)
- @test is_stdlib(networkoptions_uuid, v"1.6")
- @test !is_stdlib(networkoptions_uuid, v"1.5")
- @test !is_stdlib(networkoptions_uuid, v"1.0.0")
- @test !is_stdlib(networkoptions_uuid, v"0.7")
- @test !is_stdlib(networkoptions_uuid, nothing)
-
- # Pkg is an unregistered stdlib and has always been an stdlib
- @test is_stdlib(pkg_uuid)
- @test is_stdlib(pkg_uuid, v"1.0")
- @test is_stdlib(pkg_uuid, v"1.6")
- @test is_stdlib(pkg_uuid, v"999.999.999")
- @test is_stdlib(pkg_uuid, v"0.7")
- @test is_stdlib(pkg_uuid, nothing)
-
- HistoricalStdlibVersions.unregister!()
- # Test that we can probe for stdlibs for the current version with no STDLIBS_BY_VERSION,
- # but that we throw a PkgError if we ask for a particular julia version.
- @test is_stdlib(networkoptions_uuid)
- @test_throws Pkg.Types.PkgError is_stdlib(networkoptions_uuid, v"1.6")
-end
-
-
-@testset "Pkg.add() with julia_version" begin
- HistoricalStdlibVersions.register!()
-
- # A package with artifacts that went from normal package -> stdlib
- gmp_jll_uuid = "781609d7-10c4-51f6-84f2-b8444358ff6d"
- # A package that has always only ever been an stdlib
- linalg_uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
- # A package that went from normal package - >stdlib
- networkoptions_uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
-
- function get_manifest_block(name)
- manifest_path = joinpath(dirname(Base.active_project()), "Manifest.toml")
- @test isfile(manifest_path)
- deps = Base.get_deps(TOML.parsefile(manifest_path))
- @test haskey(deps, name)
- return only(deps[name])
- end
-
- isolate(loaded_depot=true) do
- # Next, test that if we ask for `v1.5` it DOES have a version, and that GMP_jll installs v6.1.X
- Pkg.add(["NetworkOptions", "GMP_jll"]; julia_version=v"1.5")
- no_block = get_manifest_block("NetworkOptions")
- @test haskey(no_block, "uuid")
- @test no_block["uuid"] == networkoptions_uuid
- @test haskey(no_block, "version")
-
- gmp_block = get_manifest_block("GMP_jll")
- @test haskey(gmp_block, "uuid")
- @test gmp_block["uuid"] == gmp_jll_uuid
- @test haskey(gmp_block, "version")
- @test startswith(gmp_block["version"], "6.1.2")
-
- # Test that the artifact of GMP_jll contains the right library
- @test haskey(gmp_block, "git-tree-sha1")
- gmp_jll_dir = Pkg.Operations.find_installed("GMP_jll", Base.UUID(gmp_jll_uuid), Base.SHA1(gmp_block["git-tree-sha1"]))
- @test isdir(gmp_jll_dir)
- artifacts_toml = joinpath(gmp_jll_dir, "Artifacts.toml")
- @test isfile(artifacts_toml)
- meta = artifact_meta("GMP", artifacts_toml)
-
- # `meta` can be `nothing` on some of our newer platforms; we _know_ this should
- # not be the case on the following platforms, so we check these explicitly to
- # ensure that we haven't accidentally broken something, and then we gate some
- # following tests on whether or not `meta` is `nothing`:
- for arch in ("x86_64", "i686"), os in ("linux", "mac", "windows")
- if platforms_match(HostPlatform(), Platform(arch, os))
- @test meta !== nothing
- end
- end
-
- # These tests require a matching platform artifact for this old version of GMP_jll,
- # which is not the case on some of our newer platforms.
- if meta !== nothing
- gmp_artifact_path = artifact_path(Base.SHA1(meta["git-tree-sha1"]))
- @test isdir(gmp_artifact_path)
-
- # On linux, we can check the filename to ensure it's grabbing the correct library
- if Sys.islinux()
- libgmp_filename = joinpath(gmp_artifact_path, "lib", "libgmp.so.10.3.2")
- @test isfile(libgmp_filename)
- end
- end
- end
-
- # Next, test that if we ask for `v1.6`, GMP_jll gets `v6.2.0`, and for `v1.7`, it gets `v6.2.1`
- function do_gmp_test(julia_version, gmp_version)
- isolate(loaded_depot=true) do
- Pkg.add("GMP_jll"; julia_version)
- gmp_block = get_manifest_block("GMP_jll")
- @test haskey(gmp_block, "uuid")
- @test gmp_block["uuid"] == gmp_jll_uuid
- @test haskey(gmp_block, "version")
- @test startswith(gmp_block["version"], string(gmp_version))
- end
- end
- do_gmp_test(v"1.6", v"6.2.0")
- do_gmp_test(v"1.7", v"6.2.1")
-
- isolate(loaded_depot=true) do
- # Next, test that if we ask for `nothing`, NetworkOptions has a `version` but `LinearAlgebra` does not.
- Pkg.add(["LinearAlgebra", "NetworkOptions"]; julia_version=nothing)
- no_block = get_manifest_block("NetworkOptions")
- @test haskey(no_block, "uuid")
- @test no_block["uuid"] == networkoptions_uuid
- @test haskey(no_block, "version")
- linalg_block = get_manifest_block("LinearAlgebra")
- @test haskey(linalg_block, "uuid")
- @test linalg_block["uuid"] == linalg_uuid
- @test !haskey(linalg_block, "version")
- end
-
- isolate(loaded_depot=true) do
- # Next, test that stdlibs do not get dependencies from the registry
- # NOTE: this test depends on the fact that in Julia v1.6+ we added
- # "fake" JLLs that do not depend on Pkg while the "normal" p7zip_jll does.
- # A future p7zip_jll in the registry may not depend on Pkg, so be sure
- # to verify your assumptions when updating this test.
- Pkg.add("p7zip_jll")
- p7zip_jll_uuid = UUID("3f19e933-33d8-53b3-aaab-bd5110c3b7a0")
- @test !("Pkg" in keys(Pkg.dependencies()[p7zip_jll_uuid].dependencies))
- end
-
- HistoricalStdlibVersions.unregister!()
-end
-
-
@testset "Issue #2931" begin
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
temp_pkg_dir() do path
name = "Example"
version = "0.5.3"
@@ -3163,7 +3723,7 @@ end
# Delete directory where the package would be installed
pkg_dir = Pkg.Operations.find_installed(name, exuuid, tree_hash)
- rm(pkg_dir; recursive=true, force=true)
+ rm(pkg_dir; recursive = true, force = true)
# (Re-)download sources
Pkg.Operations.download_source(ctx)
@@ -3174,49 +3734,86 @@ end
end
end
-if :version in fieldnames(Base.PkgOrigin)
-@testset "sysimage functionality" begin
- old_sysimage_modules = copy(Base._sysimage_modules)
- old_pkgorigins = copy(Base.pkgorigins)
- try
- # Fake having a packages in the sysimage.
- json_pkgid = Base.PkgId(json_uuid, "JSON")
- push!(Base._sysimage_modules, json_pkgid)
- Base.pkgorigins[json_pkgid] = Base.PkgOrigin(nothing, nothing, v"0.20.1")
- isolate(loaded_depot=true) do
- Pkg.add("JSON"; io=devnull)
- Pkg.dependencies(json_uuid) do pkg
- pkg.version == v"0.20.1"
+@testset "Issue #4345: pidfile in writable location when depot is readonly" begin
+ isolate(loaded_depot = false) do
+ mktempdir() do readonly_depot
+ mktempdir() do writable_depot
+ # Set up initial depot with a package
+ old_depot_path = copy(DEPOT_PATH)
+ try
+ empty!(DEPOT_PATH)
+ push!(DEPOT_PATH, readonly_depot)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+
+ Pkg.activate(temp = true)
+ # Install Example.jl in the initial depot
+ Pkg.add(name = "Example", version = "0.5.3")
+
+ # Make the depot read-only
+ run(`chmod -R -w $readonly_depot`)
+
+ # Add writable depot to front of DEPOT_PATH
+ pushfirst!(DEPOT_PATH, writable_depot)
+
+ # Create a new temporary environment and try to add a package
+ # that depends on something in the readonly depot
+ Pkg.activate(temp = true)
+ # This should not fail with permission denied on pidfile creation
+ # The fix ensures pidfiles are created in writable locations
+ @test_nowarn Pkg.add(name = "Example", version = "0.5.3")
+ finally
+ # Restore depot path and make readonly depot writable again for cleanup
+ empty!(DEPOT_PATH)
+ append!(DEPOT_PATH, old_depot_path)
+ run(`chmod -R +w $readonly_depot`)
+ end
end
- io = IOBuffer()
- Pkg.status(; outdated=true, io=io)
- str = String(take!(io))
- @test occursin("⌅ [682c06a0] JSON v0.20.1", str)
- @test occursin("[sysimage]", str)
-
- @test_throws PkgError Pkg.add(name="JSON", rev="master"; io=devnull)
- @test_throws PkgError Pkg.develop("JSON"; io=devnull)
+ end
+ end
+end
- Pkg.respect_sysimage_versions(false)
- Pkg.add("JSON"; io=devnull)
- Pkg.dependencies(json_uuid) do pkg
- pkg.version != v"0.20.1"
+if :version in fieldnames(Base.PkgOrigin)
+ @testset "sysimage functionality" begin
+ old_sysimage_modules = copy(Base._sysimage_modules)
+ old_pkgorigins = copy(Base.pkgorigins)
+ try
+ # Fake having a packages in the sysimage.
+ json_pkgid = Base.PkgId(json_uuid, "JSON")
+ push!(Base._sysimage_modules, json_pkgid)
+ Base.pkgorigins[json_pkgid] = Base.PkgOrigin(nothing, nothing, v"0.20.1")
+ isolate(loaded_depot = true) do
+ Pkg.add("JSON"; io = devnull)
+ Pkg.dependencies(json_uuid) do pkg
+ pkg.version == v"0.20.1"
+ end
+ io = IOBuffer()
+ Pkg.status(; outdated = true, io = io)
+ str = String(take!(io))
+ @test occursin("⌅ [682c06a0] JSON v0.20.1", str)
+ @test occursin("[sysimage]", str)
+
+ @test_throws PkgError Pkg.add(name = "JSON", rev = "master"; io = devnull)
+ @test_throws PkgError Pkg.develop("JSON"; io = devnull)
+
+ Pkg.respect_sysimage_versions(false)
+ Pkg.add("JSON"; io = devnull)
+ Pkg.dependencies(json_uuid) do pkg
+ pkg.version != v"0.20.1"
+ end
end
+ finally
+ copy!(Base._sysimage_modules, old_sysimage_modules)
+ copy!(Base.pkgorigins, old_pkgorigins)
+ Pkg.respect_sysimage_versions(true)
end
- finally
- copy!(Base._sysimage_modules, old_sysimage_modules)
- copy!(Base.pkgorigins, old_pkgorigins)
- Pkg.respect_sysimage_versions(true)
end
end
-end
temp_pkg_dir() do project_path
@testset "test entryfile entries" begin
mktempdir() do dir
- path = abspath(joinpath(dirname(pathof(Pkg)), "../test", "test_packages", "ProjectPath"))
- cp(path, joinpath(dir, "ProjectPath"))
- cd(joinpath(dir, "ProjectPath")) do
+ path = copy_test_package(dir, "ProjectPath")
+ cd(path) do
with_current_env() do
Pkg.resolve()
@test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using ProjectPath'`))
@@ -3227,20 +3824,122 @@ temp_pkg_dir() do project_path
end
end
@testset "test resolve with tree hash" begin
- mktempdir() do dir
- path = abspath(joinpath(@__DIR__, "../test", "test_packages", "ResolveWithRev"))
- cp(path, joinpath(dir, "ResolveWithRev"))
- cd(joinpath(dir, "ResolveWithRev")) do
- with_current_env() do
- @test !isfile("Manifest.toml")
- @test !isdir(joinpath(DEPOT_PATH[1], "packages", "Example"))
- Pkg.resolve()
- @test isdir(joinpath(DEPOT_PATH[1], "packages", "Example"))
- rm(joinpath(DEPOT_PATH[1], "packages", "Example"); recursive = true)
- Pkg.resolve()
+ isolate() do
+ mktempdir() do dir
+ path = copy_test_package(dir, "ResolveWithRev")
+ cd(path) do
+ with_current_env() do
+ @test !isfile("Manifest.toml")
+ @test !isdir(joinpath(DEPOT_PATH[1], "packages", "Example"))
+ Pkg.resolve()
+ @test isdir(joinpath(DEPOT_PATH[1], "packages", "Example"))
+ rm(joinpath(DEPOT_PATH[1], "packages", "Example"); recursive = true)
+ Pkg.resolve()
+ end
+ end
+ end
+ end
+end
+
+@testset "status diff non-root" begin
+ isolate(loaded_depot = true) do
+ cd_tempdir() do dir
+ Pkg.generate("A")
+ git_init_and_commit(".")
+ Pkg.activate("A")
+ Pkg.add("Example")
+ io = IOBuffer()
+ Pkg.status(; io, diff = true)
+ str = String(take!(io))
+ @test occursin("+ Example", str)
+ end
+ end
+end
+
+@testset "test instantiate with sources with only rev" begin
+ isolate() do
+ mktempdir() do dir
+ cp(joinpath(@__DIR__, "test_packages", "sources_only_rev", "Project.toml"), joinpath(dir, "Project.toml"))
+ cd(dir) do
+ with_current_env() do
+ @test !isfile("Manifest.toml")
+ Pkg.instantiate()
+ uuid, info = only(Pkg.dependencies())
+ @test info.git_revision == "ba3d6704f09330ae973773496a4212f85e0ffe45"
+ @test info.git_source == "https://github.com/JuliaLang/Example.jl.git"
+ end
end
end
end
end
+@testset "status showing incompatible loaded deps" begin
+ isolate(loaded_depot = true) do
+ cmd = addenv(`$(Base.julia_cmd()) --color=no --startup-file=no -e "
+ using Pkg
+ Pkg.activate(temp=true)
+ Pkg.add(Pkg.PackageSpec(name=\"Example\", version=v\"0.5.4\"))
+ using Example
+ Pkg.activate(temp=true)
+ Pkg.add(Pkg.PackageSpec(name=\"Example\", version=v\"0.5.5\"))
+ "`, "JULIA_DEPOT_PATH" => join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":"))
+ iob = IOBuffer()
+ run(pipeline(cmd, stderr = iob, stdout = iob))
+ out = String(take!(iob))
+ @test occursin("[loaded: v0.5.4]", out)
+ end
+end
+
+@test allunique(unique([Pkg.PackageSpec(path = "foo"), Pkg.PackageSpec(path = "foo")]))
+
+# Test the readonly functionality
+@testset "Readonly Environment Tests" begin
+ isolate() do
+ cd_tempdir() do dir
+ # Activate the environment
+ Pkg.activate(".")
+
+ # Test readonly API - should be false initially
+ @test Pkg.readonly() == false
+
+ # Add a package (should work fine)
+ Pkg.add("Test")
+
+ # Enable readonly mode using new API
+ previous_state = Pkg.readonly(true)
+ @test previous_state == false
+ @test Pkg.readonly() == true
+
+ # Test that status shows readonly indicator
+ io = IOBuffer()
+ Pkg.status(io = io)
+ status_output = String(take!(io))
+ @test occursin("(readonly)", status_output)
+
+ # These operations should fail with early readonly check
+ @test_throws Pkg.Types.PkgError Pkg.add("Dates")
+ @test_throws Pkg.Types.PkgError Pkg.rm("Test")
+ @test_throws Pkg.Types.PkgError Pkg.update()
+ @test_throws Pkg.Types.PkgError Pkg.pin("Test")
+ @test_throws Pkg.Types.PkgError Pkg.free("Test")
+ @test_throws Pkg.Types.PkgError Pkg.develop("Example")
+
+ # Disable readonly mode
+ previous_state = Pkg.readonly(false)
+ @test previous_state == true
+ @test Pkg.readonly() == false
+
+ # Test that status no longer shows readonly indicator
+ io = IOBuffer()
+ Pkg.status(io = io)
+ status_output = String(take!(io))
+ @test !occursin("(readonly)", status_output)
+
+ # Operations should work again
+ @test_nowarn Pkg.add("Random")
+ @test_nowarn Pkg.rm("Random")
+ end
+ end
+end
+
end #module
diff --git a/test/pkg.jl b/test/pkg.jl
index 2793c246e2..8986dfdf6c 100644
--- a/test/pkg.jl
+++ b/test/pkg.jl
@@ -24,44 +24,44 @@ const PackageSpec = Pkg.Types.PackageSpec
import Pkg.Types: semver_spec, VersionSpec
@testset "semver notation" begin
@test semver_spec("^1.2.3") == VersionSpec("1.2.3-1")
- @test semver_spec("^1.2") == VersionSpec("1.2.0-1")
- @test semver_spec("^1") == VersionSpec("1.0.0-1")
+ @test semver_spec("^1.2") == VersionSpec("1.2.0-1")
+ @test semver_spec("^1") == VersionSpec("1.0.0-1")
@test semver_spec("^0.2.3") == VersionSpec("0.2.3-0.2")
@test semver_spec("^0.0.3") == VersionSpec("0.0.3-0.0.3")
- @test semver_spec("^0.0") == VersionSpec("0.0.0-0.0")
- @test semver_spec("^0") == VersionSpec("0.0.0-0")
+ @test semver_spec("^0.0") == VersionSpec("0.0.0-0.0")
+ @test semver_spec("^0") == VersionSpec("0.0.0-0")
@test semver_spec("~1.2.3") == VersionSpec("1.2.3-1.2")
- @test semver_spec("~1.2") == VersionSpec("1.2.0-1.2")
- @test semver_spec("~1") == VersionSpec("1.0.0-1")
- @test semver_spec("1.2.3") == semver_spec("^1.2.3")
- @test semver_spec("1.2") == semver_spec("^1.2")
- @test semver_spec("1") == semver_spec("^1")
- @test semver_spec("0.0.3") == semver_spec("^0.0.3")
- @test semver_spec("0") == semver_spec("^0")
+ @test semver_spec("~1.2") == VersionSpec("1.2.0-1.2")
+ @test semver_spec("~1") == VersionSpec("1.0.0-1")
+ @test semver_spec("1.2.3") == semver_spec("^1.2.3")
+ @test semver_spec("1.2") == semver_spec("^1.2")
+ @test semver_spec("1") == semver_spec("^1")
+ @test semver_spec("0.0.3") == semver_spec("^0.0.3")
+ @test semver_spec("0") == semver_spec("^0")
@test semver_spec("0.0.3, 1.2") == VersionSpec(["0.0.3-0.0.3", "1.2.0-1"])
@test semver_spec("~1.2.3, ~v1") == VersionSpec(["1.2.3-1.2", "1.0.0-1"])
- @test v"1.5.2" in semver_spec("1.2.3")
- @test v"1.2.3" in semver_spec("1.2.3")
- @test !(v"2.0.0" in semver_spec("1.2.3"))
- @test !(v"1.2.2" in semver_spec("1.2.3"))
+ @test v"1.5.2" in semver_spec("1.2.3")
+ @test v"1.2.3" in semver_spec("1.2.3")
+ @test !(v"2.0.0" in semver_spec("1.2.3"))
+ @test !(v"1.2.2" in semver_spec("1.2.3"))
@test v"1.2.99" in semver_spec("~1.2.3")
- @test v"1.2.3" in semver_spec("~1.2.3")
- @test !(v"1.3" in semver_spec("~1.2.3"))
- @test v"1.2.0" in semver_spec("1.2")
- @test v"1.9.9" in semver_spec("1.2")
- @test !(v"2.0.0" in semver_spec("1.2"))
- @test !(v"1.1.9" in semver_spec("1.2"))
- @test v"0.2.3" in semver_spec("0.2.3")
- @test !(v"0.3.0" in semver_spec("0.2.3"))
- @test !(v"0.2.2" in semver_spec("0.2.3"))
- @test v"0.0.0" in semver_spec("0")
- @test v"0.99.0" in semver_spec("0")
- @test !(v"1.0.0" in semver_spec("0"))
- @test v"0.0.0" in semver_spec("0.0")
- @test v"0.0.99" in semver_spec("0.0")
- @test !(v"0.1.0" in semver_spec("0.0"))
+ @test v"1.2.3" in semver_spec("~1.2.3")
+ @test !(v"1.3" in semver_spec("~1.2.3"))
+ @test v"1.2.0" in semver_spec("1.2")
+ @test v"1.9.9" in semver_spec("1.2")
+ @test !(v"2.0.0" in semver_spec("1.2"))
+ @test !(v"1.1.9" in semver_spec("1.2"))
+ @test v"0.2.3" in semver_spec("0.2.3")
+ @test !(v"0.3.0" in semver_spec("0.2.3"))
+ @test !(v"0.2.2" in semver_spec("0.2.3"))
+ @test v"0.0.0" in semver_spec("0")
+ @test v"0.99.0" in semver_spec("0")
+ @test !(v"1.0.0" in semver_spec("0"))
+ @test v"0.0.0" in semver_spec("0.0")
+ @test v"0.0.99" in semver_spec("0.0")
+ @test !(v"0.1.0" in semver_spec("0.0"))
@test semver_spec("<1.2.3") == VersionSpec("0.0.0 - 1.2.2")
@test semver_spec("<1.2") == VersionSpec("0.0.0 - 1.1")
@@ -147,8 +147,8 @@ import Pkg.Types: semver_spec, VersionSpec
@test_throws ErrorException semver_spec("0.0.0")
@test_throws ErrorException semver_spec("0.7 1.0")
- @test Pkg.Types.isjoinable(Pkg.Types.VersionBound((1,5)), Pkg.Types.VersionBound((1,6)))
- @test !(Pkg.Types.isjoinable(Pkg.Types.VersionBound((1,5)), Pkg.Types.VersionBound((1,6,0))))
+ @test Pkg.Types.isjoinable(Pkg.Types.VersionBound((1, 5)), Pkg.Types.VersionBound((1, 6)))
+ @test !(Pkg.Types.isjoinable(Pkg.Types.VersionBound((1, 5)), Pkg.Types.VersionBound((1, 6, 0))))
end
# TODO: Should rewrite these tests not to rely on internals like field names
@@ -187,26 +187,33 @@ temp_pkg_dir() do project_path
@test !isinstalled(TEST_PKG)
pkgdir = joinpath(Pkg.depots1(), "packages")
- # Test to ensure that with a long enough collect_delay, nothing gets reaped
- Pkg.gc(;collect_delay=Day(1000))
- @test !isempty(readdir(pkgdir))
-
- # Setting collect_delay to zero causes it to be reaped immediately, however
- Pkg.gc(;collect_delay=Second(0))
- @test isempty(readdir(pkgdir))
+ # Test that unused packages are reaped
+ Pkg.gc()
+ @test isempty(filter(x -> x != "CACHEDIR.TAG", readdir(pkgdir)))
clonedir = joinpath(Pkg.depots1(), "clones")
- Pkg.add(Pkg.PackageSpec(name=TEST_PKG.name, rev="master"))
+ Pkg.add(Pkg.PackageSpec(name = TEST_PKG.name, rev = "master"))
@test !isempty(readdir(clonedir))
Pkg.rm(TEST_PKG.name)
- Pkg.gc(;collect_delay=Day(1000))
- @test !isempty(readdir(clonedir))
- Pkg.gc(;collect_delay=Second(0))
- @test isempty(readdir(clonedir))
+ # Test that unused repos are also reaped
+ Pkg.gc()
+ @test isempty(filter(x -> x != "CACHEDIR.TAG", readdir(clonedir)))
end
@testset "package with wrong UUID" begin
@test_throws PkgError Pkg.add(PackageSpec(TEST_PKG.name, UUID(UInt128(1))))
+ @testset "package with wrong UUID but correct name" begin
+ try
+ Pkg.add(PackageSpec(name = "Example", uuid = UUID(UInt128(2))))
+ catch e
+ @test e isa PkgError
+ errstr = sprint(showerror, e)
+ @test occursin("expected package `Example [00000000]` to be registered", errstr)
+ @test occursin("You may have provided the wrong UUID for package Example.", errstr)
+ @test occursin("Found the following UUIDs for that name:", errstr)
+ @test occursin("- 7876af07-990d-54b4-ab0e-23690620f79a from registry: General", errstr)
+ end
+ end
# Missing uuid
@test_throws PkgError Pkg.add(PackageSpec(uuid = uuid4()))
end
@@ -240,7 +247,7 @@ temp_pkg_dir() do project_path
recursive_rm_cov_files(pkgdir) # clean out cov files from previous test runs
@test !any(endswith(".cov"), readdir(pkgdir)) # should be no cov files to start with
- Pkg.test(TEST_PKG.name; coverage=true)
+ Pkg.test(TEST_PKG.name; coverage = true)
@test any(endswith(".cov"), readdir(pkgdir))
Pkg.rm(TEST_PKG.name)
end
@@ -258,7 +265,7 @@ temp_pkg_dir() do project_path
@testset "pinning / freeing" begin
Pkg.add(TEST_PKG.name)
old_v = Pkg.dependencies()[TEST_PKG.uuid].version
- Pkg.pin(Pkg.PackageSpec(;name=TEST_PKG.name, version=v"0.2"))
+ Pkg.pin(Pkg.PackageSpec(; name = TEST_PKG.name, version = v"0.2"))
@test Pkg.dependencies()[TEST_PKG.uuid].version.minor == 2
Pkg.update(TEST_PKG.name)
@test Pkg.dependencies()[TEST_PKG.uuid].version.minor == 2
@@ -269,20 +276,21 @@ temp_pkg_dir() do project_path
end
@testset "develop / freeing" begin
- Pkg.add(name=TEST_PKG.name, version=v"0.5.3")
+ Pkg.add(name = TEST_PKG.name, version = v"0.5.3")
old_v = Pkg.dependencies()[TEST_PKG.uuid].version
@test old_v == v"0.5.3"
Pkg.rm(TEST_PKG.name)
mktempdir() do devdir
withenv("JULIA_PKG_DEVDIR" => devdir) do
- @test_throws PkgError Pkg.develop(Pkg.PackageSpec(url="bleh", rev="blurg"))
+ @test_throws PkgError Pkg.develop(Pkg.PackageSpec(url = "bleh", rev = "blurg"))
Pkg.develop(TEST_PKG.name)
@test isinstalled(TEST_PKG)
@test Pkg.dependencies()[TEST_PKG.uuid].version > old_v
test_pkg_main_file = joinpath(devdir, TEST_PKG.name, "src", TEST_PKG.name * ".jl")
@test isfile(test_pkg_main_file)
# Pkg #152
- write(test_pkg_main_file,
+ write(
+ test_pkg_main_file,
"""
module Example
export hello, domath
@@ -293,9 +301,11 @@ temp_pkg_dir() do project_path
hello(who::String) = "Hello, \$who"
domath(x::Number) = x + 5
end
- """)
+ """
+ )
mkpath(joinpath(devdir, TEST_PKG.name, "deps"))
- write(joinpath(devdir, TEST_PKG.name, "deps", "build.jl"),
+ write(
+ joinpath(devdir, TEST_PKG.name, "deps", "build.jl"),
"""
touch("deps.jl")
"""
@@ -304,14 +314,16 @@ temp_pkg_dir() do project_path
proj_str = read(exa_proj, String)
compat_onwards = split(proj_str, "[compat]")[2]
open(exa_proj, "w") do io
- println(io, """
- name = "Example"
- uuid = "$(TEST_PKG.uuid)"
- version = "100.0.0"
-
- [compat]
- $compat_onwards
- """)
+ println(
+ io, """
+ name = "Example"
+ uuid = "$(TEST_PKG.uuid)"
+ version = "100.0.0"
+
+ [compat]
+ $compat_onwards
+ """
+ )
end
Pkg.resolve()
@test Pkg.dependencies()[TEST_PKG.uuid].version == v"100.0.0"
@@ -337,7 +349,7 @@ temp_pkg_dir() do project_path
@testset "package name in resolver errors" begin
try
- Pkg.add(PackageSpec(;name = TEST_PKG.name, version = v"55"))
+ Pkg.add(PackageSpec(; name = TEST_PKG.name, version = v"55"))
catch e
@test occursin(TEST_PKG.name, sprint(showerror, e))
end
@@ -396,55 +408,62 @@ temp_pkg_dir() do project_path
end
@testset "test atomicity of write_env_usage with $(Sys.CPU_THREADS) parallel processes" begin
- tasks = Task[]
- iobs = IOBuffer[]
Sys.CPU_THREADS == 1 && error("Cannot test for atomic usage log file interaction effectively with only Sys.CPU_THREADS=1")
- # Precompile Pkg given we're in a different depot
- # and make sure the General registry is installed
- Utils.show_output_if_command_errors(`$(Base.julia_cmd()[1]) --project="$(pkgdir(Pkg))" -e "import Pkg; isempty(Pkg.Registry.reachable_registries()) && Pkg.Registry.add()"`)
- flag_start_dir = tempdir() # once n=Sys.CPU_THREADS files are in here, the processes can proceed to the concurrent test
- flag_end_file = tempname() # use creating this file as a way to stop the processes early if an error happens
- for i in 1:Sys.CPU_THREADS
- iob = IOBuffer()
- t = @async run(pipeline(`$(Base.julia_cmd()[1]) --project="$(pkgdir(Pkg))"
- -e "import Pkg;
- Pkg.UPDATED_REGISTRY_THIS_SESSION[] = true;
- Pkg.activate(temp = true);
- Pkg.add(\"Random\", io = devnull);
- touch(tempname(raw\"$flag_start_dir\")) # file marker that first part has finished
- while length(readdir(raw\"$flag_start_dir\")) < $(Sys.CPU_THREADS)
- # sync all processes to start at the same time
- sleep(0.1)
- end
- @async begin
- sleep(15)
- touch(raw\"$flag_end_file\")
- end
- i = 0
- while !isfile(raw\"$flag_end_file\")
- global i += 1
- try
- Pkg.Types.EnvCache()
- catch
+ isolate(loaded_depot = true) do
+ tasks = Task[]
+ iobs = IOBuffer[]
+ # Precompile Pkg given we're in a different depot
+ # and make sure the General registry is installed
+ flag_start_dir = mktempdir() # once n=Sys.CPU_THREADS files are in here, the processes can proceed to the concurrent test
+ flag_end_file = tempname() # use creating this file as a way to stop the processes early if an error happens
+ for i in 1:Sys.CPU_THREADS
+ iob = IOBuffer()
+ t = @async run(
+ pipeline(
+ addenv(
+ `$(Base.julia_cmd()) --project="$(pkgdir(Pkg))"
+ -e "import Pkg;
+ Pkg.UPDATED_REGISTRY_THIS_SESSION[] = true;
+ Pkg.activate(temp = true);
+ Pkg.add(\"Random\", io = devnull);
+ touch(tempname(raw\"$flag_start_dir\")*raw\"$i\") # file marker that first part has finished
+ while length(readdir(raw\"$flag_start_dir\")) < $(Sys.CPU_THREADS)
+ # sync all processes to start at the same time
+ sleep(0.1)
+ end
+ @async begin
+ sleep(15)
touch(raw\"$flag_end_file\")
- println(stderr, \"Errored after $i iterations\")
- rethrow()
end
- yield()
- end"`,
- stderr = iob, stdout = devnull))
- push!(tasks, t)
- push!(iobs, iob)
- end
- for i in eachindex(tasks)
- try
- fetch(tasks[i]) # If any of these failed it will throw when fetched
- catch
- print(String(take!(iobs[i])))
- break
+ i = 0
+ while !isfile(raw\"$flag_end_file\")
+ global i += 1
+ try
+ Pkg.Types.EnvCache()
+ catch
+ touch(raw\"$flag_end_file\")
+ println(stderr, \"Errored after $i iterations\")
+ rethrow()
+ end
+ yield()
+ end"`, "JULIA_DEPOT_PATH" => join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":")
+ ),
+ stderr = iob, stdout = devnull
+ )
+ )
+ push!(tasks, t)
+ push!(iobs, iob)
+ end
+ for i in eachindex(tasks)
+ try
+ fetch(tasks[i]) # If any of these failed it will throw when fetched
+ catch
+ print(String(take!(iobs[i])))
+ break
+ end
end
+ @test any(istaskfailed, tasks) == false
end
- @test any(istaskfailed, tasks) == false
end
@testset "parsing malformed usage file" begin
@@ -482,7 +501,7 @@ end
temp_pkg_dir() do project_path
@testset "libgit2 downloads" begin
- Pkg.add(TEST_PKG.name; use_git_for_all_downloads=true)
+ Pkg.add(TEST_PKG.name; use_git_for_all_downloads = true)
@test haskey(Pkg.dependencies(), TEST_PKG.uuid)
@eval import $(Symbol(TEST_PKG.name))
@test_throws SystemError open(pathof(eval(Symbol(TEST_PKG.name))), "w") do io end # check read-only
@@ -504,12 +523,12 @@ end
temp_pkg_dir() do project_path
@testset "libgit2 downloads" begin
- Pkg.add(TEST_PKG.name; use_git_for_all_downloads=true)
+ Pkg.add(TEST_PKG.name; use_git_for_all_downloads = true)
@test haskey(Pkg.dependencies(), TEST_PKG.uuid)
Pkg.rm(TEST_PKG.name)
end
@testset "tarball downloads" begin
- Pkg.add("JSON"; use_only_tarballs_for_downloads=true)
+ Pkg.add("JSON"; use_only_tarballs_for_downloads = true)
@test "JSON" in [pkg.name for (uuid, pkg) in Pkg.dependencies()]
Pkg.rm("JSON")
end
@@ -546,27 +565,29 @@ temp_pkg_dir() do project_path
cd(project_path) do
target_dir = mktempdir()
uuid = nothing
- mktempdir() do tmp; cd(tmp) do
- pkg_name = "FooBar"
- # create a project and grab its uuid
- Pkg.generate(pkg_name)
- uuid = extract_uuid(joinpath(pkg_name, "Project.toml"))
- # activate project env
- Pkg.activate(abspath(pkg_name))
- # add an example project to populate manifest file
- Pkg.add("Example")
- # change away from default names
- ## note: this is written awkwardly because a `mv` here causes failures on AppVeyor
- cp(joinpath(pkg_name, "src"), joinpath(target_dir, "src"))
- cp(joinpath(pkg_name, "Project.toml"), joinpath(target_dir, "JuliaProject.toml"))
- cp(joinpath(pkg_name, "Manifest.toml"), joinpath(target_dir, "JuliaManifest.toml"))
- end end
+ mktempdir() do tmp
+ cd(tmp) do
+ pkg_name = "FooBar"
+ # create a project and grab its uuid
+ Pkg.generate(pkg_name)
+ uuid = extract_uuid(joinpath(pkg_name, "Project.toml"))
+ # activate project env
+ Pkg.activate(abspath(pkg_name))
+ # add an example project to populate manifest file
+ Pkg.add("Example")
+ # change away from default names
+ ## note: this is written awkwardly because a `mv` here causes failures on AppVeyor
+ cp(joinpath(pkg_name, "src"), joinpath(target_dir, "src"))
+ cp(joinpath(pkg_name, "Project.toml"), joinpath(target_dir, "JuliaProject.toml"))
+ cp(joinpath(pkg_name, "Manifest.toml"), joinpath(target_dir, "JuliaManifest.toml"))
+ end
+ end
Pkg.activate()
# make sure things still work
Pkg.REPLMode.pkgstr("dev $target_dir")
- @test isinstalled((name="FooBar", uuid=UUID(uuid)))
+ @test isinstalled((name = "FooBar", uuid = UUID(uuid)))
Pkg.rm("FooBar")
- @test !isinstalled((name="FooBar", uuid=UUID(uuid)))
+ @test !isinstalled((name = "FooBar", uuid = UUID(uuid)))
end # cd project_path
end # @testset
end
@@ -581,53 +602,57 @@ temp_pkg_dir() do project_path
end
end
-temp_pkg_dir() do project_path; cd(project_path) do
- tmp = mktempdir()
- depo1 = mktempdir()
- depo2 = mktempdir()
- cd(tmp) do; @testset "instantiating updated repo" begin
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depo1)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- LibGit2.close(LibGit2.clone(TEST_PKG.url, "Example.jl"))
- mkdir("machine1")
- cd("machine1")
- Pkg.activate(".")
- Pkg.add(Pkg.PackageSpec(path="../Example.jl"))
- cd("..")
- cp("machine1", "machine2")
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depo2)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- cd("machine2")
- Pkg.activate(".")
- Pkg.instantiate()
- cd("..")
- cd("Example.jl")
- open("README.md", "a") do io
- print(io, "Hello")
- end
- LibGit2.with(LibGit2.GitRepo(".")) do repo
- LibGit2.add!(repo, "*")
- LibGit2.commit(repo, "changes"; author=TEST_SIG, committer=TEST_SIG)
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ tmp = mktempdir()
+ depo1 = mktempdir()
+ depo2 = mktempdir()
+ cd(tmp) do;
+ @testset "instantiating updated repo" begin
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depo1)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ LibGit2.close(LibGit2.clone(TEST_PKG.url, "Example.jl"))
+ mkdir("machine1")
+ cd("machine1")
+ Pkg.activate(".")
+ Pkg.add(Pkg.PackageSpec(path = "../Example.jl"))
+ cd("..")
+ cp("machine1", "machine2")
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depo2)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ cd("machine2")
+ Pkg.activate(".")
+ Pkg.instantiate()
+ cd("..")
+ cd("Example.jl")
+ open("README.md", "a") do io
+ print(io, "Hello")
+ end
+ LibGit2.with(LibGit2.GitRepo(".")) do repo
+ LibGit2.add!(repo, "*")
+ LibGit2.commit(repo, "changes"; author = TEST_SIG, committer = TEST_SIG)
+ end
+ cd("../machine1")
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depo1)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ Pkg.activate(".")
+ Pkg.update()
+ cd("..")
+ cp("machine1/Manifest.toml", "machine2/Manifest.toml"; force = true)
+ cd("machine2")
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depo2)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ Pkg.activate(".")
+ Pkg.instantiate()
+ end
end
- cd("../machine1")
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depo1)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.activate(".")
- Pkg.update()
- cd("..")
- cp("machine1/Manifest.toml", "machine2/Manifest.toml"; force=true)
- cd("machine2")
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depo2)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.activate(".")
- Pkg.instantiate()
- end end
- Base.rm.([tmp, depo1, depo2]; force = true, recursive = true)
-end end
+ Base.rm.([tmp, depo1, depo2]; force = true, recursive = true)
+ end
+end
@testset "printing of stdlib paths, issue #605" begin
path = Pkg.Types.stdlib_path("Test")
@@ -635,13 +660,13 @@ end end
end
@testset "stdlib_resolve!" begin
- a = Pkg.Types.PackageSpec(name="Markdown")
- b = Pkg.Types.PackageSpec(uuid=UUID("9abbd945-dff8-562f-b5e8-e1ebf5ef1b79"))
+ a = Pkg.Types.PackageSpec(name = "Markdown")
+ b = Pkg.Types.PackageSpec(uuid = UUID("9abbd945-dff8-562f-b5e8-e1ebf5ef1b79"))
Pkg.Types.stdlib_resolve!([a, b])
@test a.uuid == UUID("d6f4376e-aef5-505a-96c1-9c027394607a")
@test b.name == "Profile"
- x = Pkg.Types.PackageSpec(name="Markdown", uuid=UUID("d6f4376e-aef5-505a-96c1-9c027394607a"))
+ x = Pkg.Types.PackageSpec(name = "Markdown", uuid = UUID("d6f4376e-aef5-505a-96c1-9c027394607a"))
Pkg.Types.stdlib_resolve!([x])
@test x.name == "Markdown"
@test x.uuid == UUID("d6f4376e-aef5-505a-96c1-9c027394607a")
@@ -650,10 +675,10 @@ end
@testset "issue #913" begin
temp_pkg_dir() do project_path
Pkg.activate(project_path)
- Pkg.add(Pkg.PackageSpec(name="Example", rev = "master"))
+ Pkg.add(Pkg.PackageSpec(name = "Example", rev = "master"))
@test isinstalled(TEST_PKG)
- rm.(joinpath.(project_path, ["Project.toml","Manifest.toml"]))
- Pkg.add(Pkg.PackageSpec(name="Example", rev = "master")) # should not fail
+ rm.(joinpath.(project_path, ["Project.toml", "Manifest.toml"]))
+ Pkg.add(Pkg.PackageSpec(name = "Example", rev = "master")) # should not fail
@test isinstalled(TEST_PKG)
end
end
@@ -672,35 +697,64 @@ end
end
end
+if isdefined(Base.Filesystem, :delayed_delete_ref)
+ @testset "Pkg.gc for delayed deletes" begin
+ mktempdir() do root
+ with_temp_env(root) do
+ dir = joinpath(root, "julia_delayed_deletes")
+ mkdir(dir)
+ testfile = joinpath(dir, "testfile")
+ write(testfile, "foo bar")
+ delayed_delete_ref_path = Base.Filesystem.delayed_delete_ref()
+ mkpath(delayed_delete_ref_path)
+ ref = tempname(delayed_delete_ref_path; cleanup = false)
+ write(ref, testfile)
+ @test isfile(testfile)
+ Pkg.gc()
+ @test !ispath(testfile)
+ @test !ispath(dir)
+ @test !ispath(ref)
+ @test !ispath(delayed_delete_ref_path) || !isempty(readdir(delayed_delete_ref_path))
+ end
+ end
+ end
+end
+
#issue #876
@testset "targets should survive add/rm" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir
- cp(joinpath(@__DIR__, "project", "good", "pkg.toml"), "Project.toml")
- mkdir("src")
- touch("src/Pkg.jl")
- targets = deepcopy(Pkg.Types.read_project("Project.toml").targets)
- Pkg.activate(".")
- Pkg.add("Example")
- Pkg.rm("Example")
- @test targets == Pkg.Types.read_project("Project.toml").targets
- end end
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ cp(joinpath(@__DIR__, "project", "good", "pkg.toml"), "Project.toml")
+ mkdir("src")
+ touch("src/Pkg.jl")
+ targets = deepcopy(Pkg.Types.read_project("Project.toml").targets)
+ Pkg.activate(".")
+ Pkg.add("Example")
+ Pkg.rm("Example")
+ @test targets == Pkg.Types.read_project("Project.toml").targets
+ end
+ end
end
@testset "canonicalized relative paths in manifest" begin
- mktempdir() do tmp; cd(tmp) do
- write("Manifest.toml",
- """
- [[Foo]]
- path = "bar/Foo"
- uuid = "824dc81a-29a7-11e9-3958-fba342a32644"
- version = "0.1.0"
- """)
- manifest = Pkg.Types.read_manifest("Manifest.toml")
- package = manifest[Base.UUID("824dc81a-29a7-11e9-3958-fba342a32644")]
- @test package.path == (Sys.iswindows() ? "bar\\Foo" : "bar/Foo")
- Pkg.Types.write_manifest(manifest, "Manifest.toml")
- @test occursin("path = \"bar/Foo\"", read("Manifest.toml", String))
- end end
+ mktempdir() do tmp
+ cd(tmp) do
+ write(
+ "Manifest.toml",
+ """
+ [[Foo]]
+ path = "bar/Foo"
+ uuid = "824dc81a-29a7-11e9-3958-fba342a32644"
+ version = "0.1.0"
+ """
+ )
+ manifest = Pkg.Types.read_manifest("Manifest.toml")
+ package = manifest[Base.UUID("824dc81a-29a7-11e9-3958-fba342a32644")]
+ @test package.path == (Sys.iswindows() ? "bar\\Foo" : "bar/Foo")
+ Pkg.Types.write_manifest(manifest, "Manifest.toml")
+ @test occursin("path = \"bar/Foo\"", read("Manifest.toml", String))
+ end
+ end
end
@testset "building project should fix version of deps" begin
@@ -718,37 +772,77 @@ end
@test sprint(showerror, err) == "foobar"
end
-@testset "issue #1066: package with colliding name/uuid exists in project" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir
- Pkg.activate(".")
- Pkg.generate("A")
- cd(mkdir("packages")) do
+@testset "issue #2191: better diagnostic for missing package" begin
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ Pkg.activate(".")
+
+ # Create a package A that depends on package B
Pkg.generate("A")
+ Pkg.generate("B")
git_init_and_commit("A")
+ git_init_and_commit("B")
+
+ # Add B as a dependency of A
+ cd("A") do
+ Pkg.develop(PackageSpec(path = "../B"))
+ end
+
+ # Now remove the B directory to simulate the missing package scenario
+ rm("B", recursive = true)
+
+ # Try to perform an operation that would trigger the missing package error
+ cd("A") do
+ try
+ Pkg.resolve()
+ @test false # a PkgError should be thrown
+ catch e
+ @test e isa PkgError
+ error_msg = sprint(showerror, e)
+ # Check that the improved error message contains helpful information
+ @test occursin("This package is referenced in the manifest file:", error_msg)
+ end
+ end
+ end
+ end
+end
+
+@testset "issue #1066: package with colliding name/uuid exists in project" begin
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ Pkg.activate(".")
+ Pkg.generate("A")
+ cd(mkdir("packages")) do
+ Pkg.generate("A")
+ git_init_and_commit("A")
+ end
+ Pkg.generate("B")
+ project = Pkg.Types.read_project("A/Project.toml")
+ project.name = "B"
+ Pkg.Types.write_project(project, "B/Project.toml")
+ git_init_and_commit("B")
+ Pkg.develop(Pkg.PackageSpec(path = abspath("A")))
+ # package with same name but different uuid exists in project
+ @test_throws PkgError Pkg.develop(Pkg.PackageSpec(path = abspath("packages", "A")))
+ @test_throws PkgError Pkg.add(Pkg.PackageSpec(path = abspath("packages", "A")))
+ # package with same uuid but different name exists in project
+ @test_throws PkgError Pkg.develop(Pkg.PackageSpec(path = abspath("B")))
+ @test_throws PkgError Pkg.add(Pkg.PackageSpec(path = abspath("B")))
end
- Pkg.generate("B")
- project = Pkg.Types.read_project("A/Project.toml")
- project.name = "B"
- Pkg.Types.write_project(project, "B/Project.toml")
- git_init_and_commit("B")
- Pkg.develop(Pkg.PackageSpec(path = abspath("A")))
- # package with same name but different uuid exist in project
- @test_throws PkgError Pkg.develop(Pkg.PackageSpec(path = abspath("packages", "A")))
- @test_throws PkgError Pkg.add(Pkg.PackageSpec(path = abspath("packages", "A")))
- # package with same uuid but different name exist in project
- @test_throws PkgError Pkg.develop(Pkg.PackageSpec(path = abspath("B")))
- @test_throws PkgError Pkg.add(Pkg.PackageSpec(path = abspath("B")))
- end end
+ end
end
@testset "issue #1180: broken toml-files in HEAD" begin
- temp_pkg_dir() do dir; cd(dir) do
- write("Project.toml", "[deps]\nExample = \n")
- git_init_and_commit(dir)
- write("Project.toml", "[deps]\nExample = \"7876af07-990d-54b4-ab0e-23690620f79a\"\n")
- Pkg.activate(dir)
- @test_logs (:warn, r"could not read project from HEAD") Pkg.status(diff=true)
- end end
+ temp_pkg_dir() do dir
+ cd(dir) do
+ write("Project.toml", "[deps]\nExample = \n")
+ git_init_and_commit(dir)
+ write("Project.toml", "[deps]\nExample = \"7876af07-990d-54b4-ab0e-23690620f79a\"\n")
+ Pkg.activate(dir)
+ io = PipeBuffer() # IO is required to avoid short-circuit in Pkg.status
+ @test_logs (:warn, r"could not read project from HEAD") Pkg.status(diff = true; io)
+ end
+ end
end
import Markdown
@@ -763,96 +857,102 @@ end
@testset "up should prune manifest" begin
example_uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")
unicode_uuid = UUID("4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5")
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Unpruned")
- Pkg.activate(joinpath(tmp, "Unpruned"))
- Pkg.update()
- manifest = Pkg.Types.Context().env.manifest
- package_example = get(manifest, example_uuid, nothing)
- @test package_example !== nothing
- @test package_example.version > v"0.4.0"
- @test get(manifest, unicode_uuid, nothing) === nothing
- end end
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Unpruned")
+ Pkg.activate(joinpath(tmp, "Unpruned"))
+ Pkg.update()
+ manifest = Pkg.Types.Context().env.manifest
+ package_example = get(manifest, example_uuid, nothing)
+ @test package_example !== nothing
+ @test package_example.version > v"0.4.0"
+ @test get(manifest, unicode_uuid, nothing) === nothing
+ end
+ end
end
@testset "undo redo functionality" begin
unicode_uuid = UUID("4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5")
- temp_pkg_dir() do project_path; with_temp_env() do
- Pkg.activate(project_path)
- # Example
- Pkg.add(TEST_PKG.name)
- @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
- #
- Pkg.undo()
- @test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
- # Example
- Pkg.redo()
- # Example, Unicode
- Pkg.add("Unicode")
- @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
- # Example
- Pkg.undo()
- @test !haskey(Pkg.dependencies(), unicode_uuid)
- #
- Pkg.undo()
- @test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
- # Example, Unicode
- Pkg.redo()
- Pkg.redo()
- @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
- @test haskey(Pkg.dependencies(), unicode_uuid)
- # Should not add states since they are nops
- Pkg.add("Unicode")
- Pkg.add("Unicode")
- # Example
- Pkg.undo()
- @test !haskey(Pkg.dependencies(), unicode_uuid)
- # Example, Unicode
- Pkg.redo()
- @test haskey(Pkg.dependencies(), unicode_uuid)
-
- # Example
- Pkg.undo()
-
- prev_project = Base.active_project()
- mktempdir() do tmp
- Pkg.activate(tmp)
- Pkg.add("Example")
+ temp_pkg_dir() do project_path
+ with_temp_env() do
+ Pkg.activate(project_path)
+ # Example
+ Pkg.add(TEST_PKG.name)
+ @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ #
Pkg.undo()
@test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
- end
- Pkg.activate(prev_project)
+ # Example
+ Pkg.redo()
+ # Example, Unicode
+ Pkg.add("Unicode")
+ @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ # Example
+ Pkg.undo()
+ @test !haskey(Pkg.dependencies(), unicode_uuid)
+ #
+ Pkg.undo()
+ @test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ # Example, Unicode
+ Pkg.redo()
+ Pkg.redo()
+ @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ @test haskey(Pkg.dependencies(), unicode_uuid)
+ # Should not add states since they are nops
+ Pkg.add("Unicode")
+ Pkg.add("Unicode")
+ # Example
+ Pkg.undo()
+ @test !haskey(Pkg.dependencies(), unicode_uuid)
+ # Example, Unicode
+ Pkg.redo()
+ @test haskey(Pkg.dependencies(), unicode_uuid)
+
+ # Example
+ Pkg.undo()
+
+ prev_project = Base.active_project()
+ mktempdir() do tmp
+ Pkg.activate(tmp)
+ Pkg.add("Example")
+ Pkg.undo()
+ @test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ end
+ Pkg.activate(prev_project)
- # Check that undo state persists after swapping projects
- # Example, Unicode
- Pkg.redo()
- @test haskey(Pkg.dependencies(), unicode_uuid)
+ # Check that undo state persists after swapping projects
+ # Example, Unicode
+ Pkg.redo()
+ @test haskey(Pkg.dependencies(), unicode_uuid)
- end end
+ end
+ end
end
@testset "subdir functionality" begin
- temp_pkg_dir() do project_path; with_temp_env() do
- mktempdir() do tmp
- repodir = git_init_package(tmp, joinpath(@__DIR__, "test_packages", "MainRepo"))
- # Add with subdir
- subdir_uuid = UUID("6fe4e069-dcb0-448a-be67-3a8bf3404c58")
- Pkg.add(url = repodir, subdir = "SubDir")
- pkgdir = abspath(joinpath(dirname(Base.find_package("SubDir")), ".."))
-
- # Update with subdir in manifest
- Pkg.update()
- # Test instantiate with subdir
- rm(pkgdir; recursive=true)
- Pkg.instantiate()
- @test isinstalled("SubDir")
- Pkg.rm("SubDir")
-
- # Dev of local path with subdir
- Pkg.develop(path=repodir, subdir="SubDir")
- @test Pkg.dependencies()[subdir_uuid].source == joinpath(repodir, "SubDir")
+ temp_pkg_dir() do project_path
+ with_temp_env() do
+ mktempdir() do tmp
+ repodir = git_init_package(tmp, joinpath(@__DIR__, "test_packages", "MainRepo"))
+ # Add with subdir
+ subdir_uuid = UUID("6fe4e069-dcb0-448a-be67-3a8bf3404c58")
+ Pkg.add(url = repodir, subdir = "SubDir")
+ pkgdir = abspath(joinpath(dirname(Base.find_package("SubDir")), ".."))
+
+ # Update with subdir in manifest
+ Pkg.update()
+ # Test instantiate with subdir
+ rm(pkgdir; recursive = true)
+ Pkg.instantiate()
+ @test isinstalled("SubDir")
+ Pkg.rm("SubDir")
+
+ # Dev of local path with subdir
+ Pkg.develop(path = repodir, subdir = "SubDir")
+ @test Pkg.dependencies()[subdir_uuid].source == joinpath(repodir, "SubDir")
+ end
end
- end end
+ end
end
# PR #1784 - Remove trailing slash from URL.
@@ -943,9 +1043,10 @@ end
import Pkg.Resolve.range_compressed_versionspec
@testset "range_compressed_versionspec" begin
pool = [v"1.0.0", v"1.1.0", v"1.2.0", v"1.2.1", v"2.0.0", v"2.0.1", v"3.0.0", v"3.1.0"]
- @test (range_compressed_versionspec(pool)
- == range_compressed_versionspec(pool, pool)
- == VersionSpec("1.0.0-3.1.0")
+ @test (
+ range_compressed_versionspec(pool)
+ == range_compressed_versionspec(pool, pool)
+ == VersionSpec("1.0.0-3.1.0")
)
@test isequal(
@@ -971,88 +1072,177 @@ end
end
@testset "Suggest `Pkg.develop` instead of `Pkg.add`" begin
- mktempdir() do tmp_dir
- touch(joinpath(tmp_dir, "Project.toml"))
- @test_throws Pkg.Types.PkgError Pkg.add(; path = tmp_dir)
+ isolate() do
+ mktempdir() do tmp_dir
+ touch(joinpath(tmp_dir, "Project.toml"))
+ @test_throws Pkg.Types.PkgError Pkg.add(; path = tmp_dir)
+ end
end
end
@testset "Issue #3069" begin
- p = PackageSpec(; path="test_packages/Example")
- @test_throws Pkg.Types.PkgError("Package PackageSpec(\n path = test_packages/Example\n version = *\n) has neither name nor uuid") ensure_resolved(Pkg.Types.Context(), Pkg.Types.Manifest(), [p])
+ with_temp_env() do
+ p = PackageSpec(; path = "test_packages/Example")
+ @test_throws Pkg.Types.PkgError("Package PackageSpec(\n path = test_packages/Example\n version = *\n) has neither name nor uuid") ensure_resolved(Pkg.Types.Context(), Pkg.Types.Manifest(), [p])
+ end
end
@testset "Issue #3147" begin
- prev_project = Base.active_project()
+ isolate() do
- @testset "Pkg.add" begin
- Pkg.activate(temp = true)
- mktempdir() do tmp_dir
- LibGit2.close(LibGit2.clone(TEST_PKG.url, tmp_dir))
- Pkg.develop(path=tmp_dir)
+ @testset "Pkg.add" begin
+ Pkg.activate(temp = true)
+ mktempdir() do tmp_dir
+ LibGit2.close(LibGit2.clone(TEST_PKG.url, tmp_dir))
+ Pkg.develop(path = tmp_dir)
+ Pkg.pin("Example")
+ Pkg.add("Example")
+ info = Pkg.dependencies()[TEST_PKG.uuid]
+ @test info.is_pinned
+ @test info.is_tracking_path
+ @test !info.is_tracking_repo
+ @test info.version > v"0.5.3"
+ end
+ Pkg.rm("Example")
+
+ Pkg.add(url = TEST_PKG.url, rev = "29aa1b4")
Pkg.pin("Example")
Pkg.add("Example")
info = Pkg.dependencies()[TEST_PKG.uuid]
@test info.is_pinned
- @test info.is_tracking_path
- @test !info.is_tracking_repo
- @test info.version > v"0.5.3"
+ @test !info.is_tracking_path
+ @test info.is_tracking_repo
+ @test info.version == v"0.5.3"
+ Pkg.rm("Example")
+ end
+
+ @testset "Pkg.update" begin
+ Pkg.activate(temp = true)
+ mktempdir() do tmp_dir
+ ver = v"0.5.3"
+ repo = LibGit2.clone(TEST_PKG.url, tmp_dir)
+ tag = LibGit2.GitObject(repo, "v$ver")
+ hash = string(LibGit2.target(tag))
+ LibGit2.checkout!(repo, hash)
+ LibGit2.close(repo)
+ Pkg.develop(path = tmp_dir)
+ Pkg.pin("Example")
+ Pkg.update("Example") # pkg should remain pinned
+ info = Pkg.dependencies()[TEST_PKG.uuid]
+ @test info.is_pinned
+ @test info.is_tracking_path
+ @test !info.is_tracking_repo
+ @test info.version == ver
+
+ # modify the pkg version manually, to mimic developing this pkg
+ dev_ver = VersionNumber(ver.major, ver.minor, ver.patch + 1)
+ fn = joinpath(tmp_dir, "Project.toml")
+ toml = TOML.parse(read(fn, String))
+ toml["version"] = string(dev_ver)
+ open(io -> TOML.print(io, toml), fn, "w")
+ Pkg.update("Example") # noop since Pkg.is_fully_pinned(...) is true
+ info = Pkg.dependencies()[TEST_PKG.uuid]
+ @test info.is_pinned
+ @test info.is_tracking_path
+ @test !info.is_tracking_repo
+ @test info.version == ver
+
+ Pkg.pin("Example") # pinning a 2ⁿᵈ time updates versions in the manifest
+ info = Pkg.dependencies()[TEST_PKG.uuid]
+ @test info.is_pinned
+ @test info.is_tracking_path
+ @test !info.is_tracking_repo
+ @test info.version == dev_ver
+ end
+ Pkg.rm("Example")
end
- Pkg.rm("Example")
-
- Pkg.add(url=TEST_PKG.url, rev="29aa1b4")
- Pkg.pin("Example")
- Pkg.add("Example")
- info = Pkg.dependencies()[TEST_PKG.uuid]
- @test info.is_pinned
- @test !info.is_tracking_path
- @test info.is_tracking_repo
- @test info.version == v"0.5.3"
- Pkg.rm("Example")
end
+end
- @testset "Pkg.update" begin
- Pkg.activate(temp = true)
- mktempdir() do tmp_dir
- ver = v"0.5.3"
- repo = LibGit2.clone(TEST_PKG.url, tmp_dir)
- tag = LibGit2.GitObject(repo, "v$ver")
- hash = string(LibGit2.target(tag))
- LibGit2.checkout!(repo, hash)
- LibGit2.close(repo)
- Pkg.develop(path=tmp_dir)
- Pkg.pin("Example")
- Pkg.update("Example") # pkg should remain pinned
- info = Pkg.dependencies()[TEST_PKG.uuid]
- @test info.is_pinned
- @test info.is_tracking_path
- @test !info.is_tracking_repo
- @test info.version == ver
-
- # modify the pkg version manually, to mimic developing this pkg
- dev_ver = VersionNumber(ver.major, ver.minor, ver.patch + 1)
- fn = joinpath(tmp_dir, "Project.toml")
- toml = TOML.parse(read(fn, String))
- toml["version"] = string(dev_ver)
- open(io -> TOML.print(io, toml), fn, "w")
- Pkg.update("Example") # noop since Pkg.is_fully_pinned(...) is true
- info = Pkg.dependencies()[TEST_PKG.uuid]
- @test info.is_pinned
- @test info.is_tracking_path
- @test !info.is_tracking_repo
- @test info.version == ver
+@testset "check_registered error paths" begin
+ # Test the "no registries have been installed" error path
+ isolate(loaded_depot = false, linked_reg = false) do
+ with_temp_env() do
+ # Ensure we have no registries available
+ @test isempty(Pkg.Registry.reachable_registries())
- Pkg.pin("Example") # pinning a 2ⁿᵈ time updates versions in the manifest
- info = Pkg.dependencies()[TEST_PKG.uuid]
- @test info.is_pinned
- @test info.is_tracking_path
- @test !info.is_tracking_repo
- @test info.version == dev_ver
- end
- Pkg.rm("Example")
+ # Should install General registry automatically
+ Pkg.add("Example")
+
+ Pkg.Registry.rm("General")
+ @test isempty(Pkg.Registry.reachable_registries())
+ end
end
- Pkg.activate(prev_project)
+ # Test the "expected package to be registered" error path with a custom unregistered package
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ with_temp_env() do
+ # Create a fake package with a manifest that references an unregistered UUID
+ fake_pkg_path = copy_test_package(tempdir, "UnregisteredUUID")
+ Pkg.activate(fake_pkg_path)
+
+ # This should fail with "expected package to be registered" error
+ @test_throws r"expected package.*to be registered" begin
+ Pkg.add("JSON") # This will fail because Example UUID in manifest is unregistered
+ end
+ end
+ end
+ end
+end
+
+# issue #2291: relative paths in manifests should be resolved relative to manifest location
+@testset "relative path resolution from different directories (issue #2291)" begin
+ isolate() do
+ mktempdir() do dir
+ # Create a local package with a git repo
+ pkg_path = joinpath(dir, "LocalPackage")
+ mkpath(joinpath(pkg_path, "src"))
+ write(
+ joinpath(pkg_path, "Project.toml"), """
+ name = "LocalPackage"
+ uuid = "00000000-0000-0000-0000-000000000001"
+ version = "0.1.0"
+ """
+ )
+ write(
+ joinpath(pkg_path, "src", "LocalPackage.jl"), """
+ module LocalPackage
+ greet() = "Hello from LocalPackage!"
+ end
+ """
+ )
+
+ # Initialize git repo
+ LibGit2.with(LibGit2.init(pkg_path)) do repo
+ LibGit2.add!(repo, "*")
+ LibGit2.commit(repo, "Initial commit"; author = TEST_SIG, committer = TEST_SIG)
+ end
+
+ # Create a project in a subdirectory and add the package with relative path
+ project_path = joinpath(dir, "project")
+ mkpath(project_path)
+ cd(project_path) do
+ Pkg.activate(".")
+ Pkg.add(Pkg.PackageSpec(path = "../LocalPackage"))
+
+ # Verify the package was added with relative path
+ manifest = read_manifest(joinpath(project_path, "Manifest.toml"))
+ pkg_entry = manifest[UUID("00000000-0000-0000-0000-000000000001")]
+ @test pkg_entry.repo.source == "../LocalPackage"
+ end
+
+ # Now change to parent directory and try to update - this should work
+ cd(dir) do
+ Pkg.activate("project")
+ Pkg.update() # This should not fail
+ # Check the package is installed by looking it up in dependencies
+ pkg_info = Pkg.dependencies()[UUID("00000000-0000-0000-0000-000000000001")]
+ @test pkg_info.name == "LocalPackage"
+ @test isinstalled(PackageSpec(uuid = UUID("00000000-0000-0000-0000-000000000001"), name = "LocalPackage"))
+ end
+ end
+ end
end
end # module
diff --git a/test/platformengines.jl b/test/platformengines.jl
index d2e57048f7..2c290acef3 100644
--- a/test/platformengines.jl
+++ b/test/platformengines.jl
@@ -31,7 +31,7 @@ using ..Utils: list_tarball_files
# Next, package it up as a .tar.gz file
mktempdir() do output_dir
- tarball_path = joinpath(output_dir, "foo.tar.gz")
+ tarball_path = joinpath(output_dir, "foo.tar.gz")
package(prefix, tarball_path)
@test isfile(tarball_path)
@@ -55,8 +55,8 @@ end
foo_hash = bytes2hex(sha256("test"))
# Check that verifying with the right hash works
- @test_logs (:info, r"No hash cache found") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ @test_logs (:info, r"No hash cache found") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == true
@test status == :hash_cache_missing
end
@@ -65,8 +65,8 @@ end
@test isfile("$(foo_path).sha256")
# Check that it verifies the second time around properly
- @test_logs (:info, r"Hash cache is consistent") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ @test_logs (:info, r"Hash cache is consistent") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == true
@test status == :hash_cache_consistent
end
@@ -76,29 +76,29 @@ end
# Get coverage of messing with different parts of the verification chain
touch(foo_path)
- @test_logs (:info, r"File has been modified") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ @test_logs (:info, r"File has been modified") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == true
@test status == :file_modified
end
# Ensure that we print an error when verification fails
- rm("$(foo_path).sha256"; force=true)
- @test_logs (:error, r"Hash Mismatch!") match_mode=:any begin
- @test !verify(foo_path, "0"^64; verbose=true)
+ rm("$(foo_path).sha256"; force = true)
+ @test_logs (:error, r"Hash Mismatch!") match_mode = :any begin
+ @test !verify(foo_path, "0"^64; verbose = true)
end
# Ensure that incorrect lengths cause an exception
- @test_throws ErrorException verify(foo_path, "0"^65; verbose=true)
+ @test_throws ErrorException verify(foo_path, "0"^65; verbose = true)
# Ensure that messing with the hash file works properly
touch(foo_path)
- @test verify(foo_path, foo_hash; verbose=true)
+ @test verify(foo_path, foo_hash; verbose = true)
open("$(foo_path).sha256", "w") do file
write(file, "this is not the right hash")
end
- @test_logs (:info, r"hash cache invalidated") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ @test_logs (:info, r"hash cache invalidated") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == true
@test status == :hash_cache_mismatch
end
@@ -109,9 +109,9 @@ end
end
# Delete hash cache file to force re-verification
- rm("$(foo_path).sha256"; force=true)
- @test_logs (:error, r"Hash Mismatch!") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ rm("$(foo_path).sha256"; force = true)
+ @test_logs (:error, r"Hash Mismatch!") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == false
@test status == :hash_mismatch
end
@@ -121,11 +121,11 @@ end
const socrates_urls = [
"https://github.com/staticfloat/small_bin/raw/f1a92f5eafbd30a0c6a8efb6947485b0f6d1bec3/socrates.tar.gz" =>
- "e65d2f13f2085f2c279830e863292312a72930fee5ba3c792b14c33ce5c5cc58",
+ "e65d2f13f2085f2c279830e863292312a72930fee5ba3c792b14c33ce5c5cc58",
"https://github.com/staticfloat/small_bin/raw/f1a92f5eafbd30a0c6a8efb6947485b0f6d1bec3/socrates.tar.bz2" =>
- "13fc17b97be41763b02cbb80e9d048302cec3bd3d446c2ed6e8210bddcd3ac76",
+ "13fc17b97be41763b02cbb80e9d048302cec3bd3d446c2ed6e8210bddcd3ac76",
"https://github.com/staticfloat/small_bin/raw/f1a92f5eafbd30a0c6a8efb6947485b0f6d1bec3/socrates.tar.xz" =>
- "61bcf109fcb749ee7b6a570a6057602c08c836b6f81091eab7aa5f5870ec6475",
+ "61bcf109fcb749ee7b6a570a6057602c08c836b6f81091eab7aa5f5870ec6475",
]
const socrates_hash = "adcbcf15674eafe8905093183d9ab997cbfba9056fc7dde8bfa5a22dfcfb4967"
@@ -135,16 +135,16 @@ const socrates_hash = "adcbcf15674eafe8905093183d9ab997cbfba9056fc7dde8bfa5a22df
tarball_path = joinpath(prefix, "download_target.tar$(splitext(url)[2])")
target_dir = joinpath(prefix, "target")
- download_verify_unpack(url, hash, target_dir; tarball_path=tarball_path, verbose=true)
+ download_verify_unpack(url, hash, target_dir; tarball_path = tarball_path, verbose = true)
# Test downloading a second time, to get the "already exists" path
- download_verify_unpack(url, hash, target_dir; tarball_path=tarball_path, verbose=true)
+ download_verify_unpack(url, hash, target_dir; tarball_path = tarball_path, verbose = true)
# And a third time, after corrupting it, to get the "redownloading" path
open(tarball_path, "w") do io
println(io, "corruptify")
end
- download_verify_unpack(url, hash, target_dir; tarball_path=tarball_path, verbose=true, force=true)
+ download_verify_unpack(url, hash, target_dir; tarball_path = tarball_path, verbose = true, force = true)
# Test that it has the contents we expect
socrates_path = joinpath(target_dir, "bin", "socrates")
@@ -163,7 +163,7 @@ const collapse_hash = "956c1201405f64d3465cc28cb0dec9d63c11a08cad28c381e13bb22e1
withenv("BINARYPROVIDER_COPYDEREF" => "true") do
mktempdir() do prefix
target_dir = joinpath(prefix, "target")
- download_verify_unpack(collapse_url, collapse_hash, target_dir; verbose=true)
+ download_verify_unpack(collapse_url, collapse_hash, target_dir; verbose = true)
# Test that we get the files we expect
@test isfile(joinpath(target_dir, "collapse_the_symlink", "foo"))
@@ -182,7 +182,7 @@ end
@testset "Download GitHub API #88" begin
mktempdir() do tmp
- PlatformEngines.download("https://api.github.com/repos/JuliaPackaging/BinaryProvider.jl/tarball/c2a4fc38f29eb81d66e3322e585d0199722e5d71", joinpath(tmp, "BinaryProvider"); verbose=true)
+ PlatformEngines.download("https://api.github.com/repos/JuliaPackaging/BinaryProvider.jl/tarball/c2a4fc38f29eb81d66e3322e585d0199722e5d71", joinpath(tmp, "BinaryProvider"); verbose = true)
@test isfile(joinpath(tmp, "BinaryProvider"))
end
end
@@ -238,13 +238,22 @@ end
end
end
end
+
+ # file:// URLs (issue #4640)
+ for (path, expected) in [("/some/local/path", "some"), ("/srv/pkg", "srv"), ("/c%3A/foo/bar", "c%3A")]
+ server = "file://$(path)"
+ url = "$(server)/foo"
+ test_server_dir(url, server, expected)
+ end
end
called = 0
- dispose = PlatformEngines.register_auth_error_handler("https://foo.bar/baz", function (url, svr, err)
- called += 1
- return true, called < 3
- end)
+ dispose = PlatformEngines.register_auth_error_handler(
+ "https://foo.bar/baz", function (url, svr, err)
+ called += 1
+ return true, called < 3
+ end
+ )
@test PlatformEngines.get_auth_header("https://foo.bar/baz") == nothing
@test called == 0
diff --git a/test/project/bad/targets_not_a_table.toml b/test/project/bad/targets_not_a_table.toml
index 9af8b2b342..cbca90e7f4 100644
--- a/test/project/bad/targets_not_a_table.toml
+++ b/test/project/bad/targets_not_a_table.toml
@@ -17,4 +17,3 @@ UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-
diff --git a/test/project/good/withversion.toml b/test/project/good/withversion.toml
new file mode 100644
index 0000000000..1be6383f2e
--- /dev/null
+++ b/test/project/good/withversion.toml
@@ -0,0 +1,3 @@
+name = "VersionedDep1"
+uuid = "f08855a0-36cb-4a32-8ae5-a227b709c612"
+syntax.julia_version = "1.13.0"
diff --git a/test/project_manifest.jl b/test/project_manifest.jl
index ab6a6cc99f..4f8e379f9c 100644
--- a/test/project_manifest.jl
+++ b/test/project_manifest.jl
@@ -7,11 +7,10 @@ using ..Utils
temp_pkg_dir() do project_path
@testset "test Project.toml manifest" begin
mktempdir() do dir
- path = abspath(joinpath(dirname(pathof(Pkg)), "../test", "test_packages", "monorepo"))
- cp(path, joinpath(dir, "monorepo"))
- cd(joinpath(dir, "monorepo")) do
+ path = copy_test_package(dir, "monorepo")
+ cd(path) do
with_current_env() do
- Pkg.develop(path="packages/B")
+ Pkg.develop(path = "packages/B")
end
end
# test subpackage instantiates/tests
@@ -25,7 +24,7 @@ temp_pkg_dir() do project_path
# to make those Manifest changes "stick" before adding Test.
cd(joinpath(dir, "monorepo", "packages", "C")) do
with_current_env() do
- Pkg.develop(path="../D") # add unregistered local dependency
+ Pkg.develop(path = "../D") # add unregistered local dependency
Pkg.test()
end
end
@@ -37,7 +36,7 @@ temp_pkg_dir() do project_path
@test haskey(pkgC.deps, "D")
cd(joinpath(dir, "monorepo")) do
with_current_env() do
- Pkg.develop(path="packages/C")
+ Pkg.develop(path = "packages/C")
Pkg.add("Test")
Pkg.test()
end
@@ -60,4 +59,4 @@ temp_pkg_dir() do project_path
end
end
-end # module
\ No newline at end of file
+end # module
diff --git a/test/registry.jl b/test/registry.jl
index 20d70ea038..352f4f3666 100644
--- a/test/registry.jl
+++ b/test/registry.jl
@@ -5,6 +5,9 @@ using Pkg, UUIDs, LibGit2, Test
using Pkg: depots1
using Pkg.REPLMode: pkgstr
using Pkg.Types: PkgError, manifest_info, PackageSpec, EnvCache
+using Pkg.Operations: get_pkg_deprecation_info
+
+using Dates: Second
using ..Utils
@@ -16,39 +19,48 @@ function setup_test_registries(dir = pwd())
for i in 1:2
regpath = joinpath(dir, "RegistryFoo$(i)")
mkpath(joinpath(regpath, "Example"))
- write(joinpath(regpath, "Registry.toml"), """
+ write(
+ joinpath(regpath, "Registry.toml"), """
name = "RegistryFoo"
uuid = "$(reg_uuids[i])"
repo = "https://github.com"
[packages]
$(pkg_uuids[i]) = { name = "Example$(i)", path = "Example" }
- """)
- write(joinpath(regpath, "Example", "Package.toml"), """
+ """
+ )
+ write(
+ joinpath(regpath, "Example", "Package.toml"), """
name = "Example$(i)"
uuid = "$(pkg_uuids[i])"
repo = "https://github.com/JuliaLang/Example.jl.git"
- """)
- write(joinpath(regpath, "Example", "Versions.toml"), """
+ """
+ )
+ write(
+ joinpath(regpath, "Example", "Versions.toml"), """
["0.5.1"]
git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
- """)
- write(joinpath(regpath, "Example", "Deps.toml"), """
- ["0.5"]
- julia = "0.6-1.0"
- """)
- write(joinpath(regpath, "Example", "Compat.toml"), """
+ """
+ )
+ write(
+ joinpath(regpath, "Example", "Deps.toml"), """
+ """
+ )
+ write(
+ joinpath(regpath, "Example", "Compat.toml"), """
["0.5"]
julia = "0.6-1.0"
- """)
+ """
+ )
git_init_and_commit(regpath)
end
+ return
end
function test_installed(registries)
- @test setdiff(
+ return @test setdiff(
UUID[r.uuid for r in registries],
UUID[r.uuid for r in Pkg.Registry.reachable_registries()]
- ) == UUID[]
+ ) == UUID[]
end
function is_pkg_available(pkg::PackageSpec)
@@ -62,231 +74,357 @@ end
function with_depot2(f)
Base.DEPOT_PATH[1:2] .= Base.DEPOT_PATH[2:-1:1]
f()
- Base.DEPOT_PATH[1:2] .= Base.DEPOT_PATH[2:-1:1]
+ return Base.DEPOT_PATH[1:2] .= Base.DEPOT_PATH[2:-1:1]
end
@testset "registries" begin
- temp_pkg_dir() do depot; mktempdir() do depot2
- insert!(Base.DEPOT_PATH, 2, depot2)
- # set up registries
- regdir = mktempdir()
- setup_test_registries(regdir)
- general_url = Pkg.Registry.DEFAULT_REGISTRIES[1].url
- general_path = Pkg.Registry.DEFAULT_REGISTRIES[1].path
- general_linked = Pkg.Registry.DEFAULT_REGISTRIES[1].linked
- General = RegistrySpec(name = "General", uuid = "23338594-aafe-5451-b93e-139f81909106",
- url = general_url, path = general_path, linked = general_linked)
- Foo1 = RegistrySpec(name = "RegistryFoo", uuid = "e9fceed0-5623-4384-aff0-6db4c442647a",
- url = joinpath(regdir, "RegistryFoo1"))
- Foo2 = RegistrySpec(name = "RegistryFoo", uuid = "a8e078ad-b4bd-4e09-a52f-c464826eef9d",
- url = joinpath(regdir, "RegistryFoo2"))
-
- # Packages in registries
- Example = PackageSpec(name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
- Example1 = PackageSpec(name = "Example1", uuid = UUID("c5f1542f-b8aa-45da-ab42-05303d706c66"))
- Example2 = PackageSpec(name = "Example2", uuid = UUID("d7897d3a-8e65-4b65-bdc8-28ce4e859565"))
-
- # Add General registry
- ## Pkg REPL
- for reg in ("General",
+ temp_pkg_dir() do depot
+ mktempdir() do depot2
+ insert!(Base.DEPOT_PATH, 2, depot2)
+ # set up registries
+ regdir = mktempdir()
+ setup_test_registries(regdir)
+ general_url = Pkg.Registry.DEFAULT_REGISTRIES[1].url
+ general_path = Pkg.Registry.DEFAULT_REGISTRIES[1].path
+ general_linked = Pkg.Registry.DEFAULT_REGISTRIES[1].linked
+ General = RegistrySpec(
+ name = "General", uuid = "23338594-aafe-5451-b93e-139f81909106",
+ url = general_url, path = general_path, linked = general_linked
+ )
+ Foo1 = RegistrySpec(
+ name = "RegistryFoo", uuid = "e9fceed0-5623-4384-aff0-6db4c442647a",
+ url = joinpath(regdir, "RegistryFoo1")
+ )
+ Foo2 = RegistrySpec(
+ name = "RegistryFoo", uuid = "a8e078ad-b4bd-4e09-a52f-c464826eef9d",
+ url = joinpath(regdir, "RegistryFoo2")
+ )
+
+ # Packages in registries
+ Example = PackageSpec(name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
+ Example1 = PackageSpec(name = "Example1", uuid = UUID("c5f1542f-b8aa-45da-ab42-05303d706c66"))
+ Example2 = PackageSpec(name = "Example2", uuid = UUID("d7897d3a-8e65-4b65-bdc8-28ce4e859565"))
+
+ # Add General registry
+ ## Pkg REPL
+ for reg in (
+ "General",
"23338594-aafe-5451-b93e-139f81909106",
- "General=23338594-aafe-5451-b93e-139f81909106")
- pkgstr("registry add $(reg)")
- test_installed([General])
+ "General=23338594-aafe-5451-b93e-139f81909106",
+ )
+ pkgstr("registry add $(reg)")
+ test_installed([General])
+
+ pkgstr("registry up $(reg)")
+ test_installed([General])
+ pkgstr("registry rm $(reg)")
+ test_installed([])
+ end
- pkgstr("registry up $(reg)")
+ ## Pkg REPL without argument
+ pkgstr("registry add")
test_installed([General])
- pkgstr("registry rm $(reg)")
+ pkgstr("registry rm General")
test_installed([])
- end
- ## Pkg REPL without argument
- pkgstr("registry add")
- test_installed([General])
- pkgstr("registry rm General")
- test_installed([])
-
- ## Registry API
- for reg in ("General",
+ ## Registry API
+ for reg in (
+ "General",
RegistrySpec("General"),
RegistrySpec(name = "General"),
RegistrySpec(name = "General", path = general_path),
RegistrySpec(uuid = "23338594-aafe-5451-b93e-139f81909106"),
- RegistrySpec(name = "General", uuid = "23338594-aafe-5451-b93e-139f81909106"))
- Pkg.Registry.add(reg)
- test_installed([General])
+ RegistrySpec(name = "General", uuid = "23338594-aafe-5451-b93e-139f81909106"),
+ )
+ Pkg.Registry.add(reg)
+ test_installed([General])
+ @test is_pkg_available(Example)
+ Pkg.Registry.update(reg)
+ test_installed([General])
+ Pkg.Registry.rm(reg)
+ test_installed([])
+ @test !is_pkg_available(Example)
+ end
+
+ # Add registry from URL/local path.
+ pkgstr("registry add $(Foo1.url)")
+ test_installed([Foo1])
+ @test is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
+ with_depot2(() -> pkgstr("registry add $(Foo2.url)"))
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+
+ # reset installed registries
+ rm.(joinpath.(Base.DEPOT_PATH[1:2], "registries"); force = true, recursive = true)
+
+ Registry.add(url = Foo1.url)
+ test_installed([Foo1])
+ @test is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
+ with_depot2(() -> Registry.add(url = Foo2.url))
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+
+
+ pkgstr("registry up $(Foo1.uuid)")
+ pkgstr("registry update $(Foo1.name)=$(Foo1.uuid)")
+ Registry.update(uuid = Foo1.uuid)
+ Registry.update(name = Foo1.name, uuid = Foo1.uuid)
+
+ test_installed([Foo1, Foo2])
+ pkgstr("registry rm $(Foo1.uuid)")
+ test_installed([Foo2])
+ @test !is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.add(url = Foo1.url)
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ pkgstr("registry rm $(Foo1.name)=$(Foo1.uuid)")
+ test_installed([Foo2])
+ @test !is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ with_depot2() do
+ pkgstr("registry rm $(Foo2.name)")
+ end
+ test_installed([])
+ @test !is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
+
+ Registry.add(url = Foo1.url)
+ with_depot2(() -> Registry.add(url = Foo2.url))
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.rm(uuid = Foo1.uuid)
+ test_installed([Foo2])
+ @test !is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.add(url = Foo1.url)
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.rm(name = Foo1.name, uuid = Foo1.uuid)
+ test_installed([Foo2])
+ @test !is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ with_depot2() do
+ Registry.rm(Foo2.name)
+ end
+ test_installed([])
+ @test !is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
+
+ # multiple registries on the same time
+ pkgstr("registry add General $(Foo1.url)")
+ with_depot2(() -> pkgstr("registry add $(Foo2.url)"))
+ test_installed([General, Foo1, Foo2])
@test is_pkg_available(Example)
- Pkg.Registry.update(reg)
- test_installed([General])
- Pkg.Registry.rm(reg)
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ pkgstr("registry up General $(Foo1.uuid) $(Foo2.name)=$(Foo2.uuid)")
+ pkgstr("registry rm General $(Foo1.uuid)")
+ with_depot2() do
+ pkgstr("registry rm General $(Foo2.name)=$(Foo2.uuid)")
+ end
test_installed([])
@test !is_pkg_available(Example)
- end
+ @test !is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
- # Add registry from URL/local path.
- pkgstr("registry add $(Foo1.url)")
- test_installed([Foo1])
- @test is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
- with_depot2(() -> pkgstr("registry add $(Foo2.url)"))
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
-
- # reset installed registries
- rm.(joinpath.(Base.DEPOT_PATH[1:2], "registries"); force=true, recursive=true)
-
- Registry.add(url = Foo1.url)
- test_installed([Foo1])
- @test is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
- with_depot2(() -> Registry.add(url = Foo2.url))
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
-
-
- pkgstr("registry up $(Foo1.uuid)")
- pkgstr("registry update $(Foo1.name)=$(Foo1.uuid)")
- Registry.update(uuid = Foo1.uuid)
- Registry.update(name = Foo1.name, uuid = Foo1.uuid)
-
- test_installed([Foo1, Foo2])
- pkgstr("registry rm $(Foo1.uuid)")
- test_installed([Foo2])
- @test !is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.add(url = Foo1.url)
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- pkgstr("registry rm $(Foo1.name)=$(Foo1.uuid)")
- test_installed([Foo2])
- @test !is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- with_depot2() do
- pkgstr("registry rm $(Foo2.name)")
- end
- test_installed([])
- @test !is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
-
- Registry.add(url = Foo1.url)
- with_depot2(() -> Registry.add(url = Foo2.url))
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.rm(uuid = Foo1.uuid)
- test_installed([Foo2])
- @test !is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.add(url = Foo1.url)
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.rm(name = Foo1.name, uuid = Foo1.uuid)
- test_installed([Foo2])
- @test !is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- with_depot2() do
- Registry.rm(Foo2.name)
- end
- test_installed([])
- @test !is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
-
- # multiple registries on the same time
- pkgstr("registry add General $(Foo1.url)")
- with_depot2(() -> pkgstr("registry add $(Foo2.url)"))
- test_installed([General, Foo1, Foo2])
- @test is_pkg_available(Example)
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- pkgstr("registry up General $(Foo1.uuid) $(Foo2.name)=$(Foo2.uuid)")
- pkgstr("registry rm General $(Foo1.uuid)")
- with_depot2() do
- pkgstr("registry rm General $(Foo2.name)=$(Foo2.uuid)")
- end
- test_installed([])
- @test !is_pkg_available(Example)
- @test !is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
-
- Registry.add([RegistrySpec("General"),
- RegistrySpec(url = Foo1.url)])
- with_depot2(() -> Registry.add([RegistrySpec(url = Foo2.url)]))
- test_installed([General, Foo1, Foo2])
- @test is_pkg_available(Example)
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.update([RegistrySpec("General"),
- RegistrySpec(uuid = Foo1.uuid),
- RegistrySpec(name = Foo2.name, uuid = Foo2.uuid)])
- Registry.rm([RegistrySpec("General"),
- RegistrySpec(uuid = Foo1.uuid),
- ])
- with_depot2() do
- Registry.rm(name = Foo2.name, uuid = Foo2.uuid)
- end
- test_installed([])
- @test !is_pkg_available(Example)
- @test !is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
+ Registry.add(
+ [
+ RegistrySpec("General"),
+ RegistrySpec(url = Foo1.url),
+ ]
+ )
+ with_depot2(() -> Registry.add([RegistrySpec(url = Foo2.url)]))
+ test_installed([General, Foo1, Foo2])
+ @test is_pkg_available(Example)
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.update(
+ [
+ RegistrySpec("General"),
+ RegistrySpec(uuid = Foo1.uuid),
+ RegistrySpec(name = Foo2.name, uuid = Foo2.uuid),
+ ]
+ )
+ Registry.rm(
+ [
+ RegistrySpec("General"),
+ RegistrySpec(uuid = Foo1.uuid),
+ ]
+ )
+ with_depot2() do
+ Registry.rm(name = Foo2.name, uuid = Foo2.uuid)
+ end
+ test_installed([])
+ @test !is_pkg_available(Example)
+ @test !is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
- # Trying to add a registry with the same name as existing one
- pkgstr("registry add $(Foo1.url)")
- @test_throws PkgError pkgstr("registry add $(Foo2.url)")
- @test_throws PkgError Registry.add([RegistrySpec(url = Foo2.url)])
+ # Trying to add a registry with the same name as existing one
+ pkgstr("registry add $(Foo1.url)")
+ @test_throws PkgError pkgstr("registry add $(Foo2.url)")
+ @test_throws PkgError Registry.add([RegistrySpec(url = Foo2.url)])
- end end
+ end
+ end
# issue #711
- temp_pkg_dir() do depot; mktempdir() do depot2
- insert!(Base.DEPOT_PATH, 2, depot2)
- Registry.add("General")
- with_depot2(() -> Registry.add("General"))
- # This add should not error because depot/Example and depot2/Example have the same uuid
- Pkg.add("Example")
- @test isinstalled((name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")))
- end end
+ temp_pkg_dir() do depot
+ mktempdir() do depot2
+ insert!(Base.DEPOT_PATH, 2, depot2)
+ Registry.add("General")
+ with_depot2(() -> Registry.add("General"))
+ # This add should not error because depot/Example and depot2/Example have the same uuid
+ Pkg.add("Example")
+ @test isinstalled((name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")))
+ end
+ end
# Test Registry.add and Registry.update with explicit depot values
- temp_pkg_dir() do depot_on_path; mktempdir() do depot_off_path
- # No registries anywhere
- @test isempty(Registry.reachable_registries())
- @test isempty(Registry.reachable_registries(; depots=[depot_off_path]))
-
- # After this, we have depots only in the depot that's off the path
- Registry.add("General"; depot=depot_off_path)
- @test isempty(Registry.reachable_registries())
- @test length(Registry.reachable_registries(; depots=[depot_off_path])) == 1
-
- # Test that `update()` with `depots` runs
- io = Base.BufferStream()
- Registry.update(; depots=[depot_off_path], io)
- closewrite(io)
- output = read(io, String)
- @test occursin("registry at `$(depot_off_path)", output)
-
- # Show that we can install `Example` off of that depot
- empty!(Base.DEPOT_PATH)
- push!(Base.DEPOT_PATH, depot_off_path)
- Pkg.add("Example")
- @test isinstalled((name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")))
- end end
+ temp_pkg_dir() do depot_on_path
+ mktempdir() do depot_off_path
+ # No registries anywhere
+ @test isempty(Registry.reachable_registries())
+ @test isempty(Registry.reachable_registries(; depots = [depot_off_path]))
+
+ # After this, we have depots only in the depot that's off the path
+ Registry.add("General"; depots = depot_off_path)
+ @test isempty(Registry.reachable_registries())
+ @test length(Registry.reachable_registries(; depots = [depot_off_path])) == 1
+
+ # Test that `update()` with `depots` runs
+ io = Base.BufferStream()
+ Registry.update(; depots = [depot_off_path], io, update_cooldown = Second(0))
+ closewrite(io)
+ output = read(io, String)
+ @test occursin("registry at `$(depot_off_path)", output)
+
+ # Show that we can install `Example` off of that depot
+ empty!(Base.DEPOT_PATH)
+ push!(Base.DEPOT_PATH, depot_off_path)
+ Pkg.add("Example")
+ @test isinstalled((name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")))
+ end
+ end
+
+ # Registry status. Mostly verify that it runs without errors but
+ # also make some sanity checks on the output. We can't really know
+ # whether it was installed as a git clone or a tarball, so that
+ # limits how much information we are guaranteed to get from
+ # status.
+ temp_pkg_dir() do depot
+ Registry.add("General")
+ buf = IOBuffer()
+ Pkg.Registry.status(buf)
+ status = String(take!(buf))
+ @test contains(status, "[23338594] General (https://github.com/JuliaRegistries/General.git)")
+ @test contains(status, "last updated")
+ end
# only clone default registry if there are no registries installed at all
- temp_pkg_dir() do depot1; mktempdir() do depot2
- append!(empty!(DEPOT_PATH), [depot1, depot2])
- Base.append_bundled_depot_path!(DEPOT_PATH)
- @test length(Pkg.Registry.reachable_registries()) == 0
- Pkg.add("Example")
- @test length(Pkg.Registry.reachable_registries()) == 1
- Pkg.rm("Example")
- DEPOT_PATH[1:2] .= DEPOT_PATH[2:-1:1]
- Pkg.add("Example") # should not trigger a clone of default registries
- @test length(Pkg.Registry.reachable_registries()) == 1
- end end
+ temp_pkg_dir() do depot1
+ mktempdir() do depot2
+ append!(empty!(DEPOT_PATH), [depot1, depot2])
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ @test length(Pkg.Registry.reachable_registries()) == 0
+ Pkg.add("Example")
+ @test length(Pkg.Registry.reachable_registries()) == 1
+ Pkg.rm("Example")
+ DEPOT_PATH[1:2] .= DEPOT_PATH[2:-1:1]
+ Pkg.add("Example") # should not trigger a clone of default registries
+ @test length(Pkg.Registry.reachable_registries()) == 1
+ end
+ end
+
+ @testset "deprecated package" begin
+ temp_pkg_dir() do depot
+ # Set up test registries with an extra deprecated package
+ regdir = mktempdir()
+ setup_test_registries(regdir)
+
+ # Add a deprecated package to the first registry
+ regpath = joinpath(regdir, "RegistryFoo1")
+ mkpath(joinpath(regpath, "DeprecatedExample"))
+
+ # Add the deprecated package to Registry.toml
+ registry_toml = read(joinpath(regpath, "Registry.toml"), String)
+ registry_toml = replace(
+ registry_toml,
+ "[packages]" =>
+ "[packages]\n11111111-1111-1111-1111-111111111111 = { name = \"DeprecatedExample\", path = \"DeprecatedExample\" }"
+ )
+ write(joinpath(regpath, "Registry.toml"), registry_toml)
+
+ # Create deprecated package with [metadata.deprecated] table
+ write(
+ joinpath(regpath, "DeprecatedExample", "Package.toml"), """
+ name = "DeprecatedExample"
+ uuid = "11111111-1111-1111-1111-111111111111"
+ repo = "https://github.com/test/DeprecatedExample.jl.git"
+
+ [metadata.deprecated]
+ reason = "This package is no longer maintained"
+ alternative = "Example"
+ """
+ )
+
+ write(
+ joinpath(regpath, "DeprecatedExample", "Versions.toml"), """
+ ["1.0.0"]
+ git-tree-sha1 = "1234567890abcdef1234567890abcdef12345678"
+ """
+ )
+
+ git_init_and_commit(regpath)
+
+ # Add the test registry
+ Pkg.Registry.add(url = regpath)
+
+ # Test that the package is marked as deprecated
+ registries = Pkg.Registry.reachable_registries()
+ reg_idx = findfirst(r -> r.name == "RegistryFoo", registries)
+ @test reg_idx !== nothing
+
+ reg = registries[reg_idx]
+ pkg_uuid = UUID("11111111-1111-1111-1111-111111111111")
+ @test haskey(reg, pkg_uuid)
+
+ pkg_entry = reg[pkg_uuid]
+ pkg_info = Pkg.Registry.registry_info(reg, pkg_entry)
+
+ # Test that deprecated info is loaded correctly
+ @test Pkg.Registry.isdeprecated(pkg_info)
+ @test pkg_info.deprecated !== nothing
+ @test pkg_info.deprecated["reason"] == "This package is no longer maintained"
+ @test pkg_info.deprecated["alternative"] == "Example"
+
+ # Test that non-deprecated package is not marked as deprecated
+ example1_uuid = UUID("c5f1542f-b8aa-45da-ab42-05303d706c66")
+ example1_entry = reg[example1_uuid]
+ example1_info = Pkg.Registry.registry_info(reg, example1_entry)
+ @test !Pkg.Registry.isdeprecated(example1_info)
+ @test example1_info.deprecated === nothing
+
+ # Test get_pkg_deprecation_info function
+ deprecated_pkg_spec = Pkg.Types.PackageSpec(name = "DeprecatedExample", uuid = pkg_uuid)
+ normal_pkg_spec = Pkg.Types.PackageSpec(name = "Example1", uuid = example1_uuid)
+
+ dep_info = get_pkg_deprecation_info(deprecated_pkg_spec, registries)
+ @test dep_info !== nothing
+ @test dep_info["reason"] == "This package is no longer maintained"
+ @test dep_info["alternative"] == "Example"
+
+ normal_info = get_pkg_deprecation_info(normal_pkg_spec, registries)
+ @test normal_info === nothing
+ end
+ end
@testset "yanking" begin
uuid = Base.UUID("7876af07-990d-54b4-ab0e-23690620f79a") # Example
@@ -297,7 +435,7 @@ end
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.0"
Pkg.update() # should not update Example
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.0"
- @test_throws Pkg.Resolve.ResolverError Pkg.add(PackageSpec(name="Example", version=v"0.5.1"))
+ @test_throws Pkg.Resolve.ResolverError Pkg.add(PackageSpec(name = "Example", version = v"0.5.1"))
Pkg.rm("Example")
Pkg.add("JSON") # depends on Example
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.0"
@@ -307,27 +445,34 @@ end
# Test that Example@0.5.1 can be obtained from an existing manifest
temp_pkg_dir() do env
Pkg.Registry.add(url = "https://github.com/JuliaRegistries/Test")
- write(joinpath(env, "Project.toml"),"""
+ write(
+ joinpath(env, "Project.toml"), """
[deps]
Example = "7876af07-990d-54b4-ab0e-23690620f79a"
- """)
- write(joinpath(env, "Manifest.toml"),"""
+ """
+ )
+ write(
+ joinpath(env, "Manifest.toml"), """
[[Example]]
git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
uuid = "7876af07-990d-54b4-ab0e-23690620f79a"
version = "0.5.1"
- """)
+ """
+ )
Pkg.activate(env)
Pkg.instantiate()
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.1"
end
temp_pkg_dir() do env
Pkg.Registry.add(url = "https://github.com/JuliaRegistries/Test")
- write(joinpath(env, "Project.toml"),"""
+ write(
+ joinpath(env, "Project.toml"), """
[deps]
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
- """)
- write(joinpath(env, "Manifest.toml"),"""
+ """
+ )
+ write(
+ joinpath(env, "Manifest.toml"), """
[[Example]]
git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
uuid = "7876af07-990d-54b4-ab0e-23690620f79a"
@@ -338,7 +483,8 @@ end
git-tree-sha1 = "1f7a25b53ec67f5e9422f1f551ee216503f4a0fa"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.20.0"
- """)
+ """
+ )
Pkg.activate(env)
Pkg.instantiate()
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.1"
@@ -347,40 +493,88 @@ end
end
if Pkg.Registry.registry_use_pkg_server()
-@testset "compressed registry" begin
- for unpack in (true, nothing)
- withenv("JULIA_PKG_UNPACK_REGISTRY" => unpack) do
- temp_pkg_dir(;linked_reg=false) do depot
- # These get restored by temp_pkg_dir
- Pkg.Registry.DEFAULT_REGISTRIES[1].path = nothing
- Pkg.Registry.DEFAULT_REGISTRIES[1].url = "https://github.com/JuliaRegistries/General.git"
-
- # This should not uncompress the registry
- Registry.add(uuid = UUID("23338594-aafe-5451-b93e-139f81909106"))
- @test isfile(joinpath(DEPOT_PATH[1], "registries", "General.tar.gz")) != something(unpack, false)
- Pkg.add("Example")
-
- # Write some bad git-tree-sha1 here so that Pkg.update will have to update the registry
- if unpack == true
- write(joinpath(DEPOT_PATH[1], "registries", "General", ".tree_info.toml"),
- """
- git-tree-sha1 = "179182faa6a80b3cf24445e6f55c954938d57941"
- """)
- else
- write(joinpath(DEPOT_PATH[1], "registries", "General.toml"),
- """
- git-tree-sha1 = "179182faa6a80b3cf24445e6f55c954938d57941"
- uuid = "23338594-aafe-5451-b93e-139f81909106"
- path = "General.tar.gz"
- """)
+ @testset "compressed registry" begin
+ for unpack in (true, nothing)
+ withenv("JULIA_PKG_UNPACK_REGISTRY" => unpack) do
+ temp_pkg_dir(; linked_reg = false) do depot
+ # These get restored by temp_pkg_dir
+ Pkg.Registry.DEFAULT_REGISTRIES[1].path = nothing
+ Pkg.Registry.DEFAULT_REGISTRIES[1].url = "https://github.com/JuliaRegistries/General.git"
+
+ # This should not uncompress the registry
+ Registry.add(uuid = UUID("23338594-aafe-5451-b93e-139f81909106"))
+ @test isfile(joinpath(DEPOT_PATH[1], "registries", "General.tar.gz")) != something(unpack, false)
+ Pkg.add("Example")
+
+ # Write some bad git-tree-sha1 here so that Pkg.update will have to update the registry
+ if unpack == true
+ write(
+ joinpath(DEPOT_PATH[1], "registries", "General", ".tree_info.toml"),
+ """
+ git-tree-sha1 = "179182faa6a80b3cf24445e6f55c954938d57941"
+ """
+ )
+ else
+ write(
+ joinpath(DEPOT_PATH[1], "registries", "General.toml"),
+ """
+ git-tree-sha1 = "179182faa6a80b3cf24445e6f55c954938d57941"
+ uuid = "23338594-aafe-5451-b93e-139f81909106"
+ path = "General.tar.gz"
+ """
+ )
+ end
+ Pkg.update()
+ Pkg.Registry.rm(name = "General")
+ @test isempty(filter(x -> x != "CACHEDIR.TAG", readdir(joinpath(DEPOT_PATH[1], "registries"))))
end
- Pkg.update()
- Pkg.Registry.rm(name = "General")
- @test isempty(readdir(joinpath(DEPOT_PATH[1], "registries")))
end
end
end
end
+
+@testset "gc runs git gc on registries" begin
+ # Only run this test if git is available
+ if Sys.which("git") !== nothing
+ temp_pkg_dir() do depot
+ # Set up a test registry that is a git repository
+ regdir = mktempdir()
+ regpath = joinpath(regdir, "TestReg")
+ mkpath(joinpath(regpath, "TestPkg"))
+ write(
+ joinpath(regpath, "Registry.toml"), """
+ name = "TestReg"
+ uuid = "$(uuid4())"
+ repo = "https://github.com/test/test.git"
+ """
+ )
+ write(
+ joinpath(regpath, "TestPkg", "Package.toml"), """
+ name = "TestPkg"
+ uuid = "$(uuid4())"
+ repo = "https://github.com/test/TestPkg.git"
+ """
+ )
+ git_init_and_commit(regpath)
+
+ # Install the registry
+ target_reg_path = joinpath(depot, "registries", "TestReg")
+ mkpath(dirname(target_reg_path))
+ cp(regpath, target_reg_path)
+
+ # Verify the registry is a git repository
+ @test isdir(joinpath(target_reg_path, ".git"))
+
+ # Run Pkg.gc() - it should run git gc on the registry without errors
+ # We can't easily verify that git gc was run, but we can verify
+ # that Pkg.gc() completes without errors
+ @test_nowarn Pkg.gc(verbose = false)
+
+ # The registry should still exist after gc
+ @test isdir(target_reg_path)
+ @test isdir(joinpath(target_reg_path, ".git"))
+ end
+ end
end
end # module
diff --git a/test/repl.jl b/test/repl.jl
index b0d729dd92..6256701caa 100644
--- a/test/repl.jl
+++ b/test/repl.jl
@@ -25,14 +25,28 @@ using ..Utils
@test_throws PkgError pkg"helpadd"
end
+@testset "accidental" begin
+ isolate() do
+ pkg"]?"
+ pkg"] ?"
+ pkg"]st"
+ pkg"] st"
+ pkg"]st -m"
+ pkg"] st -m"
+ pkg"]" # noop
+ end
+end
+
temp_pkg_dir() do project_path
- with_pkg_env(project_path; change_dir=true) do;
+ with_pkg_env(project_path; change_dir = true) do;
pkg"generate HelloWorld"
LibGit2.close((LibGit2.init(".")))
cd("HelloWorld")
@test_throws PkgError pkg"dev Example#blergh"
+ @test_throws PkgError pkg"add ÖÖÖ"
+
@test_throws PkgError pkg"generate 2019Julia"
pkg"generate Foo"
pkg"dev ./Foo"
@@ -40,117 +54,124 @@ temp_pkg_dir() do project_path
@test_throws PkgError pkg"dev ./Foo"
###
mv(joinpath("Foo", "src", "Foo2.jl"), joinpath("Foo", "src", "Foo.jl"))
- write(joinpath("Foo", "Project.toml"), """
- name = "Foo"
- """
+ write(
+ joinpath("Foo", "Project.toml"), """
+ name = "Foo"
+ """
)
@test_throws PkgError pkg"dev ./Foo"
- write(joinpath("Foo", "Project.toml"), """
- uuid = "b7b78b08-812d-11e8-33cd-11188e330cbe"
- """
+ write(
+ joinpath("Foo", "Project.toml"), """
+ uuid = "b7b78b08-812d-11e8-33cd-11188e330cbe"
+ """
)
@test_throws PkgError pkg"dev ./Foo"
end
end
-temp_pkg_dir(;rm=false) do project_path; cd(project_path) do;
- tmp_pkg_path = mktempdir()
-
- pkg"activate ."
- pkg"add Example@0.5.3"
- @test isinstalled(TEST_PKG)
- v = Pkg.dependencies()[TEST_PKG.uuid].version
- @test v == v"0.5.3"
- pkg"rm Example"
- pkg"add Example, Random"
- pkg"rm Example Random"
- pkg"add Example,Random"
- pkg"rm Example,Random"
- pkg"add Example#master"
- pkg"rm Example"
- pkg"add https://github.com/JuliaLang/Example.jl#master"
-
- ## TODO: figure out how to test these in CI
- # pkg"rm Example"
- # pkg"add git@github.com:JuliaLang/Example.jl.git"
- # pkg"rm Example"
- # pkg"add \"git@github.com:JuliaLang/Example.jl.git\"#master"
- # pkg"rm Example"
-
- # Test upgrade --fixed doesn't change the tracking (https://github.com/JuliaLang/Pkg.jl/issues/434)
- entry = Pkg.Types.manifest_info(EnvCache().manifest, TEST_PKG.uuid)
- @test entry.repo.rev == "master"
- pkg"up --fixed"
- entry = Pkg.Types.manifest_info(EnvCache().manifest, TEST_PKG.uuid)
- @test entry.repo.rev == "master"
-
- pkg"test Example"
- @test isinstalled(TEST_PKG)
- @test Pkg.dependencies()[TEST_PKG.uuid].version > v
+temp_pkg_dir(; rm = false) do project_path
+ cd(project_path) do;
+ tmp_pkg_path = mktempdir()
- pkg2 = "UnregisteredWithProject"
- pkg2_uuid = UUID("58262bb0-2073-11e8-3727-4fe182c12249")
- p2 = git_init_package(tmp_pkg_path, joinpath(@__DIR__, "test_packages/$pkg2"))
- Pkg.REPLMode.pkgstr("add $p2")
- Pkg.REPLMode.pkgstr("pin $pkg2")
- # FIXME: this confuses the precompile logic to know what is going on with the user
- # FIXME: why isn't this testing the Pkg after importing, rather than after freeing it
- #@eval import Example
- #@eval import $(Symbol(pkg2))
- @test Pkg.dependencies()[pkg2_uuid].version == v"0.1.0"
- Pkg.REPLMode.pkgstr("free $pkg2")
- @test_throws PkgError Pkg.REPLMode.pkgstr("free $pkg2")
- Pkg.test("UnregisteredWithProject")
-
- write(joinpath(p2, "Project.toml"), """
- name = "UnregisteredWithProject"
- uuid = "58262bb0-2073-11e8-3727-4fe182c12249"
- version = "0.2.0"
- """
- )
- LibGit2.with(LibGit2.GitRepo, p2) do repo
- LibGit2.add!(repo, "*")
- LibGit2.commit(repo, "bump version"; author = TEST_SIG, committer=TEST_SIG)
- pkg"update"
- @test Pkg.dependencies()[pkg2_uuid].version == v"0.2.0"
- Pkg.REPLMode.pkgstr("rm $pkg2")
-
- c = LibGit2.commit(repo, "empty commit"; author = TEST_SIG, committer=TEST_SIG)
- c_hash = LibGit2.GitHash(c)
- Pkg.REPLMode.pkgstr("add $p2#$c")
- end
+ pkg"activate ."
+ pkg"add Example@0.5.3"
+ @test isinstalled(TEST_PKG)
+ v = Pkg.dependencies()[TEST_PKG.uuid].version
+ @test v == v"0.5.3"
+ pkg"rm Example"
+ pkg"add Example, Random"
+ pkg"rm Example Random"
+ pkg"add Example,Random"
+ pkg"rm Example,Random"
+ # Test leading whitespace handling (issue #4239)
+ pkg" add Example, Random"
+ pkg"rm Example Random"
+ pkg"add Example#master"
+ pkg"rm Example"
+ pkg"add https://github.com/JuliaLang/Example.jl#master"
+
+ ## TODO: figure out how to test these in CI
+ # pkg"rm Example"
+ # pkg"add git@github.com:JuliaLang/Example.jl.git"
+ # pkg"rm Example"
+ # pkg"add \"git@github.com:JuliaLang/Example.jl.git\"#master"
+ # pkg"rm Example"
+
+ # Test upgrade --fixed doesn't change the tracking (https://github.com/JuliaLang/Pkg.jl/issues/434)
+ entry = Pkg.Types.manifest_info(EnvCache().manifest, TEST_PKG.uuid)
+ @test entry.repo.rev == "master"
+ pkg"up --fixed"
+ entry = Pkg.Types.manifest_info(EnvCache().manifest, TEST_PKG.uuid)
+ @test entry.repo.rev == "master"
+
+ pkg"test Example"
+ @test isinstalled(TEST_PKG)
+ @test Pkg.dependencies()[TEST_PKG.uuid].version > v
+
+ pkg2 = "UnregisteredWithProject"
+ pkg2_uuid = UUID("58262bb0-2073-11e8-3727-4fe182c12249")
+ p2 = git_init_package(tmp_pkg_path, joinpath(@__DIR__, "test_packages/$pkg2"))
+ Pkg.REPLMode.pkgstr("add $p2")
+ Pkg.REPLMode.pkgstr("pin $pkg2")
+ # FIXME: this confuses the precompile logic to know what is going on with the user
+ # FIXME: why isn't this testing the Pkg after importing, rather than after freeing it
+ #@eval import Example
+ #@eval import $(Symbol(pkg2))
+ @test Pkg.dependencies()[pkg2_uuid].version == v"0.1.0"
+ Pkg.REPLMode.pkgstr("free $pkg2")
+ @test_throws PkgError Pkg.REPLMode.pkgstr("free $pkg2")
+ Pkg.test("UnregisteredWithProject")
+
+ write(
+ joinpath(p2, "Project.toml"), """
+ name = "UnregisteredWithProject"
+ uuid = "58262bb0-2073-11e8-3727-4fe182c12249"
+ version = "0.2.0"
+ """
+ )
+ LibGit2.with(LibGit2.GitRepo, p2) do repo
+ LibGit2.add!(repo, "*")
+ LibGit2.commit(repo, "bump version"; author = TEST_SIG, committer = TEST_SIG)
+ pkg"update"
+ @test Pkg.dependencies()[pkg2_uuid].version == v"0.2.0"
+ Pkg.REPLMode.pkgstr("rm $pkg2")
+
+ c = LibGit2.commit(repo, "empty commit"; author = TEST_SIG, committer = TEST_SIG)
+ c_hash = LibGit2.GitHash(c)
+ Pkg.REPLMode.pkgstr("add $p2#$c")
+ end
- mktempdir() do tmp_dev_dir
- withenv("JULIA_PKG_DEVDIR" => tmp_dev_dir) do
- pkg"develop Example"
- pkg"develop Example,PackageCompiler"
- pkg"develop Example PackageCompiler"
-
- # Copy the manifest + project and see that we can resolve it in a new environment
- # and get all the packages installed
- proj = read("Project.toml", String)
- manifest = read("Manifest.toml", String)
- cd_tempdir() do tmp
- old_depot = copy(DEPOT_PATH)
- try
- empty!(DEPOT_PATH)
- write("Project.toml", proj)
- write("Manifest.toml", manifest)
- mktempdir() do depot_dir
- pushfirst!(DEPOT_PATH, depot_dir)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- pkg"instantiate"
- @test Pkg.dependencies()[pkg2_uuid].version == v"0.2.0"
- end
- finally
- empty!(DEPOT_PATH)
- append!(DEPOT_PATH, old_depot)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- end
- end # cd_tempdir
- end # withenv
- end # mktempdir
-end # cd
+ mktempdir() do tmp_dev_dir
+ withenv("JULIA_PKG_DEVDIR" => tmp_dev_dir) do
+ pkg"develop Example"
+ pkg"develop Example,PackageCompiler"
+ pkg"develop Example PackageCompiler"
+
+ # Copy the manifest + project and see that we can resolve it in a new environment
+ # and get all the packages installed
+ proj = read("Project.toml", String)
+ manifest = read("Manifest.toml", String)
+ cd_tempdir() do tmp
+ old_depot = copy(DEPOT_PATH)
+ try
+ empty!(DEPOT_PATH)
+ write("Project.toml", proj)
+ write("Manifest.toml", manifest)
+ mktempdir() do depot_dir
+ pushfirst!(DEPOT_PATH, depot_dir)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ pkg"instantiate"
+ @test Pkg.dependencies()[pkg2_uuid].version == v"0.2.0"
+ end
+ finally
+ empty!(DEPOT_PATH)
+ append!(DEPOT_PATH, old_depot)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ end
+ end # cd_tempdir
+ end # withenv
+ end # mktempdir
+ end # cd
end # temp_pkg_dir
# issue #904: Pkg.status within a git repo
@@ -162,65 +183,66 @@ temp_pkg_dir() do path
Pkg.REPLMode.pkgstr("status") # should not throw
end
-temp_pkg_dir() do project_path; cd(project_path) do
- mktempdir() do tmp
- mktempdir() do depot_dir
- old_depot = copy(DEPOT_PATH)
- try
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depot_dir)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- withenv("JULIA_PKG_DEVDIR" => tmp) do
- # Test an unregistered package
- p1_path = joinpath(@__DIR__, "test_packages", "UnregisteredWithProject")
- p1_new_path = joinpath(tmp, "UnregisteredWithProject")
- cp(p1_path, p1_new_path)
- Pkg.REPLMode.pkgstr("develop $(p1_new_path)")
- Pkg.REPLMode.pkgstr("build; precompile")
- @test realpath(Base.find_package("UnregisteredWithProject")) == realpath(joinpath(p1_new_path, "src", "UnregisteredWithProject.jl"))
- @test Pkg.dependencies()[UUID("58262bb0-2073-11e8-3727-4fe182c12249")].version == v"0.1.0"
- Pkg.test("UnregisteredWithProject")
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ mktempdir() do tmp
+ mktempdir() do depot_dir
+ old_depot = copy(DEPOT_PATH)
+ try
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depot_dir)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ withenv("JULIA_PKG_DEVDIR" => tmp) do
+ # Test an unregistered package
+ p1_path = joinpath(@__DIR__, "test_packages", "UnregisteredWithProject")
+ p1_new_path = joinpath(tmp, "UnregisteredWithProject")
+ cp(p1_path, p1_new_path)
+ Pkg.REPLMode.pkgstr("develop $(p1_new_path)")
+ Pkg.REPLMode.pkgstr("build; precompile")
+ @test realpath(Base.find_package("UnregisteredWithProject")) == realpath(joinpath(p1_new_path, "src", "UnregisteredWithProject.jl"))
+ @test Pkg.dependencies()[UUID("58262bb0-2073-11e8-3727-4fe182c12249")].version == v"0.1.0"
+ Pkg.test("UnregisteredWithProject")
+ end
+ finally
+ empty!(DEPOT_PATH)
+ append!(DEPOT_PATH, old_depot)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
end
- finally
- empty!(DEPOT_PATH)
- append!(DEPOT_PATH, old_depot)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- end
- end # withenv
- end # mktempdir
- # nested
- mktempdir() do other_dir
- mktempdir() do tmp;
- cd(tmp) do
- pkg"generate HelloWorld"
- cd("HelloWorld") do
+ end # withenv
+ end # mktempdir
+ # nested
+ mktempdir() do other_dir
+ mktempdir() do tmp
+ cd(tmp) do
+ pkg"generate HelloWorld"
+ cd("HelloWorld") do
+ with_current_env() do
+ uuid1 = Pkg.generate("SubModule1")["SubModule1"]
+ uuid2 = Pkg.generate("SubModule2")["SubModule2"]
+ pkg"develop ./SubModule1"
+ mkdir("tests")
+ cd("tests")
+ pkg"develop ../SubModule2"
+ @test Pkg.dependencies()[uuid1].version == v"0.1.0"
+ @test Pkg.dependencies()[uuid2].version == v"0.1.0"
+ # make sure paths to SubModule1 and SubModule2 are relative
+ manifest = Pkg.Types.Context().env.manifest
+ @test manifest[uuid1].path == "SubModule1"
+ @test manifest[uuid2].path == "SubModule2"
+ end
+ end
+ cp("HelloWorld", joinpath(other_dir, "HelloWorld"))
+ cd(joinpath(other_dir, "HelloWorld"))
with_current_env() do
- uuid1 = Pkg.generate("SubModule1")["SubModule1"]
- uuid2 = Pkg.generate("SubModule2")["SubModule2"]
- pkg"develop ./SubModule1"
- mkdir("tests")
- cd("tests")
- pkg"develop ../SubModule2"
- @test Pkg.dependencies()[uuid1].version == v"0.1.0"
- @test Pkg.dependencies()[uuid2].version == v"0.1.0"
- # make sure paths to SubModule1 and SubModule2 are relative
- manifest = Pkg.Types.Context().env.manifest
- @test manifest[uuid1].path == "SubModule1"
- @test manifest[uuid2].path == "SubModule2"
+ # Check that these didn't generate absolute paths in the Manifest by copying
+ # to another directory
+ @test Base.find_package("SubModule1") == joinpath(pwd(), "SubModule1", "src", "SubModule1.jl")
+ @test Base.find_package("SubModule2") == joinpath(pwd(), "SubModule2", "src", "SubModule2.jl")
end
end
- cp("HelloWorld", joinpath(other_dir, "HelloWorld"))
- cd(joinpath(other_dir, "HelloWorld"))
- with_current_env() do
- # Check that these didn't generate absolute paths in the Manifest by copying
- # to another directory
- @test Base.find_package("SubModule1") == joinpath(pwd(), "SubModule1", "src", "SubModule1.jl")
- @test Base.find_package("SubModule2") == joinpath(pwd(), "SubModule2", "src", "SubModule2.jl")
- end
end
end
- end
-end # cd
+ end # cd
end # temp_pkg_dir
# activate
@@ -247,7 +269,7 @@ temp_pkg_dir() do project_path
#=@test_logs (:info, r"activating new environment at ")))=# pkg"activate --shared Foo" # activate shared Foo
@test Base.active_project() == joinpath(Pkg.envdir(), "Foo", "Project.toml")
pkg"activate ."
- rm("Foo"; force=true, recursive=true)
+ rm("Foo"; force = true, recursive = true)
pkg"activate Foo" # activate path from developed Foo
@test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
pkg"activate ."
@@ -300,207 +322,285 @@ end
test_complete(s) = REPLExt.completions(s, lastindex(s))
apply_completion(str) = begin
c, r, s = test_complete(str)
- str[1:prevind(str, first(r))]*first(c)
+ str[1:prevind(str, first(r))] * first(c)
end
# Autocompletions
-temp_pkg_dir() do project_path; cd(project_path) do
- @testset "tab completion while offline" begin
- # No registry and no network connection
- Pkg.offline()
- pkg"activate ."
- c, r = test_complete("add Exam")
- @test isempty(c)
- Pkg.offline(false)
- # Existing registry but no network connection
- pkg"registry add General" # instantiate the `General` registry to complete remote package names
- Pkg.offline(true)
- c, r = test_complete("add Exam")
- @test "Example" in c
- Pkg.offline(false)
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ @testset "tab completion while offline" begin
+ # No registry and no network connection
+ Pkg.offline()
+ pkg"activate ."
+ c, r = test_complete("add Exam")
+ @test isempty(c)
+ Pkg.offline(false)
+ # Existing registry but no network connection
+ pkg"registry add General" # instantiate the `General` registry to complete remote package names
+ Pkg.offline(true)
+ c, r = test_complete("add Exam")
+ @test "Example" in c
+ Pkg.offline(false)
+ end
end
-end end
+end
-temp_pkg_dir() do project_path; cd(project_path) do
- @testset "tab completion" begin
- pkg"registry add General" # instantiate the `General` registry to complete remote package names
- pkg"activate ."
- c, r = test_complete("add Exam")
- @test "Example" in c
- c, r = test_complete("rm Exam")
- @test isempty(c)
-
- Pkg.REPLMode.pkgstr("develop $(joinpath(@__DIR__, "test_packages", "PackageWithDependency"))")
-
- c, r = test_complete("rm PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm -p PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm --project PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm Exam")
- @test isempty(c)
- c, r = test_complete("rm -p Exam")
- @test isempty(c)
- c, r = test_complete("rm --project Exam")
- @test isempty(c)
- c, r = test_complete("free PackageWithDep")
- @test "PackageWithDependency" in c # given this was devved
-
- c, r = test_complete("rm -m PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm --manifest PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm -m Exam")
- @test "Example" in c
- c, r = test_complete("rm --manifest Exam")
- @test "Example" in c
- c, r = test_complete("why PackageWithDep")
- @test "PackageWithDependency" in c
-
- c, r = test_complete("rm PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm Exam")
- @test isempty(c)
- c, r = test_complete("rm -m Exam")
- c, r = test_complete("rm -m Exam")
- @test "Example" in c
-
- pkg"add Example"
- c, r = test_complete("rm Exam")
- @test "Example" in c
- c, r = test_complete("up --man")
- @test "--manifest" in c
- c, r = test_complete("rem")
- @test "remove" in c
- @test apply_completion("rm E") == "rm Example"
- @test apply_completion("add Exampl") == "add Example"
- c, r = test_complete("free Exa")
- @test isempty(c) # given this was added i.e. not fixed
- pkg"pin Example"
- c, r = test_complete("free Exa")
- @test "Example" in c
- pkg"free Example"
-
- # help mode
- @test apply_completion("?ad") == "?add"
- @test apply_completion("?act") == "?activate"
- @test apply_completion("? ad") == "? add"
- @test apply_completion("? act") == "? activate"
-
- # stdlibs
- c, r = test_complete("add Stat")
- @test "Statistics" in c
- c, r = test_complete("add Lib")
- @test "LibGit2" in c
- c, r = test_complete("add REPL")
- @test "REPL" in c
-
- # upper bounded
- c, r = test_complete("add Chu")
- @test !("Chunks" in c)
-
- # local paths
- mkpath("testdir/foo/bar")
- c, r = test_complete("add ")
- @test Sys.iswindows() ? ("testdir\\\\" in c) : ("testdir/" in c)
- @test apply_completion("add tes") == (Sys.iswindows() ? "add testdir\\\\" : "add testdir/")
- @test apply_completion("add ./tes") == (Sys.iswindows() ? "add ./testdir\\\\" : "add ./testdir/")
- c, r = test_complete("dev ./")
- @test (Sys.iswindows() ? ("testdir\\\\" in c) : ("testdir/" in c))
-
- # complete subdirs
- c, r = test_complete("add testdir/f")
- @test Sys.iswindows() ? ("foo\\\\" in c) : ("foo/" in c)
- @test apply_completion("add testdir/f") == (Sys.iswindows() ? "add testdir/foo\\\\" : "add testdir/foo/")
- # dont complete files
- touch("README.md")
- c, r = test_complete("add RE")
- @test !("README.md" in c)
-
- # Expand homedir and
- if !Sys.iswindows()
- dirname = "JuliaPkgTest744a757c-d313-11e9-1cac-118368d5977a"
- tildepath = "~/$dirname"
- try
- mkdir(expanduser(tildepath))
- c, r = test_complete("dev ~/JuliaPkgTest744a75")
- @test joinpath(homedir(), dirname, "") in c
- finally
- rm(expanduser(tildepath); force = true)
- end
- c, r = test_complete("dev ~")
- @test joinpath(homedir(), "") in c
-
- # nested directories
- nested_dirs = "foo/bar/baz"
- tildepath = "~/$nested_dirs"
- try
- mkpath(expanduser(tildepath))
- c, r = test_complete("dev ~/foo/bar/b")
- @test joinpath(homedir(), nested_dirs, "") in c
- finally
- rm(expanduser(tildepath); force = true)
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ @testset "tab completion" begin
+ pkg"registry add General" # instantiate the `General` registry to complete remote package names
+ pkg"activate ."
+ c, r = test_complete("add Exam")
+ @test "Example" in c
+ c, r = test_complete("rm Exam")
+ @test isempty(c)
+
+ Pkg.REPLMode.pkgstr("develop $(joinpath(@__DIR__, "test_packages", "PackageWithDependency"))")
+
+ c, r = test_complete("rm PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm -p PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm --project PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm Exam")
+ @test isempty(c)
+ c, r = test_complete("rm -p Exam")
+ @test isempty(c)
+ c, r = test_complete("rm --project Exam")
+ @test isempty(c)
+ c, r = test_complete("free PackageWithDep")
+ @test "PackageWithDependency" in c # given this was devved
+
+ c, r = test_complete("rm -m PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm --manifest PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm -m Exam")
+ @test "Example" in c
+ c, r = test_complete("rm --manifest Exam")
+ @test "Example" in c
+ c, r = test_complete("why PackageWithDep")
+ @test "PackageWithDependency" in c
+
+ c, r = test_complete("rm PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm Exam")
+ @test isempty(c)
+ c, r = test_complete("rm -m Exam")
+ c, r = test_complete("rm -m Exam")
+ @test "Example" in c
+
+ pkg"add Example"
+ c, r = test_complete("rm Exam")
+ @test "Example" in c
+ c, r = test_complete("up --man")
+ @test "--manifest" in c
+ c, r = test_complete("rem")
+ @test "remove" in c
+ @test apply_completion("rm E") == "rm Example"
+ @test apply_completion("add Exampl") == "add Example"
+ c, r = test_complete("free Exa")
+ @test isempty(c) # given this was added i.e. not fixed
+ pkg"pin Example"
+ c, r = test_complete("free Exa")
+ @test "Example" in c
+ pkg"free Example"
+
+ # Test for issue #59829 - completion with only trailing space should work
+ # When typing "rm " with Example installed, should complete to "rm Example"
+ c, r = test_complete("rm ")
+ @test "Example" in c
+ @test apply_completion("rm ") == "rm Example"
+
+ # Test deduplication of already-specified packages (issue #4098)
+ # After typing "rm Example ", typing "E" should not suggest Example again
+ c, r = test_complete("rm Example E")
+ @test !("Example" in c) # Example already specified, should not suggest again
+
+ # Test with package@version syntax - should still deduplicate
+ c, r = test_complete("rm Example@0.5 Exam")
+ @test !("Example" in c) # Example already specified with version
+
+ # Test with multiple packages already specified
+ c, r = test_complete("rm Example PackageWithDependency E")
+ @test !("Example" in c) # Both already specified
+ @test !("PackageWithDependency" in c)
+
+ # Test deduplication works for add as well
+ c, r = test_complete("add Example E")
+ @test !("Example" in c) # Example already specified for add command
+
+ # help mode
+ @test apply_completion("?ad") == "?add"
+ @test apply_completion("?act") == "?activate"
+ @test apply_completion("? ad") == "? add"
+ @test apply_completion("? act") == "? activate"
+
+ # stdlibs
+ c, r = test_complete("add Stat")
+ @test "Statistics" in c
+ c, r = test_complete("add Lib")
+ @test "LibGit2" in c
+ c, r = test_complete("add REPL")
+ @test "REPL" in c
+
+ # upper bounded
+ c, r = test_complete("add Chu")
+ @test !("Chunks" in c)
+
+ # local paths
+ mkpath("testdir/foo/bar")
+ c, r = test_complete("add ")
+ @test Sys.iswindows() ? ("testdir\\\\" in c) : ("testdir/" in c)
+ @test apply_completion("add tes") == (Sys.iswindows() ? "add testdir\\\\" : "add testdir/")
+ @test apply_completion("add ./tes") == (Sys.iswindows() ? "add ./testdir\\\\" : "add ./testdir/")
+ c, r = test_complete("dev ./")
+ @test (Sys.iswindows() ? ("testdir\\\\" in c) : ("testdir/" in c))
+
+ # complete subdirs
+ c, r = test_complete("add testdir/f")
+ @test Sys.iswindows() ? ("foo\\\\" in c) : ("foo/" in c)
+ @test apply_completion("add testdir/f") == (Sys.iswindows() ? "add testdir/foo\\\\" : "add testdir/foo/")
+ # don't complete files
+ touch("README.md")
+ c, r = test_complete("add RE")
+ @test !("README.md" in c)
+
+ # Expand homedir (~) in path completions
+ if !Sys.iswindows()
+ dirname = "JuliaPkgTest744a757c-d313-11e9-1cac-118368d5977a"
+ tildepath = "~/$dirname"
+ try
+ mkdir(expanduser(tildepath))
+ c, r = test_complete("dev ~/JuliaPkgTest744a75")
+ @test joinpath(homedir(), dirname, "") in c
+ finally
+ rm(expanduser(tildepath); force = true)
+ end
+ c, r = test_complete("dev ~")
+ @test joinpath(homedir(), "") in c
+
+ # nested directories
+ nested_dirs = "foo/bar/baz"
+ tildepath = "~/$nested_dirs"
+ try
+ mkpath(expanduser(tildepath))
+ c, r = test_complete("dev ~/foo/bar/b")
+ @test joinpath(homedir(), nested_dirs, "") in c
+ finally
+ rm(expanduser(tildepath); force = true)
+ end
end
- end
- # activate
- pkg"activate --shared FooBar"
- pkg"add Example"
- pkg"activate ."
- c, r = test_complete("activate --shared ")
- @test "FooBar" in c
-
- # invalid options
- c, r = test_complete("rm -rf ")
- @test isempty(c)
-
- # parse errors should not throw
- _ = test_complete("add \"Foo")
- # invalid option should not throw
- _ = test_complete("add -z Foo")
- _ = test_complete("add --dontexist Foo")
- end # testset
-end end
-
-temp_pkg_dir() do project_path; cd(project_path) do
- mktempdir() do tmp
- cp(joinpath(@__DIR__, "test_packages", "BigProject"), joinpath(tmp, "BigProject"))
- cd(joinpath(tmp, "BigProject"))
- with_current_env() do
- # the command below also tests multiline input
- pkg"""
- dev ./RecursiveDep2
- dev ./RecursiveDep
- dev ./SubModule
- dev ./SubModule2
- add Random
- add Example
- add JSON
- build
- """
- @eval using BigProject
- pkg"build BigProject"
- @test_throws PkgError pkg"add BigProject"
- # the command below also tests multiline input
- Pkg.REPLMode.pkgstr("""
- test SubModule
- test SubModule2
- test BigProject
- test
- """)
- json_uuid = Pkg.project().dependencies["JSON"]
- current_json = Pkg.dependencies()[json_uuid].version
- old_project = read("Project.toml", String)
- Pkg.compat("JSON", "0.18.0")
- pkg"up"
- @test Pkg.dependencies()[json_uuid].version.minor == 18
- write("Project.toml", old_project)
- pkg"up"
- @test Pkg.dependencies()[json_uuid].version == current_json
+ # activate
+ pkg"activate --shared FooBar"
+ pkg"add Example"
+ pkg"activate ."
+ c, r = test_complete("activate --shared ")
+ @test "FooBar" in c
+
+ # invalid options
+ c, r = test_complete("rm -rf ")
+ @test isempty(c)
+
+ # parse errors should not throw
+ _ = test_complete("add \"Foo")
+ # invalid option should not throw
+ _ = test_complete("add -z Foo")
+ _ = test_complete("add --dontexist Foo")
+
+ # Test the fix for issue #58690 - completion should return proper types
+ # This ensures Pkg completions return Vector{String}, Region, Bool format
+ c, r = test_complete("add Example")
+ @test c isa Vector{String}
+ @test r isa UnitRange{Int} # This gets converted to Region in the completion provider
+
+ # Test completion at end of a complete word doesn't crash
+ c, r = test_complete("add Example")
+ @test !isempty(c) # Should have completions
+
+ # Test the completion provider LineEdit interface directly (for coverage of the fix)
+ # This is the actual code path that was failing in issue #58690
+ provider = REPLExt.PkgCompletionProvider()
+
+ # Create a mock state that has the required interface
+ mock_state = (
+ input_buffer = let buf = IOBuffer()
+ write(buf, "add Example"); seek(buf, sizeof("add Example")); buf
+ end,
+ )
+
+ # Define the required interface methods for our mock
+ @eval REPL.beforecursor(state::NamedTuple) = String(state.input_buffer.data[1:(state.input_buffer.ptr - 1)])
+ @eval REPL.LineEdit.input_string(state::NamedTuple) = String(state.input_buffer.data[1:state.input_buffer.size])
+
+ # This calls the modified LineEdit.complete_line method
+ completions, region, should_complete = @invokelatest REPL.LineEdit.complete_line(provider, mock_state)
+ @test completions isa Vector{REPL.LineEdit.NamedCompletion}
+ @test region isa Pair{Int, Int} # This is the key fix - Region not String
+ @test should_complete isa Bool
+
+ # Test the empty range edge case for coverage
+ mock_state_empty = (
+ input_buffer = let buf = IOBuffer()
+ write(buf, ""); seek(buf, 0); buf
+ end,
+ )
+ completions_empty, region_empty, should_complete_empty = @invokelatest REPL.LineEdit.complete_line(provider, mock_state_empty)
+ @test region_empty isa Pair{Int, Int}
+
+ # Test for issue #4121 - completion after semicolon should not crash
+ # When typing "a;" and hitting tab, partial can be nothing causing startswith crash
+ c, r = test_complete("a;")
+ @test c isa Vector{String} # Should not crash, return empty or valid completions
+ @test r isa UnitRange{Int}
+ end # testset
+ end
+end
+
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ mktempdir() do tmp
+ cp(joinpath(@__DIR__, "test_packages", "BigProject"), joinpath(tmp, "BigProject"))
+ cd(joinpath(tmp, "BigProject"))
+ with_current_env() do
+ # the command below also tests multiline input
+ pkg"""
+ dev ./RecursiveDep2
+ dev ./RecursiveDep
+ dev ./SubModule
+ dev ./SubModule2
+ add Random
+ add Example
+ add JSON
+ build
+ """
+ @eval using BigProject
+ pkg"build BigProject"
+ @test_throws PkgError pkg"add BigProject"
+ # the command below also tests multiline input
+ Pkg.REPLMode.pkgstr(
+ """
+ test SubModule
+ test SubModule2
+ test BigProject
+ test
+ """
+ )
+ json_uuid = Pkg.project().dependencies["JSON"]
+ current_json = Pkg.dependencies()[json_uuid].version
+ old_project = read("Project.toml", String)
+ Pkg.compat("JSON", "0.18.0")
+ pkg"up"
+ @test Pkg.dependencies()[json_uuid].version.minor == 18
+ write("Project.toml", old_project)
+ pkg"up"
+ @test Pkg.dependencies()[json_uuid].version == current_json
+ end
end
end
-end; end
+end
temp_pkg_dir() do project_path
cd(project_path) do
@@ -537,9 +637,9 @@ temp_pkg_dir() do project_path
setup_package(dir_name, pkg_name)
uuid = extract_uuid("$dir_name/$pkg_name/Project.toml")
Pkg.REPLMode.pkgstr("add \"$dir_name/$pkg_name\"")
- @test isinstalled((name=pkg_name, uuid = UUID(uuid)))
+ @test isinstalled((name = pkg_name, uuid = UUID(uuid)))
Pkg.REPLMode.pkgstr("remove \"$pkg_name\"")
- @test !isinstalled((name=pkg_name, uuid = UUID(uuid)))
+ @test !isinstalled((name = pkg_name, uuid = UUID(uuid)))
# testing dir name with significant characters
dir_name = "some@d;ir#"
@@ -547,9 +647,9 @@ temp_pkg_dir() do project_path
setup_package(dir_name, pkg_name)
uuid = extract_uuid("$dir_name/$pkg_name/Project.toml")
Pkg.REPLMode.pkgstr("add \"$dir_name/$pkg_name\"")
- @test isinstalled((name=pkg_name, uuid = UUID(uuid)))
+ @test isinstalled((name = pkg_name, uuid = UUID(uuid)))
Pkg.REPLMode.pkgstr("remove '$pkg_name'")
- @test !isinstalled((name=pkg_name, uuid = UUID(uuid)))
+ @test !isinstalled((name = pkg_name, uuid = UUID(uuid)))
# more complicated input
## pkg1
@@ -565,35 +665,28 @@ temp_pkg_dir() do project_path
uuid2 = extract_uuid("$dir2/$pkg_name2/Project.toml")
Pkg.REPLMode.pkgstr("add '$dir1/$pkg_name1' \"$dir2/$pkg_name2\"")
- @test isinstalled((name=pkg_name1, uuid = UUID(uuid1)))
- @test isinstalled((name=pkg_name2, uuid = UUID(uuid2)))
+ @test isinstalled((name = pkg_name1, uuid = UUID(uuid1)))
+ @test isinstalled((name = pkg_name2, uuid = UUID(uuid2)))
Pkg.REPLMode.pkgstr("remove '$pkg_name1' $pkg_name2")
- @test !isinstalled((name=pkg_name1, uuid = UUID(uuid1)))
- @test !isinstalled((name=pkg_name2, uuid = UUID(uuid2)))
+ @test !isinstalled((name = pkg_name1, uuid = UUID(uuid1)))
+ @test !isinstalled((name = pkg_name2, uuid = UUID(uuid2)))
Pkg.REPLMode.pkgstr("add '$dir1/$pkg_name1' \"$dir2/$pkg_name2\"")
- @test isinstalled((name=pkg_name1, uuid = UUID(uuid1)))
- @test isinstalled((name=pkg_name2, uuid = UUID(uuid2)))
+ @test isinstalled((name = pkg_name1, uuid = UUID(uuid1)))
+ @test isinstalled((name = pkg_name2, uuid = UUID(uuid2)))
Pkg.REPLMode.pkgstr("remove '$pkg_name1' \"$pkg_name2\"")
- @test !isinstalled((name=pkg_name1, uuid = UUID(uuid1)))
- @test !isinstalled((name=pkg_name2, uuid = UUID(uuid2)))
+ @test !isinstalled((name = pkg_name1, uuid = UUID(uuid1)))
+ @test !isinstalled((name = pkg_name2, uuid = UUID(uuid2)))
end
end
end
@testset "parse package url win" begin
pkg_id = Pkg.REPLMode.PackageIdentifier("https://github.com/abc/ABC.jl")
- pkg_spec = Pkg.REPLMode.parse_package_identifier(pkg_id; add_or_develop=true)
+ pkg_spec = Pkg.REPLMode.parse_package_identifier(pkg_id; add_or_develop = true)
@test typeof(pkg_spec) == Pkg.Types.PackageSpec
end
-@testset "parse git url (issue #1935) " begin
- urls = ["https://github.com/abc/ABC.jl.git", "https://abc.github.io/ABC.jl"]
- for url in urls
- @test Pkg.REPLMode.package_lex([Pkg.REPLMode.QString((url), false)]) == [url]
- end
-end
-
@testset "unit test for REPLMode.promptf" begin
function set_name(projfile_path, newname)
sleep(1.1)
@@ -635,80 +728,96 @@ end
end
@testset "test" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir; with_temp_env() do;
- Pkg.add("Example")
- @test_throws PkgError Pkg.REPLMode.pkgstr("test --project Example")
- Pkg.REPLMode.pkgstr("test --coverage Example")
- Pkg.REPLMode.pkgstr("test Example")
- end
- end
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ with_temp_env() do;
+ Pkg.add("Example")
+ @test_throws PkgError Pkg.REPLMode.pkgstr("test --project Example")
+ Pkg.REPLMode.pkgstr("test --coverage Example")
+ Pkg.REPLMode.pkgstr("test Example")
+ end
+ end
end
end
@testset "activate" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir; with_temp_env() do;
- mkdir("Foo")
- pkg"activate"
- default = Base.active_project()
- pkg"activate Foo"
- @test Base.active_project() == joinpath(pwd(), "Foo", "Project.toml")
- pkg"activate"
- @test Base.active_project() == default
- end end end
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ with_temp_env() do;
+ mkdir("Foo")
+ pkg"activate"
+ default = Base.active_project()
+ pkg"activate Foo"
+ @test Base.active_project() == joinpath(pwd(), "Foo", "Project.toml")
+ pkg"activate"
+ @test Base.active_project() == default
+ end
+ end
+ end
end
@testset "status" begin
temp_pkg_dir() do project_path
- pkg"""
- add Example Random
- status
- status -m
- status Example
- status Example=7876af07-990d-54b4-ab0e-23690620f79a
- status 7876af07-990d-54b4-ab0e-23690620f79a
- status Example Random
- status -m Example
- status --outdated
- status --compat
- """
- # --diff option
- @test_logs (:warn, r"diff option only available") pkg"status --diff"
- @test_logs (:warn, r"diff option only available") pkg"status -d"
- git_init_and_commit(project_path)
- @test_logs () pkg"status --diff"
- @test_logs () pkg"status -d"
-
- # comma-separated packages get parsed
- pkg"status Example, Random"
+ # Pkg.status earlyouts if `io` is `devnull`, so override for this test
+ io = PipeBuffer()
+ @Base.ScopedValues.with Pkg.DEFAULT_IO => io begin
+ pkg"""
+ add Example Random
+ status
+ status -m
+ status Example
+ status Example=7876af07-990d-54b4-ab0e-23690620f79a
+ status 7876af07-990d-54b4-ab0e-23690620f79a
+ status Example Random
+ status -m Example
+ status --outdated
+ status --compat
+ """
+ # --diff option
+ @test_logs (:warn, r"diff option only available") pkg"status --diff"
+ @test_logs (:warn, r"diff option only available") pkg"status -d"
+ git_init_and_commit(project_path)
+ @test_logs () pkg"status --diff"
+ @test_logs () pkg"status -d"
+
+ # comma-separated packages get parsed
+ pkg"status Example, Random"
+ end
end
end
@testset "subcommands" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir; with_temp_env() do
- Pkg.REPLMode.pkg"package add Example"
- @test isinstalled(TEST_PKG)
- Pkg.REPLMode.pkg"package rm Example"
- @test !isinstalled(TEST_PKG)
- end end end
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ with_temp_env() do
+ Pkg.REPLMode.pkg"package add Example"
+ @test isinstalled(TEST_PKG)
+ Pkg.REPLMode.pkg"package rm Example"
+ @test !isinstalled(TEST_PKG)
+ end
+ end
+ end
end
@testset "REPL API `up`" begin
# errors
- temp_pkg_dir() do project_path; with_temp_env() do;
- @test_throws PkgError Pkg.REPLMode.pkgstr("up --major --minor")
- end end
+ temp_pkg_dir() do project_path
+ with_temp_env() do;
+ @test_throws PkgError Pkg.REPLMode.pkgstr("up --major --minor")
+ end
+ end
end
@testset "Inference" begin
@inferred Pkg.REPLMode.OptionSpecs(Pkg.REPLMode.OptionDeclaration[])
@inferred Pkg.REPLMode.CommandSpecs(Pkg.REPLMode.CommandDeclaration[])
- @inferred Pkg.REPLMode.CompoundSpecs(Pair{String,Vector{Pkg.REPLMode.CommandDeclaration}}[])
+ @inferred Pkg.REPLMode.CompoundSpecs(Pair{String, Vector{Pkg.REPLMode.CommandDeclaration}}[])
end
# To be used to reply to a prompt
function withreply(f, ans)
p = Pipe()
- try
+ return try
redirect_stdin(p) do
@async println(p, ans)
f()
@@ -719,7 +828,7 @@ function withreply(f, ans)
end
@testset "REPL missing package install hook" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test REPLExt.try_prompt_pkg_add(Symbol[:notapackage]) == false
# don't offer to install the dummy "julia" entry that's in General
@@ -735,13 +844,84 @@ end
end
@testset "JuliaLang/julia #55850" begin
- tmp_55850 = mktempdir()
- tmp_sym_link = joinpath(tmp_55850, "sym")
- symlink(tmp_55850, tmp_sym_link; dir_target=true)
- withenv("JULIA_DEPOT_PATH" => tmp_sym_link * (Sys.iswindows() ? ";" : ":"), "JULIA_LOAD_PATH" => nothing) do
- prompt = readchomp(`$(Base.julia_cmd()[1]) --project=$(dirname(@__DIR__)) --startup-file=no -e "using Pkg, REPL; Pkg.activate(io=devnull); REPLExt = Base.get_extension(Pkg, :REPLExt); print(REPLExt.promptf())"`)
+ isolate(loaded_depot = true) do
+ prompt = readchomp(`$(Base.julia_cmd()) --project=$(dirname(@__DIR__)) --startup-file=no -e "using Pkg, REPL; Pkg.activate(io=devnull); REPLExt = Base.get_extension(Pkg, :REPLExt); print(REPLExt.promptf())"`)
@test prompt == "(@v$(VERSION.major).$(VERSION.minor)) pkg> "
end
end
+@testset "in_repl_mode" begin
+ # Test that in_repl_mode() returns false by default (API mode)
+ @test Pkg.in_repl_mode() == false
+
+ # Test that in_repl_mode() returns true when running REPL commands
+ # This is tested indirectly by running a simple REPL command
+ temp_pkg_dir() do project_path
+ cd(project_path) do
+ # The pkg"" macro should set IN_REPL_MODE => true during execution
+ # We can't directly test the scoped value here, but we can test
+ # that REPL commands work correctly
+ pkg"status"
+ # The fact that this doesn't error confirms REPL mode is working
+ @test true
+ end
+ end
+
+ # Test manual scoped value setting (for completeness)
+ Base.ScopedValues.@with Pkg.IN_REPL_MODE => true begin
+ @test Pkg.in_repl_mode() == true
+ end
+
+ # Verify we're back to false after the scoped block
+ @test Pkg.in_repl_mode() == false
+end
+
+@testset "compat REPL mode" begin
+ temp_pkg_dir() do project_path
+ with_pkg_env(project_path; change_dir = true) do
+
+ pkg"add Example JSON"
+
+ test_ctx = Pkg.Types.Context()
+ test_ctx.io = IOBuffer()
+
+ @test Pkg.Operations.get_compat_str(test_ctx.env.project, "Example") === nothing
+ @test Pkg.Operations.get_compat_str(test_ctx.env.project, "JSON") === nothing
+
+ input_io = Base.BufferStream()
+ # Send input to stdin before starting the _compat function
+ # This simulates the user typing in the REPL
+ write(input_io, "\e[B") # Down arrow once to select Example
+ write(input_io, "\r") # Enter to confirm selection
+ # now editing Example compat
+ write(input_io, "0.4") # Set compat to 0.4
+ write(input_io, "\r") # Enter to confirm input
+ close(input_io)
+
+ Pkg.API._compat(test_ctx; input_io)
+
+ str = String(take!(test_ctx.io))
+ @test occursin("Example = \"0.4\"", str)
+ @test occursin("checking for compliance with the new compat rules..", str)
+ @test occursin("Error empty intersection between", str) # Latest Example is at least 0.5.5
+
+ # Test for issue #3828: Backspace on empty buffer should not cause BoundsError
+ test_ctx = Pkg.Types.Context()
+ test_ctx.io = IOBuffer()
+
+ input_io = Base.BufferStream()
+ write(input_io, "\r") # Select julia (first entry)
+ # Now editing julia compat entry which starts empty
+ write(input_io, "\x7f") # Backspace on empty buffer
+ write(input_io, "\x7f") # Another backspace
+ write(input_io, " ") # Space should not cause error
+ write(input_io, "\r") # Confirm empty input
+ close(input_io)
+
+ # Should not throw BoundsError
+ Pkg.API._compat(test_ctx; input_io)
+ end
+ end
+end
+
end # module
diff --git a/test/resolve.jl b/test/resolve.jl
index 91907e2d10..f2c0e0c6e9 100644
--- a/test/resolve.jl
+++ b/test/resolve.jl
@@ -9,7 +9,6 @@ using Pkg.Types: VersionBound
using UUIDs
using Pkg.Resolve
import Pkg.Resolve: VersionWeight, add_reqs!, simplify_graph!, ResolverError, ResolverTimeoutError, Fixed, Requires
-import ..HistoricalStdlibVersions
include("utils.jl")
using .Utils
@@ -26,8 +25,8 @@ vlst = [
v"1.0.0",
v"1.0.1",
v"1.1.0",
- v"1.1.1"
- ]
+ v"1.1.1",
+]
for v1 in vlst, v2 in vlst
vw1 = VersionWeight(v1)
@@ -45,28 +44,28 @@ end
["A", v"1", "B", "1-*"],
["A", v"2", "B", "2-*"],
["B", v"1"],
- ["B", v"2"]
+ ["B", v"2"],
]
@test sanity_tst(deps_data)
- @test sanity_tst(deps_data, pkgs=["A", "B"])
- @test sanity_tst(deps_data, pkgs=["B"])
- @test sanity_tst(deps_data, pkgs=["A"])
+ @test sanity_tst(deps_data, pkgs = ["A", "B"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
+ @test sanity_tst(deps_data, pkgs = ["A"])
# require just B
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("B"=>v"2")
+ want_data = Dict("B" => v"2")
resolve_tst(deps_data, reqs_data, want_data)
@test resolve_tst(deps_data, reqs_data, want_data)
# require just A: must bring in B
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -76,30 +75,30 @@ end
["A", v"1", "B", "2-*"],
["A", v"2", "B", "1-*"],
["B", v"1", "A", "2-*"],
- ["B", v"2", "A", "1-*"]
+ ["B", v"2", "A", "1-*"],
]
@test sanity_tst(deps_data)
# require just A
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just B, force lower version
reqs_data = Any[
- ["B", "1"]
+ ["B", "1"],
]
- want_data = Dict("A"=>v"2", "B"=>v"1")
+ want_data = Dict("A" => v"2", "B" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just A, force lower version
reqs_data = Any[
- ["A", "1"]
+ ["A", "1"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2")
+ want_data = Dict("A" => v"1", "B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -111,36 +110,36 @@ end
["B", v"1", "C", "2-*"],
["B", v"2", "C", "1"],
["C", v"1", "A", "1"],
- ["C", v"2", "A", "2-*"]
+ ["C", v"2", "A", "2-*"],
]
@test sanity_tst(deps_data)
# require just A (must choose solution which has the highest version for A)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"1", "C"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"1", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just B (must choose solution which has the highest version for B)
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"1")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just A, force lower version
reqs_data = Any[
- ["A", "1"]
+ ["A", "1"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"1")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A and C, incompatible versions
reqs_data = Any[
["A", "1"],
- ["C", "2-*"]
+ ["C", "2-*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -149,22 +148,22 @@ end
## DEPENDENCY SCHEME 4: TWO PACKAGES, DAG, WITH TRIVIAL INCONSISTENCY
deps_data = Any[
["A", v"1", "B", "2-*"],
- ["B", v"1"]
+ ["B", v"1"],
]
@test sanity_tst(deps_data, [("A", v"1")])
- @test sanity_tst(deps_data, pkgs=["B"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
# require B (must not give errors)
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("B"=>v"1")
+ want_data = Dict("B" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A (must give an error)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -179,23 +178,23 @@ end
["B", v"1", "C", "2-*"],
["B", v"2", "C", "2-*"],
["C", v"1"],
- ["C", v"2"]
+ ["C", v"2"],
]
@test sanity_tst(deps_data, [("A", v"2")])
- @test sanity_tst(deps_data, pkgs=["B"])
- @test sanity_tst(deps_data, pkgs=["C"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
+ @test sanity_tst(deps_data, pkgs = ["C"])
# require A, any version (must use the highest non-inconsistent)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"2")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A, force highest version (impossible)
reqs_data = Any[
- ["A", "2-*"]
+ ["A", "2-*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -206,21 +205,25 @@ end
["A", v"1", "B", "2-*"],
["A", v"2", "B", "1"],
["B", v"1", "A", "1"],
- ["B", v"2", "A", "2-*"]
+ ["B", v"2", "A", "2-*"],
]
- @test sanity_tst(deps_data, [("A", v"1"), ("A", v"2"),
- ("B", v"1"), ("B", v"2")])
+ @test sanity_tst(
+ deps_data, [
+ ("A", v"1"), ("A", v"2"),
+ ("B", v"1"), ("B", v"2"),
+ ]
+ )
# require A (impossible)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
# require B (impossible)
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -236,26 +239,30 @@ end
["C", v"2", "A", "2-*"],
]
- @test sanity_tst(deps_data, [("A", v"1"), ("B", v"1"),
- ("C", v"1")])
+ @test sanity_tst(
+ deps_data, [
+ ("A", v"1"), ("B", v"1"),
+ ("C", v"1"),
+ ]
+ )
# require A
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2", "C"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require C
reqs_data = Any[
- ["C", "*"]
+ ["C", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2", "C"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require C, lowest version (impossible)
reqs_data = Any[
- ["C", "1"]
+ ["C", "1"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -271,25 +278,29 @@ end
["C", v"2", "A", "1"],
]
- @test sanity_tst(deps_data, [("A", v"1"), ("A", v"2"),
- ("B", v"1"), ("B", v"2"),
- ("C", v"1"), ("C", v"2")])
+ @test sanity_tst(
+ deps_data, [
+ ("A", v"1"), ("A", v"2"),
+ ("B", v"1"), ("B", v"2"),
+ ("C", v"1"), ("C", v"2"),
+ ]
+ )
# require A (impossible)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
# require B (impossible)
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
# require C (impossible)
reqs_data = Any[
- ["C", "*"]
+ ["C", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -316,45 +327,55 @@ end
# require just F
reqs_data = Any[
- ["F", "*"]
+ ["F", "*"],
]
- want_data = Dict("A"=>v"3", "B"=>v"2", "C"=>v"2",
- "D"=>v"2", "E"=>v"1", "F"=>v"2")
+ want_data = Dict(
+ "A" => v"3", "B" => v"2", "C" => v"2",
+ "D" => v"2", "E" => v"1", "F" => v"2"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require just F, lower version
reqs_data = Any[
- ["F", "1"]
+ ["F", "1"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2", "D"=>v"2",
- "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"2", "B" => v"2", "D" => v"2",
+ "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and B; force lower B version -> must bring down F, A, and D versions too
reqs_data = Any[
["F", "*"],
- ["B", "1"]
+ ["B", "1"],
]
- want_data = Dict("A"=>v"1", "B"=>v"1", "D"=>v"1",
- "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"1", "B" => v"1", "D" => v"1",
+ "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and D; force lower D version -> must not bring down F version
reqs_data = Any[
["F", "*"],
- ["D", "1"]
+ ["D", "1"],
]
- want_data = Dict("A"=>v"3", "B"=>v"2", "C"=>v"2",
- "D"=>v"1", "E"=>v"1", "F"=>v"2")
+ want_data = Dict(
+ "A" => v"3", "B" => v"2", "C" => v"2",
+ "D" => v"1", "E" => v"1", "F" => v"2"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and C; force lower C version -> must bring down F and A versions
reqs_data = Any[
["F", "*"],
- ["C", "1"]
+ ["C", "1"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2", "C"=>v"1",
- "D"=>v"2", "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"2", "B" => v"2", "C" => v"1",
+ "D" => v"2", "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
VERBOSE && @info("SCHEME 10")
@@ -371,40 +392,39 @@ end
["D", v"1", "E", "1-*"],
["D", v"2", "E", "2-*"],
["E", v"1"],
- ["E", v"2"]
+ ["E", v"2"],
]
@test sanity_tst(deps_data, [("A", v"2")])
- @test sanity_tst(deps_data, pkgs=["B"])
- @test sanity_tst(deps_data, pkgs=["D"])
- @test sanity_tst(deps_data, pkgs=["E"])
- @test sanity_tst(deps_data, pkgs=["B", "D"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
+ @test sanity_tst(deps_data, pkgs = ["D"])
+ @test sanity_tst(deps_data, pkgs = ["E"])
+ @test sanity_tst(deps_data, pkgs = ["B", "D"])
# require A, any version (must use the highest non-inconsistent)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"2")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just D: must bring in E
reqs_data = Any[
- ["D", "*"]
+ ["D", "*"],
]
- want_data = Dict("D"=>v"2", "E"=>v"2")
+ want_data = Dict("D" => v"2", "E" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A and D, must be the merge of the previous two cases
reqs_data = Any[
["A", "*"],
- ["D", "*"]
+ ["D", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"2", "D"=>v"2", "E"=>v"2")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"2", "D" => v"2", "E" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
-
VERBOSE && @info("SCHEME 11")
## DEPENDENCY SCHEME 11: FOUR PACKAGES, WITH AN INCONSISTENCY
## ref Pkg.jl issue #2740
@@ -429,7 +449,7 @@ end
["A", "*"],
["B", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"1", "C"=>v"1", "D"=>v"1")
+ want_data = Dict("A" => v"1", "B" => v"1", "C" => v"1", "D" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -439,40 +459,40 @@ end
["A", v"1", "B", "1-*", :weak],
["A", v"2", "B", "2-*", :weak],
["B", v"1"],
- ["B", v"2"]
+ ["B", v"2"],
]
@test sanity_tst(deps_data)
- @test sanity_tst(deps_data, pkgs=["A", "B"])
- @test sanity_tst(deps_data, pkgs=["B"])
- @test sanity_tst(deps_data, pkgs=["A"])
+ @test sanity_tst(deps_data, pkgs = ["A", "B"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
+ @test sanity_tst(deps_data, pkgs = ["A"])
# require just B
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("B"=>v"2")
+ want_data = Dict("B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just A
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2")
+ want_data = Dict("A" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A and B
reqs_data = Any[
["A", "*"],
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
+    # require A and B, incompatible versions
reqs_data = Any[
["A", "2-*"],
- ["B", "1"]
+ ["B", "1"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data, want_data)
@@ -500,45 +520,55 @@ end
# require just F
reqs_data = Any[
- ["F", "*"]
+ ["F", "*"],
]
- want_data = Dict("A"=>v"3", "C"=>v"2",
- "D"=>v"2", "E"=>v"1", "F"=>v"2")
+ want_data = Dict(
+ "A" => v"3", "C" => v"2",
+ "D" => v"2", "E" => v"1", "F" => v"2"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require just F, lower version
reqs_data = Any[
- ["F", "1"]
+ ["F", "1"],
]
- want_data = Dict("A"=>v"2", "D"=>v"2",
- "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"2", "D" => v"2",
+ "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and B; force lower B version -> must bring down F, A, and D versions too
reqs_data = Any[
["F", "*"],
- ["B", "1"]
+ ["B", "1"],
]
- want_data = Dict("A"=>v"1", "B"=>v"1", "D"=>v"1",
- "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"1", "B" => v"1", "D" => v"1",
+ "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and D; force lower D version -> must not bring down F version, and bring in B
reqs_data = Any[
["F", "*"],
- ["D", "1"]
+ ["D", "1"],
]
- want_data = Dict("A"=>v"3", "B"=>v"2", "C"=>v"2",
- "D"=>v"1", "E"=>v"1", "F"=>v"2")
+ want_data = Dict(
+ "A" => v"3", "B" => v"2", "C" => v"2",
+ "D" => v"1", "E" => v"1", "F" => v"2"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and C; force lower C version -> must bring down F and A versions
reqs_data = Any[
["F", "*"],
- ["C", "1"]
+ ["C", "1"],
]
- want_data = Dict("A"=>v"2", "C"=>v"1",
- "D"=>v"2", "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"2", "C" => v"1",
+ "D" => v"2", "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -578,16 +608,16 @@ end
["D", "*"],
]
want_data = Dict(
- "A"=>v"1",
- "B"=>v"2",
- "C"=>v"2",
- "D"=>v"2",
- "Y"=>v"0.2.2",
- "X"=>v"0.2",
- "F"=>v"1",
- "G"=>v"2",
- "H"=>v"1",
- "I"=>v"1",
+ "A" => v"1",
+ "B" => v"2",
+ "C" => v"2",
+ "D" => v"2",
+ "Y" => v"0.2.2",
+ "X" => v"0.2",
+ "F" => v"1",
+ "G" => v"2",
+ "H" => v"1",
+ "I" => v"1",
)
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -596,13 +626,13 @@ end
["D", "*"],
]
want_data = Dict(
- "B"=>v"2",
- "C"=>v"2",
- "D"=>v"2",
- "F"=>v"1",
- "G"=>v"2",
- "H"=>v"1",
- "I"=>v"1",
+ "B" => v"2",
+ "C" => v"2",
+ "D" => v"2",
+ "F" => v"1",
+ "G" => v"2",
+ "H" => v"1",
+ "I" => v"1",
)
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -611,9 +641,9 @@ end
["A", "*"],
]
want_data = Dict(
- "A"=>v"1",
- "Y"=>v"0.2.2",
- "X"=>v"0.2",
+ "A" => v"1",
+ "Y" => v"0.2.2",
+ "X" => v"0.2",
)
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -621,19 +651,19 @@ end
reqs_data = Any[
["A", "*"],
["D", "*"],
- ["Y", "0.2.1"]
+ ["Y", "0.2.1"],
]
want_data = Dict(
- "A"=>v"1",
- "B"=>v"1",
- "C"=>v"2",
- "D"=>v"2",
- "Y"=>v"0.2.1",
- "X"=>v"0.2",
- "F"=>v"1",
- "G"=>v"2",
- "H"=>v"1",
- "I"=>v"1",
+ "A" => v"1",
+ "B" => v"1",
+ "C" => v"2",
+ "D" => v"2",
+ "Y" => v"0.2.1",
+ "X" => v"0.2",
+ "F" => v"1",
+ "G" => v"2",
+ "H" => v"1",
+ "I" => v"1",
)
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -674,7 +704,7 @@ end
@testset "realistic" begin
tmp = mktempdir()
- Pkg.PlatformEngines.unpack(joinpath(@__DIR__, "resolvedata.tar.gz"), tmp; verbose=false)
+ Pkg.PlatformEngines.unpack(joinpath(@__DIR__, "resolvedata.tar.gz"), tmp; verbose = false)
VERBOSE && @info("SCHEME REALISTIC 1")
## DEPENDENCY SCHEME 15: A REALISTIC EXAMPLE
@@ -711,12 +741,12 @@ end
include(joinpath(tmp, "resolvedata4.jl"))
@test sanity_tst(ResolveData4.deps_data, ResolveData4.problematic_data)
- withenv("JULIA_PKG_RESOLVE_MAX_TIME"=>10) do
+ withenv("JULIA_PKG_RESOLVE_MAX_TIME" => 10) do
@test_throws ResolverError resolve_tst(ResolveData4.deps_data, ResolveData4.reqs_data, ResolveData4.want_data)
end
- withenv("JULIA_PKG_RESOLVE_MAX_TIME"=>1e-5) do
+ withenv("JULIA_PKG_RESOLVE_MAX_TIME" => 1.0e-5) do
# this test may fail if graph preprocessing or the greedy solver get better
- @test_throws ResolverTimeoutError resolve_tst(ResolveData4.deps_data, ResolveData4.reqs_data, ResolveData4.want_data; validate_versions=false)
+ @test_throws ResolverTimeoutError resolve_tst(ResolveData4.deps_data, ResolveData4.reqs_data, ResolveData4.want_data; validate_versions = false)
end
end
@@ -726,68 +756,21 @@ end
## DEPENDENCY SCHEME 19: A NASTY CASE
include("NastyGenerator.jl")
- deps_data, reqs_data, want_data, problematic_data = NastyGenerator.generate_nasty(5, 20, q=20, d=4, sat=true)
+ deps_data, reqs_data, want_data, problematic_data = NastyGenerator.generate_nasty(5, 20, q = 20, d = 4, sat = true)
@test sanity_tst(deps_data, problematic_data)
@test resolve_tst(deps_data, reqs_data, want_data)
- deps_data, reqs_data, want_data, problematic_data = NastyGenerator.generate_nasty(5, 20, q=20, d=4, sat=false)
+ deps_data, reqs_data, want_data, problematic_data = NastyGenerator.generate_nasty(5, 20, q = 20, d = 4, sat = false)
@test sanity_tst(deps_data, problematic_data)
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
end
-@testset "Resolving for another version of Julia" begin
- HistoricalStdlibVersions.register!()
- temp_pkg_dir() do dir
- function find_by_name(versions, name)
- idx = findfirst(p -> p.name == name, versions)
- if idx === nothing
- return nothing
- end
- return versions[idx]
- end
-
- # First, we're going to resolve for specific versions of Julia, ensuring we get the right dep versions:
- Pkg.Registry.download_default_registries(Pkg.stdout_f())
- ctx = Pkg.Types.Context(;julia_version=v"1.5")
- versions, deps = Pkg.Operations._resolve(ctx.io, ctx.env, ctx.registries, [
- Pkg.Types.PackageSpec(name="MPFR_jll", uuid=Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3")),
- ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version)
- gmp = find_by_name(versions, "GMP_jll")
- @test gmp !== nothing
- @test gmp.version.major == 6 && gmp.version.minor == 1
- ctx = Pkg.Types.Context(;julia_version=v"1.6")
- versions, deps = Pkg.Operations._resolve(ctx.io, ctx.env, ctx.registries, [
- Pkg.Types.PackageSpec(name="MPFR_jll", uuid=Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3")),
- ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version)
- gmp = find_by_name(versions, "GMP_jll")
- @test gmp !== nothing
- @test gmp.version.major == 6 && gmp.version.minor == 2
-
- # We'll also test resolving an "impossible" manifest; one that requires two package versions that
- # are not both loadable by the same Julia:
- ctx = Pkg.Types.Context(;julia_version=nothing)
- versions, deps = Pkg.Operations._resolve(ctx.io, ctx.env, ctx.registries, [
- # This version of GMP only works on Julia v1.6
- Pkg.Types.PackageSpec(name="GMP_jll", uuid=Base.UUID("781609d7-10c4-51f6-84f2-b8444358ff6d"), version=v"6.2.0"),
- # This version of MPFR only works on Julia v1.5
- Pkg.Types.PackageSpec(name="MPFR_jll", uuid=Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3"), version=v"4.0.2"),
- ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version)
- gmp = find_by_name(versions, "GMP_jll")
- @test gmp !== nothing
- @test gmp.version.major == 6 && gmp.version.minor == 2
- mpfr = find_by_name(versions, "MPFR_jll")
- @test mpfr !== nothing
- @test mpfr.version.major == 4 && mpfr.version.minor == 0
- end
- HistoricalStdlibVersions.unregister!()
-end
-
@testset "Stdlib resolve smoketest" begin
# All stdlibs should be installable and resolvable
temp_pkg_dir() do dir
- Pkg.activate(temp=true)
+ Pkg.activate(temp = true)
Pkg.add(map(x -> x.name, values(Pkg.Types.load_stdlib()))) # add all stdlibs
iob = IOBuffer()
Pkg.resolve(io = iob)
diff --git a/test/resolve_utils.jl b/test/resolve_utils.jl
index df10e71ec8..4168f00c41 100644
--- a/test/resolve_utils.jl
+++ b/test/resolve_utils.jl
@@ -17,7 +17,7 @@ const VERBOSE = false
# auxiliary functions
const uuid_package = UUID("cfb74b52-ec16-5bb7-a574-95d9e393895e")
pkguuid(p::String) = uuid5(uuid_package, p)
-function storeuuid(p::String, uuid_to_name::Dict{UUID,String})
+function storeuuid(p::String, uuid_to_name::Dict{UUID, String})
uuid = p == "julia" ? Resolve.uuid_julia : pkguuid(p)
if haskey(uuid_to_name, uuid)
@assert uuid_to_name[uuid] == p
@@ -26,7 +26,7 @@ function storeuuid(p::String, uuid_to_name::Dict{UUID,String})
end
return uuid
end
-wantuuids(want_data) = Dict{UUID,VersionNumber}(pkguuid(p) => v for (p,v) in want_data)
+wantuuids(want_data) = Dict{UUID, VersionNumber}(pkguuid(p) => v for (p, v) in want_data)
"""
graph = graph_from_data(deps_data)
@@ -37,55 +37,103 @@ This states that the package "PkgName" with version `v"x.y.z"` depends on "Depen
specified compatibility information. The last entry of the array can optionally be `:weak`.
"""
function graph_from_data(deps_data)
- uuid_to_name = Dict{UUID,String}()
+ uuid_to_name = Dict{UUID, String}()
uuid(p) = storeuuid(p, uuid_to_name)
- fixed = Dict{UUID,Fixed}()
- all_compat = Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}}()
- all_compat_w = Dict{UUID,Dict{VersionNumber,Set{UUID}}}()
+ fixed = Dict{UUID, Fixed}()
- deps = Dict{String,Dict{VersionNumber,Dict{String,VersionSpec}}}()
- deps_w = Dict{String,Dict{VersionNumber,Set{String}}}()
+ deps = Dict{String, Dict{VersionNumber, Dict{String, VersionSpec}}}()
+ deps_w = Dict{String, Dict{VersionNumber, Set{String}}}()
for d in deps_data
p, vn, r = d[1], d[2], d[3:end]
if !haskey(deps, p)
- deps[p] = Dict{VersionNumber,Dict{String,VersionSpec}}()
+ deps[p] = Dict{VersionNumber, Dict{String, VersionSpec}}()
end
if !haskey(deps[p], vn)
- deps[p][vn] = Dict{String,VersionSpec}()
+ deps[p][vn] = Dict{String, VersionSpec}()
end
isempty(r) && continue
rp = r[1]
weak = length(r) > 1 && r[end] == :weak
- rvs = VersionSpec(r[2:(end-weak)]...)
+ rvs = VersionSpec(r[2:(end - weak)]...)
deps[p][vn][rp] = rvs
if weak
# same as push!(deps_w[p][vn], rp) but create keys as needed
- push!(get!(Set{String}, get!(Dict{VersionNumber,Set{String}}, deps_w, p), vn), rp)
+ push!(get!(Set{String}, get!(Dict{VersionNumber, Set{String}}, deps_w, p), vn), rp)
end
end
- for (p,preq) in deps
+ # Build pkg_versions map
+ pkg_versions = Dict{UUID, Vector{VersionNumber}}()
+ for (p, preq) in deps
u = uuid(p)
- deps_pkgs = Dict{String,Set{VersionNumber}}()
- for (vn,vreq) in deps[p], rp in keys(vreq)
- push!(get!(Set{VersionNumber}, deps_pkgs, rp), vn)
- end
- all_compat[u] = Dict{VersionNumber,Dict{UUID,VersionSpec}}()
- for (vn,vreq) in preq
- all_compat[u][vn] = Dict{UUID,VersionSpec}()
- for (rp,rvs) in vreq
- all_compat[u][vn][uuid(rp)] = rvs
+ pkg_versions[u] = sort!(collect(keys(preq)))
+ end
+
+ # Convert per-version data to compressed format
+ # For tests, each version gets its own VersionRange
+ all_deps_compressed = Dict{UUID, Dict{VersionRange, Set{UUID}}}()
+ all_compat_compressed = Dict{UUID, Dict{VersionRange, Dict{UUID, VersionSpec}}}()
+ all_weak_deps_compressed = Dict{UUID, Dict{VersionRange, Set{UUID}}}()
+ all_weak_compat_compressed = Dict{UUID, Dict{VersionRange, Dict{UUID, VersionSpec}}}()
+
+ for (p, preq) in deps
+ u = uuid(p)
+ all_deps_compressed[u] = Dict{VersionRange, Set{UUID}}()
+ all_compat_compressed[u] = Dict{VersionRange, Dict{UUID, VersionSpec}}()
+ all_weak_deps_compressed[u] = Dict{VersionRange, Set{UUID}}()
+ all_weak_compat_compressed[u] = Dict{VersionRange, Dict{UUID, VersionSpec}}()
+
+ for (vn, vreq) in preq
+ # Create a single-version range for this version
+ vrange = VersionRange(vn, vn)
+ deps_set = Set{UUID}()
+ compat_dict = Dict{UUID, VersionSpec}()
+ weak_deps_set = Set{UUID}()
+ weak_compat_dict = Dict{UUID, VersionSpec}()
+
+ for (rp, rvs) in vreq
+ dep_uuid = uuid(rp)
+ push!(deps_set, dep_uuid)
+ compat_dict[dep_uuid] = rvs
+
# weak dependency?
if haskey(deps_w, p) && haskey(deps_w[p], vn) && (rp ∈ deps_w[p][vn])
- # same as push!(all_compat_w[u][vn], uuid(rp)) but create keys as needed
- push!(get!(Set{UUID}, get!(Dict{VersionNumber,Set{UUID}}, all_compat_w, u), vn), uuid(rp))
+ push!(weak_deps_set, dep_uuid)
+ weak_compat_dict[dep_uuid] = rvs
end
end
+
+ all_deps_compressed[u][vrange] = deps_set
+ all_compat_compressed[u][vrange] = compat_dict
+ if !isempty(weak_deps_set)
+ all_weak_deps_compressed[u][vrange] = weak_deps_set
+ all_weak_compat_compressed[u][vrange] = weak_compat_dict
+ end
end
end
- return Graph(all_compat, all_compat_w, uuid_to_name, Requires(), fixed, VERBOSE)
+
+ # Wrap in vectors for multi-registry support (tests simulate a single registry)
+ all_deps_compressed_vec = Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}}(
+ u => [d] for (u, d) in all_deps_compressed
+ )
+ all_compat_compressed_vec = Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}}(
+ u => [c] for (u, c) in all_compat_compressed
+ )
+ all_weak_deps_compressed_vec = Dict{UUID, Vector{Dict{VersionRange, Set{UUID}}}}(
+ u => [d] for (u, d) in all_weak_deps_compressed
+ )
+ all_weak_compat_compressed_vec = Dict{UUID, Vector{Dict{VersionRange, Dict{UUID, VersionSpec}}}}(
+ u => [c] for (u, c) in all_weak_compat_compressed
+ )
+
+ # Create pkg_versions_per_registry (single registry with all versions)
+ pkg_versions_per_registry = Dict{UUID, Vector{Set{VersionNumber}}}(
+ u => [Set(versions)] for (u, versions) in pkg_versions
+ )
+
+ return Graph(all_deps_compressed_vec, all_compat_compressed_vec, all_weak_deps_compressed_vec, all_weak_compat_compressed_vec, pkg_versions, pkg_versions_per_registry, uuid_to_name, Requires(), fixed, VERBOSE)
end
function reqs_from_data(reqs_data, graph::Graph)
- reqs = Dict{UUID,VersionSpec}()
+ reqs = Dict{UUID, VersionSpec}()
function uuid_check(p)
uuid = pkguuid(p)
@assert graph.data.uuid_to_name[uuid] == p
@@ -95,9 +143,9 @@ function reqs_from_data(reqs_data, graph::Graph)
p = uuid_check(r[1])
reqs[p] = VersionSpec(r[2:end])
end
- reqs
+ return reqs
end
-function sanity_tst(deps_data, expected_result; pkgs=[])
+function sanity_tst(deps_data, expected_result; pkgs = [])
if VERBOSE
println()
@info("sanity check")
@@ -109,9 +157,9 @@ function sanity_tst(deps_data, expected_result; pkgs=[])
result = sanity_check(graph, Set(pkguuid(p) for p in pkgs), VERBOSE)
length(result) == length(expected_result) || return false
- expected_result_uuid = [(id(p), vn) for (p,vn) in expected_result]
+ expected_result_uuid = [(id(p), vn) for (p, vn) in expected_result]
for r in result
- r ∈ expected_result_uuid || return false
+ r ∈ expected_result_uuid || return false
end
return true
end
@@ -133,14 +181,14 @@ function resolve_tst(deps_data, reqs_data, want_data = nothing; validate_version
id(u) = pkgID(u, graph)
wd = wantuuids(want_data)
if want ≠ wd
- for (u,vn) in want
+ for (u, vn) in want
if u ∉ keys(wd)
@info "resolver decided to install $(id(u)) (v$vn), package wasn't expected"
elseif vn ≠ wd[u]
@info "version mismatch for $(id(u)), resolver wants v$vn, expected v$(wd[u])"
end
end
- for (u,vn) in wd
+ for (u, vn) in wd
if u ∉ keys(want)
@info "was expecting the resolver to install $(id(u)) (v$vn)"
end
diff --git a/test/runtests.jl b/test/runtests.jl
index bb4a0b86e8..80ead6c4d3 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -9,67 +9,49 @@ original_project = Base.active_project()
module PkgTestsInner
-original_wd = pwd()
+ original_wd = pwd()
-import Pkg
-using Test, Logging
+ import Pkg
+ import REPL # should precompile REPLExt before we disallow it below
+ @assert Base.get_extension(Pkg, :REPLExt) !== nothing
+ using Test, Logging
+ using Base.ScopedValues
-if realpath(dirname(dirname(Base.pathof(Pkg)))) != realpath(dirname(@__DIR__))
- @show dirname(dirname(Base.pathof(Pkg))) realpath(dirname(@__DIR__))
- error("The wrong Pkg is being tested")
-end
+ if realpath(dirname(dirname(Base.pathof(Pkg)))) != realpath(dirname(@__DIR__))
+ @show dirname(dirname(Base.pathof(Pkg))) realpath(dirname(@__DIR__))
+ error("The wrong Pkg is being tested")
+ end
-ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0
-ENV["HISTORICAL_STDLIB_VERSIONS_AUTO_REGISTER"]="false"
+ @test isempty(Test.detect_closure_boxes(Pkg))
-logdir = get(ENV, "JULIA_TEST_VERBOSE_LOGS_DIR", nothing)
-### Send all Pkg output to a file called Pkg.log
-islogging = logdir !== nothing
+ const original_depot_had_registries = isdir(joinpath(Base.DEPOT_PATH[1], "registries"))
-if islogging
- logfile = joinpath(logdir, "Pkg.log")
- Pkg.DEFAULT_IO[] = open(logfile, "a")
- @info "Pkg test output is being logged to file" logfile
-else
- Pkg.DEFAULT_IO[] = devnull # or stdout
-end
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 0
+ ENV["JULIA_PKG_DISALLOW_PKG_PRECOMPILATION"] = 1
-include("utils.jl")
-Logging.with_logger((islogging || Pkg.DEFAULT_IO[] == devnull) ? Logging.ConsoleLogger(Pkg.DEFAULT_IO[]) : Logging.current_logger()) do
-
- # Because julia CI doesn't run stdlib tests via `Pkg.test` test deps must be manually installed if missing
- if Base.find_package("HistoricalStdlibVersions") === nothing
- @debug "Installing HistoricalStdlibVersions for Pkg tests"
- iob = IOBuffer()
- Pkg.activate(; temp = true)
- try
- # Needed for custom julia version resolve tests
- # Don't use the toplevel PKg.add() command to avoid accidentally installing another copy of the registry
- spec = Pkg.PackageSpec(
- name="HistoricalStdlibVersions",
- url="https://github.com/JuliaPackaging/HistoricalStdlibVersions.jl",
- rev="5879c5f690795208481c60b904f4af4e8c1eeef8", #= version="2.0.0", =#
- uuid="6df8b67a-e8a0-4029-b4b7-ac196fe72102")
- Pkg.API.handle_package_input!(spec)
- Pkg.add(Pkg.API.Context(), [spec], io=iob)
- catch
- println(String(take!(iob)))
- rethrow()
- end
- end
+ logdir = get(ENV, "JULIA_TEST_VERBOSE_LOGS_DIR", nothing)
+ ### Send all Pkg output to a file called Pkg.log
+ islogging = logdir !== nothing
- @eval import HistoricalStdlibVersions
-
- if (server = Pkg.pkg_server()) !== nothing && Sys.which("curl") !== nothing
- s = read(`curl -sLI $(server)`, String);
- @info "Pkg Server metadata:\n$s"
+ if islogging
+ logfile = joinpath(logdir, "Pkg.log")
+ default_io = open(logfile, "a")
+ @info "Pkg test output is being logged to file" logfile
+ else
+ default_io = devnull # or stdout
end
- Utils.check_init_reg()
+ include("utils.jl")
+ @with Pkg.DEFAULT_IO => default_io begin
+ Logging.with_logger((islogging || default_io == devnull) ? Logging.ConsoleLogger(default_io) : Logging.current_logger()) do
+ if (server = Pkg.pkg_server()) !== nothing && Sys.which("curl") !== nothing
+ s = read(`curl -sLI $(server)`, String)
+ @info "Pkg Server metadata:\n$s"
+ end
+
+ Utils.check_init_reg()
- @testset "Pkg" begin
- try
- @testset "$f" for f in [
+ test_files = [
"new.jl",
"pkg.jl",
"repl.jl",
@@ -77,35 +59,62 @@ Logging.with_logger((islogging || Pkg.DEFAULT_IO[] == devnull) ? Logging.Console
"registry.jl",
"subdir.jl",
"extensions.jl",
- "artifacts.jl",
"binaryplatforms.jl",
"platformengines.jl",
- "sandbox.jl",
"resolve.jl",
"misc.jl",
"force_latest_compatible_version.jl",
"manifests.jl",
"project_manifest.jl",
"sources.jl",
- "workspaces.jl"
- ]
- @info "==== Testing `test/$f`"
- flush(Pkg.DEFAULT_IO[])
- include(f)
+ "workspaces.jl",
+ "apps.jl",
+ "stdlib_compat.jl",
+ ]
+
+ # Only test these if the test deps are available (they aren't typically via `Base.runtests`)
+ HSV_pkgid = Base.PkgId(Base.UUID("6df8b67a-e8a0-4029-b4b7-ac196fe72102"), "HistoricalStdlibVersions")
+ if Base.locate_package(HSV_pkgid) !== nothing
+ push!(test_files, "historical_stdlib_version.jl")
+ end
+ Aqua_pkgid = Base.PkgId(Base.UUID("4c88cf16-eb10-579e-8560-4a9242c79595"), "Aqua")
+ if Base.locate_package(Aqua_pkgid) !== nothing
+ push!(test_files, "aqua.jl")
end
- finally
- islogging && close(Pkg.DEFAULT_IO[])
- cd(original_wd)
+ Preferences_pkgid = Base.PkgId(Base.UUID("21216c6a-2e73-6563-6e65-726566657250"), "Preferences")
+ if Base.locate_package(Preferences_pkgid) !== nothing
+ push!(test_files, "sandbox.jl")
+ push!(test_files, "artifacts.jl")
+ end
+
+ verbose = true
+ @testset "Pkg" verbose = verbose begin
+ Pkg.activate(; temp = true) # make sure we're in an active project and that it's clean
+ try
+ @testset "$f" verbose = verbose for f in test_files
+ @info "==== Testing `test/$f`"
+ flush(default_io)
+ include(f)
+ end
+ finally
+ islogging && close(default_io)
+ cd(original_wd)
+ end
+ end
+ end
+
+ # Make sure that none of our tests have left temporary registries lying around
+ if isdir(joinpath(Base.DEPOT_PATH[1], "registries")) != original_depot_had_registries
+ @warn "Test left temporary registries in depot" Base.DEPOT_PATH[1] original_depot_had_registries
end
end
-end
-if haskey(ENV, "CI")
- # if CI don't clean up as it will be slower than the runner filesystem reset
- empty!(Base.Filesystem.TEMP_CLEANUP)
-else
- @showtime Base.Filesystem.temp_cleanup_purge(force=true)
-end
+ if haskey(ENV, "CI")
+ # in CI, don't clean up: it would be slower than the runner's filesystem reset
+ empty!(Base.Filesystem.TEMP_CLEANUP)
+ else
+ @showtime Base.Filesystem.temp_cleanup_purge(force = true)
+ end
end # module
diff --git a/test/sandbox.jl b/test/sandbox.jl
index d06ceaea96..b5d0315f77 100644
--- a/test/sandbox.jl
+++ b/test/sandbox.jl
@@ -1,157 +1,172 @@
module SandboxTests
import ..Pkg # ensure we are using the correct Pkg
-# Order-dependence in the tests, so we delay this until we need it
-if Base.find_package("Preferences") === nothing
- @info "Installing Preferences for Pkg tests"
- Pkg.add("Preferences") # Needed for sandbox and artifacts tests
-end
+using ..Utils
+using Preferences
using Test
using UUIDs
using Pkg
-using Preferences
-using ..Utils
-test_test(fn, name; kwargs...) = Pkg.test(name; test_fn=fn, kwargs...)
-test_test(fn; kwargs...) = Pkg.test(;test_fn=fn, kwargs...)
+test_test(fn, name; kwargs...) = Pkg.test(name; test_fn = fn, kwargs...)
+test_test(fn; kwargs...) = Pkg.test(; test_fn = fn, kwargs...)
@testset "Basic `test` sandboxing" begin
# also indirectly checks that test `compat` is obeyed
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "SandboxFallback2")
- proj = joinpath(tmp, "SandboxFallback2")
- Pkg.activate(proj)
- withenv("JULIA_PROJECT" => proj) do; test_test("Unregistered") do
- json = get(Pkg.Types.Context().env.manifest, UUID("682c06a0-de6a-54ab-a142-c8b1cf79cde6"), nothing)
- @test json !== nothing
- @test json.version == v"0.20.0"
- # test that the active project is the tmp one even though
- # JULIA_PROJECT might be set
- @test !haskey(ENV, "JULIA_PROJECT")
- @test Base.active_project() != proj
- @test Base.LOAD_PATH[1] == "@"
- @test startswith(Base.active_project(), Base.LOAD_PATH[2])
- end end
- end end
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "SandboxFallback2")
+ proj = joinpath(tmp, "SandboxFallback2")
+ Pkg.activate(proj)
+ withenv("JULIA_PROJECT" => proj) do
+ test_test("Unregistered") do
+ json = get(Pkg.Types.Context().env.manifest, UUID("682c06a0-de6a-54ab-a142-c8b1cf79cde6"), nothing)
+ @test json !== nothing
+ @test json.version == v"0.20.0"
+ # test that the active project is the tmp one even though
+ # JULIA_PROJECT might be set
+ @test !haskey(ENV, "JULIA_PROJECT")
+ @test Base.active_project() != proj
+ @test Base.LOAD_PATH[1] == "@"
+ @test startswith(Base.active_project(), Base.LOAD_PATH[2])
+ end
+ end
+ end
+ end
# test dependencies should be preserved, when possible
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Sandbox_PreserveTestDeps")
- Pkg.activate(joinpath(tmp, "Sandbox_PreserveTestDeps"))
- test_test("Foo") do
- x = get(Pkg.Types.Context().env.manifest, UUID("7876af07-990d-54b4-ab0e-23690620f79a"), nothing)
- @test x !== nothing
- @test x.version == v"0.4.0"
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Sandbox_PreserveTestDeps")
+ Pkg.activate(joinpath(tmp, "Sandbox_PreserveTestDeps"))
+ test_test("Foo") do
+ x = get(Pkg.Types.Context().env.manifest, UUID("7876af07-990d-54b4-ab0e-23690620f79a"), nothing)
+ @test x !== nothing
+ @test x.version == v"0.4.0"
+ end
end
- end end
+ end
end
@testset "Preferences sandboxing without test/Project.toml" begin
# Preferences should be copied over into sandbox
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Sandbox_PreservePreferences")
- Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
- test_test() do
- uuid = UUID("3872bf94-3adb-11e9-01dc-bf80c7641364")
- @test !Preferences.has_preference(uuid, "does_not_exist")
- @test Preferences.load_preference(uuid, "tree") == "birch"
- @test Preferences.load_preference(uuid, "default") === nothing
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Sandbox_PreservePreferences")
+ Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
+ test_test() do
+ uuid = UUID("3872bf94-3adb-11e9-01dc-bf80c7641364")
+ @test !Preferences.has_preference(uuid, "does_not_exist")
+ @test Preferences.load_preference(uuid, "tree") == "birch"
+ @test Preferences.load_preference(uuid, "default") === nothing
+ end
end
- end end
+ end
end
@testset "Preferences sandboxing with test/Project.toml" begin
# Preferences should be copied over into sandbox
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Sandbox_PreservePreferences")
- spp_uuid = UUID("3872bf94-3adb-11e9-01dc-bf80c7641364")
- Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
-
- # Create fake test/Project.toml and test/LocalPreferences.toml
- open(joinpath(tmp, "Sandbox_PreservePreferences", "test", "Project.toml"), write=true) do io
- print(io, """
- [deps]
- Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
- """)
- end
- Preferences.set_preferences!(
- joinpath(tmp, "Sandbox_PreservePreferences", "test", "LocalPreferences.toml"),
- "Sandbox_PreservePreferences",
- "scent" => "juniper",
- )
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Sandbox_PreservePreferences")
+ spp_uuid = UUID("3872bf94-3adb-11e9-01dc-bf80c7641364")
+ Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
- # This test should have a set of preferences that have nothing to do with those
- # exported within `Sandbox_PreservePreferences/Project.toml`
- test_test() do
- @test !Preferences.has_preference(spp_uuid, "does_not_exist")
- # Because we are testing with the project set as the active project, we inherit
- # preferences set in the SPP project
- @test Preferences.load_preference(spp_uuid, "tree") === "birch"
- @test Preferences.load_preference(spp_uuid, "scent") == "juniper"
- @test Preferences.load_preference(spp_uuid, "default") === nothing
- end
-
- # Test that `Pkg.test()` layers the test project onto the `LOAD_PATH`,
- # so that preferences set in the calling environment can leak through.
- mktempdir() do outer_layer
- # Create a fake project that references SPP
- open(joinpath(outer_layer, "Project.toml"), write=true) do io
- println(io, """
+ # Create fake test/Project.toml and test/LocalPreferences.toml
+ open(joinpath(tmp, "Sandbox_PreservePreferences", "test", "Project.toml"), write = true) do io
+ print(
+ io, """
[deps]
- Sandbox_PreservePreferences = "$(spp_uuid)"
-
- [preferences.Sandbox_PreservePreferences]
- tree = "pine"
- scent = "shadowed"
- """)
+ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+ """
+ )
end
+ Preferences.set_preferences!(
+ joinpath(tmp, "Sandbox_PreservePreferences", "test", "LocalPreferences.toml"),
+ "Sandbox_PreservePreferences",
+ "scent" => "juniper",
+ )
- # Use `/` on windows as well
- spp_path = joinpath(tmp, "Sandbox_PreservePreferences")
- if Sys.iswindows()
- spp_path = replace(spp_path, "\\" => "/")
- end
- open(joinpath(outer_layer, "Manifest.toml"), write=true) do io
- println(io, """
- [[Sandbox_PreservePreferences]]
- path = "$(spp_path)"
- uuid = "$(spp_uuid)"
- """)
+ # This test should have a set of preferences that have nothing to do with those
+ # exported within `Sandbox_PreservePreferences/Project.toml`
+ test_test() do
+ @test !Preferences.has_preference(spp_uuid, "does_not_exist")
+ # Because we are testing with the project set as the active project, we inherit
+ # preferences set in the SPP project
+ @test Preferences.load_preference(spp_uuid, "tree") === "birch"
+ @test Preferences.load_preference(spp_uuid, "scent") == "juniper"
+ @test Preferences.load_preference(spp_uuid, "default") === nothing
end
- Pkg.activate(outer_layer)
- test_test("Sandbox_PreservePreferences") do
- # The tree that leaks through is from the outer layer,
- # rather than the overall project
- @test Preferences.load_preference(spp_uuid, "tree") === "pine"
- # The scent is still the inner test preference, since that takes priority.
- @test Preferences.load_preference(spp_uuid, "scent") == "juniper"
+ # Test that `Pkg.test()` layers the test project onto the `LOAD_PATH`,
+ # so that preferences set in the calling environment can leak through.
+ mktempdir() do outer_layer
+ # Create a fake project that references SPP
+ open(joinpath(outer_layer, "Project.toml"), write = true) do io
+ println(
+ io, """
+ [deps]
+ Sandbox_PreservePreferences = "$(spp_uuid)"
+
+ [preferences.Sandbox_PreservePreferences]
+ tree = "pine"
+ scent = "shadowed"
+ """
+ )
+ end
+
+ # Use `/` as the path separator on Windows as well
+ spp_path = joinpath(tmp, "Sandbox_PreservePreferences")
+ if Sys.iswindows()
+ spp_path = replace(spp_path, "\\" => "/")
+ end
+ open(joinpath(outer_layer, "Manifest.toml"), write = true) do io
+ println(
+ io, """
+ [[Sandbox_PreservePreferences]]
+ path = "$(spp_path)"
+ uuid = "$(spp_uuid)"
+ """
+ )
+ end
+
+ Pkg.activate(outer_layer)
+ test_test("Sandbox_PreservePreferences") do
+ # The tree that leaks through is from the outer layer,
+ # rather than the overall project
+ @test Preferences.load_preference(spp_uuid, "tree") === "pine"
+ # The scent is still the inner test preference, since that takes priority.
+ @test Preferences.load_preference(spp_uuid, "scent") == "juniper"
+ end
end
end
- end end
+ end
end
@testset "Nested Preferences sandboxing" begin
# Preferences should be copied over into sandbox
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Sandbox_PreservePreferences")
- Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
- test_test("Foo") do
- uuid = UUID("48898bec-3adb-11e9-02a6-a164ba74aeae")
- @test !Preferences.has_preference(uuid, "does_not_exist")
- @test Preferences.load_preference(uuid, "toy") == "car"
- @test Preferences.load_preference(uuid, "tree") == "birch"
- @test Preferences.load_preference(uuid, "default") === nothing
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Sandbox_PreservePreferences")
+ Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
+ test_test("Foo") do
+ uuid = UUID("48898bec-3adb-11e9-02a6-a164ba74aeae")
+ @test !Preferences.has_preference(uuid, "does_not_exist")
+ @test Preferences.load_preference(uuid, "toy") == "car"
+ @test Preferences.load_preference(uuid, "tree") == "birch"
+ @test Preferences.load_preference(uuid, "default") === nothing
+ end
end
- end end
+ end
end
@testset "Basic `build` sandbox" begin
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "BasicSandbox")
- Pkg.activate(joinpath(tmp, "BasicSandbox"))
- Pkg.build()
- end end
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "BasicSandbox")
+ Pkg.activate(joinpath(tmp, "BasicSandbox"))
+ Pkg.build()
+ end
+ end
end
end # module
diff --git a/test/sources.jl b/test/sources.jl
index 311b203f00..e5b5508ec3 100644
--- a/test/sources.jl
+++ b/test/sources.jl
@@ -3,26 +3,26 @@ module SourcesTest
import ..Pkg # ensure we are using the correct Pkg
using Test, Pkg
using ..Utils
+using UUIDs
temp_pkg_dir() do project_path
@testset "test Project.toml [sources]" begin
mktempdir() do dir
- path = abspath(joinpath(dirname(pathof(Pkg)), "../test", "test_packages", "WithSources"))
- cp(path, joinpath(dir, "WithSources"))
- cd(joinpath(dir, "WithSources")) do
+ path = copy_test_package(dir, "WithSources")
+ cd(path) do
with_current_env() do
Pkg.resolve()
@test !isempty(Pkg.project().sources["Example"])
- project_backup = cp("Project.toml", "Project.toml.bak"; force=true)
+ project_backup = cp("Project.toml", "Project.toml.bak"; force = true)
Pkg.free("Example")
@test !haskey(Pkg.project().sources, "Example")
- cp("Project.toml.bak", "Project.toml"; force=true)
- Pkg.add(; url="https://github.com/JuliaLang/Example.jl/", rev="78406c204b8")
+ cp("Project.toml.bak", "Project.toml"; force = true)
+ Pkg.add(; url = "https://github.com/JuliaLang/Example.jl/", rev = "78406c204b8")
@test Pkg.project().sources["Example"] == Dict("url" => "https://github.com/JuliaLang/Example.jl/", "rev" => "78406c204b8")
- cp("Project.toml.bak", "Project.toml"; force=true)
- cp("BadManifest.toml", "Manifest.toml"; force=true)
+ cp("Project.toml.bak", "Project.toml"; force = true)
+ cp("BadManifest.toml", "Manifest.toml"; force = true)
Pkg.resolve()
- @test Pkg.project().sources["Example"] == Dict("url" => "https://github.com/JuliaLang/Example.jl")
+ @test Pkg.project().sources["Example"] == Dict("rev" => "master", "url" => "https://github.com/JuliaLang/Example.jl")
@test Pkg.project().sources["LocalPkg"] == Dict("path" => "LocalPkg")
end
end
@@ -33,11 +33,209 @@ temp_pkg_dir() do project_path
end
end
+ cd(joinpath(dir, "WithSources", "TestMonorepo")) do
+ with_current_env() do
+ Pkg.test()
+ end
+ end
+
cd(joinpath(dir, "WithSources", "TestProject")) do
with_current_env() do
Pkg.test()
end
end
+
+ cd(joinpath(dir, "WithSources", "URLSourceInDevvedPackage")) do
+ with_current_env() do
+ Pkg.test()
+ end
+ end
+ end
+ end
+
+ @testset "path normalization in Project.toml [sources]" begin
+ mktempdir() do tmp
+ cd(tmp) do
+ # Create a minimal Project.toml with sources containing a path
+ write(
+ "Project.toml",
+ """
+ name = "TestPackage"
+ uuid = "12345678-1234-1234-1234-123456789abc"
+
+ [deps]
+ LocalPkg = "87654321-4321-4321-4321-cba987654321"
+
+ [sources]
+ LocalPkg = { path = "subdir/LocalPkg" }
+ """
+ )
+
+ # Read the project
+ project = Pkg.Types.read_project("Project.toml")
+
+ # Verify the path is read correctly (will have native separators internally)
+ @test haskey(project.sources, "LocalPkg")
+ @test haskey(project.sources["LocalPkg"], "path")
+
+ # Write it back
+ Pkg.Types.write_project(project, "Project.toml")
+
+ # Read the written file as string and verify forward slashes are used
+ project_content = read("Project.toml", String)
+ @test occursin("path = \"subdir/LocalPkg\"", project_content)
+ # Verify backslashes are NOT in the path (would indicate Windows path wasn't normalized)
+ @test !occursin("path = \"subdir\\\\LocalPkg\"", project_content)
+ end
+ end
+ end
+
+ @testset "recursive [sources] via repo URLs" begin
+ isolate() do
+ mktempdir() do tmp
+ file_url(path::AbstractString) = begin
+ normalized = replace(abspath(path), '\\' => '/')
+ if Sys.iswindows() && occursin(':', normalized)
+ normalized = "/" * normalized
+ end
+ return "file://$normalized"
+ end
+
+ template_root = joinpath(@__DIR__, "test_packages", "RecursiveSources")
+ function prepare_pkg(name::AbstractString; replacements = Dict{String, String}())
+ src = joinpath(template_root, name)
+ dest = joinpath(tmp, name)
+ cp(src, dest; force = true)
+ Utils.ensure_test_package_user_writable(dest)
+ project_path = joinpath(dest, "Project.toml")
+ if !isempty(replacements)
+ content = read(project_path, String)
+ for (pattern, value) in replacements
+ content = replace(content, pattern => value)
+ end
+ write(project_path, content)
+ end
+ git_init_and_commit(dest)
+ return dest
+ end
+
+ grandchild_path = prepare_pkg("GrandchildPkg")
+ grandchild_url = file_url(grandchild_path)
+
+ child_path = prepare_pkg("ChildPkg"; replacements = Dict("__GRANDCHILD_URL__" => grandchild_url))
+ child_url = file_url(child_path)
+
+ parent_path = prepare_pkg("ParentPkg"; replacements = Dict("__CHILD_URL__" => child_url))
+ parent_url = file_url(parent_path)
+
+ Pkg.activate(temp = true)
+ Pkg.add(; url = parent_url)
+
+ dep_info_by_name = Dict(info.name => info for info in values(Pkg.dependencies()))
+ for pkgname in ("ParentPkg", "ChildPkg", "GrandchildPkg", "SiblingPkg")
+ @test haskey(dep_info_by_name, pkgname)
+ end
+ @test dep_info_by_name["ParentPkg"].git_source == parent_url
+ @test dep_info_by_name["ChildPkg"].git_source == child_url
+ @test dep_info_by_name["GrandchildPkg"].git_source == grandchild_url
+ sibling_info = dep_info_by_name["SiblingPkg"]
+ @test sibling_info.is_tracking_path
+ @test sibling_info.source !== nothing
+ @test endswith(sibling_info.source, "SiblingPkg")
+
+ result = include_string(
+ Module(), """
+ using ParentPkg
+ ParentPkg.parent_value()
+ """
+ )
+ @test result == 47
+ end
+ end
+ end
+
+ # Regression test for https://github.com/JuliaLang/Pkg.jl/issues/4337
+ # Switching between path and repo sources should not cause assertion error
+ @testset "switching between path and repo sources (#4337)" begin
+ mktempdir() do tmp
+ cd(tmp) do
+ # Create a local package and initialize it as a git repo
+ local_pkg_uuid = UUID("00000000-0000-0000-0000-000000000001")
+ mkdir("LocalPkg")
+ write(
+ joinpath("LocalPkg", "Project.toml"), """
+ name = "LocalPkg"
+ uuid = "$local_pkg_uuid"
+ version = "0.1.0"
+ """
+ )
+ mkdir(joinpath("LocalPkg", "src"))
+ write(joinpath("LocalPkg", "src", "LocalPkg.jl"), "module LocalPkg end")
+
+ # Initialize as a git repo
+ git_init_and_commit("LocalPkg")
+
+ # Get the absolute path for file:// URL
+ local_pkg_url = make_file_url(abspath("LocalPkg"))
+
+ # Create test project with path source
+ write(
+ "Project.toml", """
+ [deps]
+ LocalPkg = "$local_pkg_uuid"
+
+ [sources]
+ LocalPkg = { path = "LocalPkg" }
+ """
+ )
+
+ with_current_env() do
+ # Initial resolve with path source
+ Pkg.resolve()
+ manifest = Pkg.Types.read_manifest("Manifest.toml")
+ @test manifest[local_pkg_uuid].path !== nothing
+ @test manifest[local_pkg_uuid].tree_hash === nothing
+ @test manifest[local_pkg_uuid].repo.source === nothing
+ # Update should work without error
+ Pkg.update()
+
+ # Switch to repo source using file:// protocol
+ write(
+ "Project.toml", """
+ [deps]
+ LocalPkg = "$local_pkg_uuid"
+
+ [sources]
+ LocalPkg = { url = "$local_pkg_url", rev = "HEAD" }
+ """
+ )
+
+ # This should NOT cause an assertion error about tree_hash and path both being set
+ Pkg.update()
+ manifest = Pkg.Types.read_manifest("Manifest.toml")
+ @test manifest[local_pkg_uuid].path === nothing
+ @test manifest[local_pkg_uuid].tree_hash !== nothing
+ @test manifest[local_pkg_uuid].repo.source !== nothing
+
+ # Switch back to path source
+ write(
+ "Project.toml", """
+ [deps]
+ LocalPkg = "$local_pkg_uuid"
+
+ [sources]
+ LocalPkg = { path = "LocalPkg" }
+ """
+ )
+
+ # This should work and restore the path source without assertion error
+ Pkg.update()
+ manifest = Pkg.Types.read_manifest("Manifest.toml")
+ @test manifest[local_pkg_uuid].path !== nothing
+ @test manifest[local_pkg_uuid].tree_hash === nothing
+ @test manifest[local_pkg_uuid].repo.source === nothing
+ end
+ end
end
end
end
diff --git a/test/stdlib_compat.jl b/test/stdlib_compat.jl
new file mode 100644
index 0000000000..75cd9e9aae
--- /dev/null
+++ b/test/stdlib_compat.jl
@@ -0,0 +1,30 @@
+using Test
+using Pkg
+using Pkg.Types
+
+@testset "Non-upgradable stdlib compat handling" begin
+ mktempdir() do dir
+ cd(dir) do
+ Pkg.activate(dir) do
+ # Create a project with incompatible compat for LibCURL (non-upgradable stdlib)
+ write(
+ "Project.toml", """
+ name = "TestProject"
+ uuid = "12345678-1234-1234-1234-123456789012"
+
+ [deps]
+ LibCURL = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21"
+
+ [compat]
+ LibCURL = "0.6"
+ """
+ )
+ Pkg.activate(dir)
+
+ # The compat entry is incompatible with current version
+ # This should trigger a warning but not error
+ @test_logs (:warn, r"Ignoring incompatible compat entry") Pkg.resolve()
+ end
+ end
+ end
+end
diff --git a/test/subdir.jl b/test/subdir.jl
index cddf27992f..99420c714a 100644
--- a/test/subdir.jl
+++ b/test/subdir.jl
@@ -10,8 +10,12 @@ using ..Utils
# Derived from RegistryTools' gitcmd.
function gitcmd(path::AbstractString)
- Cmd(["git", "-C", path, "-c", "user.name=RegistratorTests",
- "-c", "user.email=ci@juliacomputing.com"])
+ return Cmd(
+ [
+ "git", "-C", path, "-c", "user.name=RegistratorTests",
+ "-c", "user.email=ci@juliacomputing.com",
+ ]
+ )
end
# Create a repository containing two packages in different
@@ -20,28 +24,36 @@ end
function setup_packages_repository(dir)
package_dir = joinpath(dir, "julia")
mkpath(joinpath(package_dir, "src"))
- write(joinpath(package_dir, "Project.toml"), """
+ write(
+ joinpath(package_dir, "Project.toml"), """
name = "Package"
uuid = "408b23ff-74ea-48c4-abc7-a671b41e2073"
version = "1.0.0"
[deps]
Dep = "d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"
- """)
- write(joinpath(package_dir, "src", "Package.jl"), """
+ """
+ )
+ write(
+ joinpath(package_dir, "src", "Package.jl"), """
module Package end
- """)
+ """
+ )
dep_dir = joinpath(dir, "dependencies", "Dep")
mkpath(joinpath(dep_dir, "src"))
- write(joinpath(dep_dir, "Project.toml"), """
+ write(
+ joinpath(dep_dir, "Project.toml"), """
name = "Dep"
uuid = "d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"
version = "1.0.0"
- """)
- write(joinpath(dep_dir, "src", "Dep.jl"), """
+ """
+ )
+ write(
+ joinpath(dep_dir, "src", "Dep.jl"), """
module Dep end
- """)
+ """
+ )
git = gitcmd(dir)
run(pipeline(`$git init -q`, stdout = stdout_f(), stderr = stderr_f()))
@@ -53,16 +65,6 @@ function setup_packages_repository(dir)
return package_tree_hash, dep_tree_hash
end
-# Convert a path into a file URL.
-function make_file_url(path)
- # Turn the slashes on Windows. In case the path starts with a
- # drive letter, an extra slash will be needed in the file URL.
- path = replace(path, "\\" => "/")
- if !startswith(path, "/")
- path = "/" * path
- end
- return "file://$(path)"
-end
# Create a registry with the two packages `Package` and `Dep`.
function setup_registry(dir, packages_dir_url, package_tree_hash, dep_tree_hash)
@@ -70,45 +72,57 @@ function setup_registry(dir, packages_dir_url, package_tree_hash, dep_tree_hash)
dep_path = joinpath(dir, "D", "Dep")
mkpath(package_path)
mkpath(dep_path)
- write(joinpath(dir, "Registry.toml"), """
+ write(
+ joinpath(dir, "Registry.toml"), """
name = "Registry"
uuid = "cade28e2-3b52-4f58-aeba-0b1386f9894b"
repo = "https://github.com"
[packages]
408b23ff-74ea-48c4-abc7-a671b41e2073 = { name = "Package", path = "P/Package" }
d43cb7ef-9818-40d3-bb27-28fb4aa46cc5 = { name = "Dep", path = "D/Dep" }
- """)
- write(joinpath(package_path, "Package.toml"), """
+ """
+ )
+ write(
+ joinpath(package_path, "Package.toml"), """
name = "Package"
uuid = "408b23ff-74ea-48c4-abc7-a671b41e2073"
repo = "$(packages_dir_url)"
subdir = "julia"
- """)
- write(joinpath(package_path, "Versions.toml"), """
+ """
+ )
+ write(
+ joinpath(package_path, "Versions.toml"), """
["1.0.0"]
git-tree-sha1 = "$(package_tree_hash)"
- """)
- write(joinpath(package_path, "Deps.toml"), """
+ """
+ )
+ write(
+ joinpath(package_path, "Deps.toml"), """
[1]
Dep = "d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"
- """)
+ """
+ )
- write(joinpath(dep_path, "Package.toml"), """
+ write(
+ joinpath(dep_path, "Package.toml"), """
name = "Dep"
uuid = "d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"
repo = "$(packages_dir_url)"
subdir = "dependencies/Dep"
- """)
- write(joinpath(dep_path, "Versions.toml"), """
+ """
+ )
+ write(
+ joinpath(dep_path, "Versions.toml"), """
["1.0.0"]
git-tree-sha1 = "$(dep_tree_hash)"
- """)
+ """
+ )
git = gitcmd(dir)
run(pipeline(`$git init -q`, stdout = stdout_f(), stderr = stderr_f()))
run(pipeline(`$git add .`, stdout = stdout_f(), stderr = stderr_f()))
run(pipeline(`$git commit -qm 'Create repository.'`, stdout = stdout_f(), stderr = stderr_f()))
- fix_default_branch(; dir)
+ return fix_default_branch(; dir)
end
# Some of our tests assume that the default branch name is `master`.
@@ -153,216 +167,222 @@ end
# with the `pkg"add ..."` calls. Just set it to something that
# exists.
cd(@__DIR__) do
- # Setup a repository with two packages and a registry where
- # these packages are registered.
- packages_dir = mktempdir()
- registry_dir = mktempdir()
- packages_dir_url = make_file_url(packages_dir)
- tree_hashes = setup_packages_repository(packages_dir)
- setup_registry(registry_dir, packages_dir_url, tree_hashes...)
- pkgstr("registry add $(registry_dir)")
- dep = (name="Dep", uuid=UUID("d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"))
-
- # Ordinary add from registry.
- pkg"add Package"
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkg"add Dep"
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add version from registry.
- pkg"add Package@1.0.0"
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkg"add Dep@1.0.0"
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add branch from registry.
- pkg"add Package#master"
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
-
- # Test that adding a second time doesn't error (#3391)
- pkg"add Package#master"
- @test isinstalled("Package")
- pkg"rm Package"
-
- pkg"add Dep#master"
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from registry.
- pkg"develop Package"
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
-
- # Test developing twice (#3391)
- pkg"develop Package"
- @test isinstalled("Package")
- pkg"rm Package"
-
- pkg"develop Dep"
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from path.
- Pkg.add(Pkg.PackageSpec(path=packages_dir, subdir="julia"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.add(Pkg.PackageSpec(path=packages_dir, subdir="dependencies/Dep"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from path, REPL subdir syntax.
- pkgstr("add $(packages_dir):julia")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("add $(packages_dir):dependencies/Dep")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from path at branch.
- Pkg.add(Pkg.PackageSpec(path=packages_dir, subdir="julia", rev="master"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.add(Pkg.PackageSpec(path=packages_dir, subdir="dependencies/Dep", rev="master"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from path at branch, REPL subdir syntax
- pkgstr("add $(packages_dir):julia#master")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("add $(packages_dir):dependencies/Dep#master")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from path.
- Pkg.develop(Pkg.PackageSpec(path=packages_dir, subdir="julia"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.develop(Pkg.PackageSpec(path=packages_dir, subdir="dependencies/Dep"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from path, REPL subdir syntax.
- pkgstr("develop $(packages_dir):julia")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("develop $(packages_dir):dependencies/Dep")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from url.
- Pkg.add(Pkg.PackageSpec(url=packages_dir_url, subdir="julia"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.add(Pkg.PackageSpec(url=packages_dir_url, subdir="dependencies/Dep"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from url, REPL subdir syntax.
- pkgstr("add $(packages_dir_url):julia")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("add $(packages_dir_url):dependencies/Dep")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from url at branch.
- Pkg.add(Pkg.PackageSpec(url=packages_dir_url, subdir="julia",
- rev="master"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.add(Pkg.PackageSpec(url=packages_dir_url, subdir="dependencies/Dep", rev="master"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from url at branch, REPL subdir syntax.
- pkgstr("add $(packages_dir_url):julia#master")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("add $(packages_dir_url):dependencies/Dep#master")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from url.
- Pkg.develop(Pkg.PackageSpec(url=packages_dir_url, subdir="julia"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.develop(Pkg.PackageSpec(url=packages_dir_url, subdir="dependencies/Dep"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from url, REPL subdir syntax.
- pkgstr("develop $(packages_dir_url):julia")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("develop $(packages_dir_url):dependencies/Dep")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
+ # Setup a repository with two packages and a registry where
+ # these packages are registered.
+ packages_dir = mktempdir()
+ registry_dir = mktempdir()
+ packages_dir_url = make_file_url(packages_dir)
+ tree_hashes = setup_packages_repository(packages_dir)
+ setup_registry(registry_dir, packages_dir_url, tree_hashes...)
+ pkgstr("registry add $(registry_dir)")
+ dep = (name = "Dep", uuid = UUID("d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"))
+
+ # Ordinary add from registry.
+ pkg"add Package"
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkg"add Dep"
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add version from registry.
+ pkg"add Package@1.0.0"
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkg"add Dep@1.0.0"
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add branch from registry.
+ pkg"add Package#master"
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+
+ # Test that adding a second time doesn't error (#3391)
+ pkg"add Package#master"
+ @test isinstalled("Package")
+ pkg"rm Package"
+
+ pkg"add Dep#master"
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from registry.
+ pkg"develop Package"
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+
+ # Test developing twice (#3391)
+ pkg"develop Package"
+ @test isinstalled("Package")
+ pkg"rm Package"
+
+ pkg"develop Dep"
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from path.
+ Pkg.add(Pkg.PackageSpec(path = packages_dir, subdir = "julia"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.add(Pkg.PackageSpec(path = packages_dir, subdir = "dependencies/Dep"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from path, REPL subdir syntax.
+ pkgstr("add $(packages_dir):julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("add $(packages_dir):dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"dev Dep" # 4269
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from path at branch.
+ Pkg.add(Pkg.PackageSpec(path = packages_dir, subdir = "julia", rev = "master"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.add(Pkg.PackageSpec(path = packages_dir, subdir = "dependencies/Dep", rev = "master"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from path at branch, REPL subdir syntax
+ pkgstr("add $(packages_dir)#master:julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("add $(packages_dir)#master:dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from path.
+ Pkg.develop(Pkg.PackageSpec(path = packages_dir, subdir = "julia"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.develop(Pkg.PackageSpec(path = packages_dir, subdir = "dependencies/Dep"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from path, REPL subdir syntax.
+ pkgstr("develop $(packages_dir):julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("develop $(packages_dir):dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from url.
+ Pkg.add(Pkg.PackageSpec(url = packages_dir_url, subdir = "julia"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.add(Pkg.PackageSpec(url = packages_dir_url, subdir = "dependencies/Dep"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from url, REPL subdir syntax.
+ pkgstr("add $(packages_dir_url):julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("add $(packages_dir_url):dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from url at branch.
+ Pkg.add(
+ Pkg.PackageSpec(
+ url = packages_dir_url, subdir = "julia",
+ rev = "master"
+ )
+ )
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.add(Pkg.PackageSpec(url = packages_dir_url, subdir = "dependencies/Dep", rev = "master"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from url at branch, REPL subdir syntax.
+ pkgstr("add $(packages_dir_url)#master:julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("add $(packages_dir_url)#master:dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from url.
+ Pkg.develop(Pkg.PackageSpec(url = packages_dir_url, subdir = "julia"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.develop(Pkg.PackageSpec(url = packages_dir_url, subdir = "dependencies/Dep"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from url, REPL subdir syntax.
+ pkgstr("develop $(packages_dir_url):julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("develop $(packages_dir_url):dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
end #cd
end
end
diff --git a/test/test_packages/.gitignore b/test/test_packages/.gitignore
new file mode 100644
index 0000000000..3d68ab37d6
--- /dev/null
+++ b/test/test_packages/.gitignore
@@ -0,0 +1,2 @@
+Manifest.toml
+!AllowReresolveTest/Manifest.toml
diff --git a/test/test_packages/AllowReresolveTest/Manifest.toml b/test/test_packages/AllowReresolveTest/Manifest.toml
new file mode 100644
index 0000000000..518f8d04bc
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/Manifest.toml
@@ -0,0 +1,62 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.13.0-DEV"
+manifest_format = "2.0"
+project_hash = "a100b4eee2a8dd47230a6724ae4de850bddbb7a5"
+
+[[deps.AllowReresolveTest]]
+deps = ["Example"]
+path = "."
+uuid = "12345678-1234-1234-1234-123456789abc"
+version = "0.1.0"
+
+[[deps.Base64]]
+uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
+version = "1.11.0"
+
+[[deps.Example]]
+deps = ["Test"]
+git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
+uuid = "7876af07-990d-54b4-ab0e-23690620f79a"
+version = "0.5.1"
+
+[[deps.InteractiveUtils]]
+deps = ["Markdown"]
+uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
+version = "1.11.0"
+
+[[deps.JuliaSyntaxHighlighting]]
+deps = ["StyledStrings"]
+uuid = "ac6e5ff7-fb65-4e79-a425-ec3bc9c03011"
+version = "1.12.0"
+
+[[deps.Logging]]
+uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
+version = "1.11.0"
+
+[[deps.Markdown]]
+deps = ["Base64", "JuliaSyntaxHighlighting", "StyledStrings"]
+uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
+version = "1.11.0"
+
+[[deps.Random]]
+deps = ["SHA"]
+uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+version = "1.11.0"
+
+[[deps.SHA]]
+uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
+version = "0.7.0"
+
+[[deps.Serialization]]
+uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
+version = "1.11.0"
+
+[[deps.StyledStrings]]
+uuid = "f489334b-da3d-4c2e-b8f0-e476e12c162b"
+version = "1.11.0"
+
+[[deps.Test]]
+deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
+uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+version = "1.11.0"
diff --git a/test/test_packages/AllowReresolveTest/Project.toml b/test/test_packages/AllowReresolveTest/Project.toml
new file mode 100644
index 0000000000..643237b7b5
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/Project.toml
@@ -0,0 +1,16 @@
+name = "AllowReresolveTest"
+uuid = "12345678-1234-1234-1234-123456789abc"
+version = "0.1.0"
+
+[deps]
+Example = "7876af07-990d-54b4-ab0e-23690620f79a"
+
+[compat]
+Example = "0.5"
+
+[extras]
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[targets]
+test = ["Test"]
+build = ["Test"]
diff --git a/test/test_packages/AllowReresolveTest/deps/build.jl b/test/test_packages/AllowReresolveTest/deps/build.jl
new file mode 100644
index 0000000000..28e53db871
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/deps/build.jl
@@ -0,0 +1,3 @@
+# Build script for AllowReresolveTest
+using Test
+println("Build completed successfully!")
diff --git a/test/test_packages/AllowReresolveTest/src/AllowReresolveTest.jl b/test/test_packages/AllowReresolveTest/src/AllowReresolveTest.jl
new file mode 100644
index 0000000000..e549c3a22e
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/src/AllowReresolveTest.jl
@@ -0,0 +1,7 @@
+module AllowReresolveTest
+
+import Example
+
+greet() = "Hello from AllowReresolveTest using Example!"
+
+end
diff --git a/test/test_packages/AllowReresolveTest/test/runtests.jl b/test/test_packages/AllowReresolveTest/test/runtests.jl
new file mode 100644
index 0000000000..a1c953c162
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/test/runtests.jl
@@ -0,0 +1,6 @@
+using Test
+using AllowReresolveTest
+
+@testset "AllowReresolveTest.jl" begin
+ @test AllowReresolveTest.greet() == "Hello from AllowReresolveTest using Example!"
+end
diff --git a/test/test_packages/ArtifactInstallation/Artifacts.toml b/test/test_packages/ArtifactInstallation/Artifacts.toml
index 798e65c7bd..e32e4c7d56 100644
--- a/test/test_packages/ArtifactInstallation/Artifacts.toml
+++ b/test/test_packages/ArtifactInstallation/Artifacts.toml
@@ -147,16 +147,16 @@ git-tree-sha1 = "43563e7631a7eafae1f9f8d9d332e3de44ad7239"
lazy = true
[[socrates.download]]
-url = "https://github.com/staticfloat/small_bin/raw/master/socrates.tar.gz"
+url = "https://github.com/staticfloat/small_bin/raw/91f3ecf327d1de943fe076657833252791ba9f60/socrates.tar.gz"
sha256 = "e65d2f13f2085f2c279830e863292312a72930fee5ba3c792b14c33ce5c5cc58"
[[socrates.download]]
-url = "https://github.com/staticfloat/small_bin/raw/master/socrates.tar.bz2"
+url = "https://github.com/staticfloat/small_bin/raw/91f3ecf327d1de943fe076657833252791ba9f60/socrates.tar.bz2"
sha256 = "13fc17b97be41763b02cbb80e9d048302cec3bd3d446c2ed6e8210bddcd3ac76"
[collapse_the_symlink]
git-tree-sha1 = "69a468bd51751f4ed7eda31c240e775df06d6ee6"
[[collapse_the_symlink.download]]
-url = "https://github.com/staticfloat/small_bin/raw/master/collapse_the_symlink/collapse_the_symlink.tar.gz"
+url = "https://github.com/staticfloat/small_bin/raw/91f3ecf327d1de943fe076657833252791ba9f60/collapse_the_symlink/collapse_the_symlink.tar.gz"
sha256 = "956c1201405f64d3465cc28cb0dec9d63c11a08cad28c381e13bb22e1fc469d3"
diff --git a/test/test_packages/ArtifactInstallation/src/ArtifactInstallation.jl b/test/test_packages/ArtifactInstallation/src/ArtifactInstallation.jl
index e5ee4a6fc3..062ec1c065 100644
--- a/test/test_packages/ArtifactInstallation/src/ArtifactInstallation.jl
+++ b/test/test_packages/ArtifactInstallation/src/ArtifactInstallation.jl
@@ -17,10 +17,10 @@ function do_test()
# Test that we can use a variable, not just a literal:
hello_world = "HelloWorldC"
hello_world_exe = joinpath(@artifact_str(hello_world), "bin", "hello_world")
- if Sys.iswindows()
+ if Sys.iswindows()
hello_world_exe = "$(hello_world_exe).exe"
end
- @test isfile(hello_world_exe)
+ return @test isfile(hello_world_exe)
end
end
diff --git a/test/test_packages/ArtifactOverrideLoading/Artifacts.toml b/test/test_packages/ArtifactOverrideLoading/Artifacts.toml
index faa5a53769..c06a4e2b32 100644
--- a/test/test_packages/ArtifactOverrideLoading/Artifacts.toml
+++ b/test/test_packages/ArtifactOverrideLoading/Artifacts.toml
@@ -3,4 +3,3 @@ git-tree-sha1 = "0000000000000000000000000000000000000000"
[barty]
git-tree-sha1 = "1111111111111111111111111111111111111111"
-
diff --git a/test/test_packages/ArtifactOverrideLoading/src/ArtifactOverrideLoading.jl b/test/test_packages/ArtifactOverrideLoading/src/ArtifactOverrideLoading.jl
index f541bd0068..63eb5b6e01 100644
--- a/test/test_packages/ArtifactOverrideLoading/src/ArtifactOverrideLoading.jl
+++ b/test/test_packages/ArtifactOverrideLoading/src/ArtifactOverrideLoading.jl
@@ -1,10 +1,10 @@
__precompile__(false)
module ArtifactOverrideLoading
-using Artifacts
-export arty_path, barty_path
+ using Artifacts
+ export arty_path, barty_path
-# These will fail (get set to `nothing`) unless they get redirected
-const arty_path = artifact"arty"
-const barty_path = artifact"barty"
+ # These will fail (get set to `nothing`) unless they get redirected
+ const arty_path = artifact"arty"
+ const barty_path = artifact"barty"
end # module
diff --git a/test/test_packages/ArtifactTOMLSearch/julia_artifacts_test/pkg.jl b/test/test_packages/ArtifactTOMLSearch/julia_artifacts_test/pkg.jl
index 051e436fe6..bb7279448a 100644
--- a/test/test_packages/ArtifactTOMLSearch/julia_artifacts_test/pkg.jl
+++ b/test/test_packages/ArtifactTOMLSearch/julia_artifacts_test/pkg.jl
@@ -9,4 +9,4 @@ function do_test()
return isfile(joinpath(arty, "bin", "socrates"))
end
-end # module
\ No newline at end of file
+end # module
diff --git a/test/test_packages/ArtifactTOMLSearch/pkg.jl b/test/test_packages/ArtifactTOMLSearch/pkg.jl
index 051e436fe6..bb7279448a 100644
--- a/test/test_packages/ArtifactTOMLSearch/pkg.jl
+++ b/test/test_packages/ArtifactTOMLSearch/pkg.jl
@@ -9,4 +9,4 @@ function do_test()
return isfile(joinpath(arty, "bin", "socrates"))
end
-end # module
\ No newline at end of file
+end # module
diff --git a/test/test_packages/ArtifactTOMLSearch/sub_module/pkg.jl b/test/test_packages/ArtifactTOMLSearch/sub_module/pkg.jl
index e5c4db6359..c2cabcd328 100644
--- a/test/test_packages/ArtifactTOMLSearch/sub_module/pkg.jl
+++ b/test/test_packages/ArtifactTOMLSearch/sub_module/pkg.jl
@@ -3,4 +3,4 @@ using Pkg.Artifacts
# All this module will do is reference its `arty` Artifact.
arty = artifact"arty"
-end
\ No newline at end of file
+end
diff --git a/test/test_packages/ArtifactTOMLSearch/sub_package/pkg.jl b/test/test_packages/ArtifactTOMLSearch/sub_package/pkg.jl
index 642d03d92d..898a5ac95a 100644
--- a/test/test_packages/ArtifactTOMLSearch/sub_package/pkg.jl
+++ b/test/test_packages/ArtifactTOMLSearch/sub_package/pkg.jl
@@ -3,4 +3,4 @@ using Pkg.Artifacts
# All this module will do is reference its `arty` Artifact.
arty = artifact"arty"
-end
\ No newline at end of file
+end
diff --git a/test/test_packages/BigProject/RecursiveDep/Project.toml b/test/test_packages/BigProject/RecursiveDep/Project.toml
index c6e7f49c49..3eb5284036 100644
--- a/test/test_packages/BigProject/RecursiveDep/Project.toml
+++ b/test/test_packages/BigProject/RecursiveDep/Project.toml
@@ -3,4 +3,4 @@ uuid = "f5db5478-804a-11e8-3275-3180cf89cd91"
version = "0.1.0"
[deps]
-RecursiveDep2 = "63fe803a-804b-11e8-2b51-3d06555b755a"
\ No newline at end of file
+RecursiveDep2 = "63fe803a-804b-11e8-2b51-3d06555b755a"
diff --git a/test/test_packages/BigProject/RecursiveDep/src/RecursiveDep.jl b/test/test_packages/BigProject/RecursiveDep/src/RecursiveDep.jl
index 209d0f6939..04e06c625d 100644
--- a/test/test_packages/BigProject/RecursiveDep/src/RecursiveDep.jl
+++ b/test/test_packages/BigProject/RecursiveDep/src/RecursiveDep.jl
@@ -4,4 +4,4 @@ module RecursiveDep
using RecursiveDep2
-end
\ No newline at end of file
+end
diff --git a/test/test_packages/BigProject/RecursiveDep2/Project.toml b/test/test_packages/BigProject/RecursiveDep2/Project.toml
index eca4098bf4..6a228c8984 100644
--- a/test/test_packages/BigProject/RecursiveDep2/Project.toml
+++ b/test/test_packages/BigProject/RecursiveDep2/Project.toml
@@ -1,3 +1,3 @@
name = "RecursiveDep2"
uuid = "63fe803a-804b-11e8-2b51-3d06555b755a"
-version = "0.1.0"
\ No newline at end of file
+version = "0.1.0"
diff --git a/test/test_packages/BigProject/RecursiveDep2/src/RecursiveDep2.jl b/test/test_packages/BigProject/RecursiveDep2/src/RecursiveDep2.jl
index ab97c773ed..fa1399704e 100644
--- a/test/test_packages/BigProject/RecursiveDep2/src/RecursiveDep2.jl
+++ b/test/test_packages/BigProject/RecursiveDep2/src/RecursiveDep2.jl
@@ -2,4 +2,4 @@
module RecursiveDep2
-end
\ No newline at end of file
+end
diff --git a/test/test_packages/BuildProjectFixedDeps/.gitignore b/test/test_packages/BuildProjectFixedDeps/.gitignore
index 387750f19f..ca97c0f7d8 100644
--- a/test/test_packages/BuildProjectFixedDeps/.gitignore
+++ b/test/test_packages/BuildProjectFixedDeps/.gitignore
@@ -1,2 +1,2 @@
deps/artifact
-deps/build.log
\ No newline at end of file
+deps/build.log
diff --git a/test/test_packages/ExtensionExamples/HasDepWithExtensions.jl/src/HasDepWithExtensions.jl b/test/test_packages/ExtensionExamples/HasDepWithExtensions.jl/src/HasDepWithExtensions.jl
index 1f4d5790e1..0ce3f4ab1f 100644
--- a/test/test_packages/ExtensionExamples/HasDepWithExtensions.jl/src/HasDepWithExtensions.jl
+++ b/test/test_packages/ExtensionExamples/HasDepWithExtensions.jl/src/HasDepWithExtensions.jl
@@ -15,7 +15,7 @@ function do_something()
HasExtensions.foo(OffsetArray(rand(Float64, 2), 0:1)) == 2 || error("Unexpected value")
# @info "Now do something with extended IndirectArray support"
- HasExtensions.foo(IndirectArray(rand(1:6, 32, 32), 1:6)) == 3 || error("Unexpected value")
+ return HasExtensions.foo(IndirectArray(rand(1:6, 32, 32), 1:6)) == 3 || error("Unexpected value")
end
end # module
diff --git a/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/IndirectArraysExt.jl b/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/IndirectArraysExt.jl
index 9f9611e533..b4c359b43c 100644
--- a/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/IndirectArraysExt.jl
+++ b/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/IndirectArraysExt.jl
@@ -8,7 +8,7 @@ function foo(::IndirectArray)
end
function __init__()
- HasExtensions.indirectarrays_loaded = true
+ return HasExtensions.indirectarrays_loaded = true
end
end
diff --git a/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/OffsetArraysExt.jl b/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/OffsetArraysExt.jl
index 7bdad0b352..03a784753c 100644
--- a/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/OffsetArraysExt.jl
+++ b/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/OffsetArraysExt.jl
@@ -8,7 +8,7 @@ function foo(::OffsetArray)
end
function __init__()
- HasExtensions.offsetarrays_loaded = true
+ return HasExtensions.offsetarrays_loaded = true
end
end
diff --git a/test/test_packages/RecursiveSources/ChildPkg/Project.toml b/test/test_packages/RecursiveSources/ChildPkg/Project.toml
new file mode 100644
index 0000000000..1c473064b0
--- /dev/null
+++ b/test/test_packages/RecursiveSources/ChildPkg/Project.toml
@@ -0,0 +1,11 @@
+name = "ChildPkg"
+uuid = "22222222-2222-2222-2222-222222222222"
+version = "0.1.0"
+
+[deps]
+GrandchildPkg = "33333333-3333-3333-3333-333333333333"
+SiblingPkg = "44444444-4444-4444-4444-444444444444"
+
+[sources]
+GrandchildPkg = { url = "__GRANDCHILD_URL__" }
+SiblingPkg = { path = "SiblingPkg" }
diff --git a/test/test_packages/RecursiveSources/ChildPkg/SiblingPkg/Project.toml b/test/test_packages/RecursiveSources/ChildPkg/SiblingPkg/Project.toml
new file mode 100644
index 0000000000..0c094d71dc
--- /dev/null
+++ b/test/test_packages/RecursiveSources/ChildPkg/SiblingPkg/Project.toml
@@ -0,0 +1,3 @@
+name = "SiblingPkg"
+uuid = "44444444-4444-4444-4444-444444444444"
+version = "0.1.0"
diff --git a/test/test_packages/RecursiveSources/ChildPkg/SiblingPkg/src/SiblingPkg.jl b/test/test_packages/RecursiveSources/ChildPkg/SiblingPkg/src/SiblingPkg.jl
new file mode 100644
index 0000000000..075934719d
--- /dev/null
+++ b/test/test_packages/RecursiveSources/ChildPkg/SiblingPkg/src/SiblingPkg.jl
@@ -0,0 +1,5 @@
+module SiblingPkg
+
+offset() = 5
+
+end
diff --git a/test/test_packages/RecursiveSources/ChildPkg/src/ChildPkg.jl b/test/test_packages/RecursiveSources/ChildPkg/src/ChildPkg.jl
new file mode 100644
index 0000000000..0ea405807b
--- /dev/null
+++ b/test/test_packages/RecursiveSources/ChildPkg/src/ChildPkg.jl
@@ -0,0 +1,8 @@
+module ChildPkg
+
+using GrandchildPkg
+using SiblingPkg
+
+child_value() = GrandchildPkg.VALUE + SiblingPkg.offset()
+
+end
diff --git a/test/test_packages/RecursiveSources/GrandchildPkg/Project.toml b/test/test_packages/RecursiveSources/GrandchildPkg/Project.toml
new file mode 100644
index 0000000000..e6c8611502
--- /dev/null
+++ b/test/test_packages/RecursiveSources/GrandchildPkg/Project.toml
@@ -0,0 +1,3 @@
+name = "GrandchildPkg"
+uuid = "33333333-3333-3333-3333-333333333333"
+version = "0.1.0"
diff --git a/test/test_packages/RecursiveSources/GrandchildPkg/src/GrandchildPkg.jl b/test/test_packages/RecursiveSources/GrandchildPkg/src/GrandchildPkg.jl
new file mode 100644
index 0000000000..63463012d5
--- /dev/null
+++ b/test/test_packages/RecursiveSources/GrandchildPkg/src/GrandchildPkg.jl
@@ -0,0 +1,5 @@
+module GrandchildPkg
+
+const VALUE = 42
+
+end
diff --git a/test/test_packages/RecursiveSources/ParentPkg/Project.toml b/test/test_packages/RecursiveSources/ParentPkg/Project.toml
new file mode 100644
index 0000000000..3c2dd816b9
--- /dev/null
+++ b/test/test_packages/RecursiveSources/ParentPkg/Project.toml
@@ -0,0 +1,9 @@
+name = "ParentPkg"
+uuid = "11111111-1111-1111-1111-111111111111"
+version = "0.1.0"
+
+[deps]
+ChildPkg = "22222222-2222-2222-2222-222222222222"
+
+[sources]
+ChildPkg = { url = "__CHILD_URL__" }
diff --git a/test/test_packages/RecursiveSources/ParentPkg/src/ParentPkg.jl b/test/test_packages/RecursiveSources/ParentPkg/src/ParentPkg.jl
new file mode 100644
index 0000000000..b1ed56e867
--- /dev/null
+++ b/test/test_packages/RecursiveSources/ParentPkg/src/ParentPkg.jl
@@ -0,0 +1,7 @@
+module ParentPkg
+
+using ChildPkg
+
+parent_value() = ChildPkg.child_value()
+
+end
diff --git a/test/test_packages/Rot13.jl/Manifest.toml b/test/test_packages/Rot13.jl/Manifest.toml
new file mode 100644
index 0000000000..bbef702fb7
--- /dev/null
+++ b/test/test_packages/Rot13.jl/Manifest.toml
@@ -0,0 +1,10 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.12.0-DEV"
+manifest_format = "2.0"
+project_hash = "2610b29b73f9f9432fb181a7f9f7c5c9e3de5557"
+
+[[deps.Rot13]]
+path = "."
+uuid = "43ef800a-eac4-47f4-949b-25107b932e8f"
+version = "0.1.0"
diff --git a/test/test_packages/Rot13.jl/Project.toml b/test/test_packages/Rot13.jl/Project.toml
new file mode 100644
index 0000000000..49cfb15765
--- /dev/null
+++ b/test/test_packages/Rot13.jl/Project.toml
@@ -0,0 +1,8 @@
+name = "Rot13"
+uuid = "43ef800a-eac4-47f4-949b-25107b932e8f"
+version = "0.1.0"
+
+[apps]
+juliarot13 = {}
+juliarot13cli = { submodule = "CLI" }
+juliarot13flags = { submodule = "FlagsDemo", julia_flags = ["--threads=2", "--optimize=3"] }
diff --git a/test/test_packages/Rot13.jl/src/CLI.jl b/test/test_packages/Rot13.jl/src/CLI.jl
new file mode 100644
index 0000000000..4d6a859264
--- /dev/null
+++ b/test/test_packages/Rot13.jl/src/CLI.jl
@@ -0,0 +1,18 @@
+module CLI
+
+using ..Rot13: rot13
+
+function (@main)(ARGS)
+ if length(ARGS) == 0
+ println("Usage: rot13cli ")
+ return 1
+ end
+
+ for arg in ARGS
+ # Add a prefix to distinguish from main module output
+ println("CLI: $(rot13(arg))")
+ end
+ return 0
+end
+
+end # module CLI
diff --git a/test/test_packages/Rot13.jl/src/FlagsDemo.jl b/test/test_packages/Rot13.jl/src/FlagsDemo.jl
new file mode 100644
index 0000000000..c2989d294e
--- /dev/null
+++ b/test/test_packages/Rot13.jl/src/FlagsDemo.jl
@@ -0,0 +1,12 @@
+module FlagsDemo
+
+function (@main)(ARGS)
+ println("Julia flags demo!")
+ println("Thread count: $(Threads.nthreads())")
+ println("Optimization level: $(Base.JLOptions().opt_level)")
+ println("Startup file enabled: $(Base.JLOptions().startupfile != 2)")
+ println("App arguments: $(join(ARGS, " "))")
+ return 0
+end
+
+end # module FlagsDemo
diff --git a/test/test_packages/Rot13.jl/src/Rot13.jl b/test/test_packages/Rot13.jl/src/Rot13.jl
new file mode 100644
index 0000000000..ce66987a3e
--- /dev/null
+++ b/test/test_packages/Rot13.jl/src/Rot13.jl
@@ -0,0 +1,20 @@
+module Rot13
+
+function rot13(c::Char)
+ shft = islowercase(c) ? 'a' : 'A'
+ return isletter(c) ? shft + (c - shft + 13) % 26 : c
+end
+
+rot13(str::AbstractString) = map(rot13, str)
+
+function (@main)(ARGS)
+ for arg in ARGS
+ println(rot13(arg))
+ end
+ return 0
+end
+
+include("CLI.jl")
+include("FlagsDemo.jl")
+
+end # module Rot13
diff --git a/test/test_packages/Rot13.jl/src/Rot13_edited.jl b/test/test_packages/Rot13.jl/src/Rot13_edited.jl
new file mode 100644
index 0000000000..26de592056
--- /dev/null
+++ b/test/test_packages/Rot13.jl/src/Rot13_edited.jl
@@ -0,0 +1,7 @@
+module Rot13
+
+function (@main)(ARGS)
+ return println("Updated!")
+end
+
+end # module Rot13
diff --git a/test/test_packages/Sandbox_PreservePreferences/Project.toml b/test/test_packages/Sandbox_PreservePreferences/Project.toml
index fdef8266a3..2d979c2144 100644
--- a/test/test_packages/Sandbox_PreservePreferences/Project.toml
+++ b/test/test_packages/Sandbox_PreservePreferences/Project.toml
@@ -8,4 +8,4 @@ Example = "7876af07-990d-54b4-ab0e-23690620f79a"
Foo = "48898bec-3adb-11e9-02a6-a164ba74aeae"
[preferences]
-Sandbox_PreservePreferences.tree = "birch"
\ No newline at end of file
+Sandbox_PreservePreferences.tree = "birch"
diff --git a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/Project.toml b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/Project.toml
index d7ab2543f7..16b57b3a8e 100644
--- a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/Project.toml
+++ b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/Project.toml
@@ -8,4 +8,4 @@ Preferences = "21216c6a-2e73-6563-6e65-726566657250"
[preferences]
Foo.toy = "train"
Foo.tree = "oak"
-Foo.default = "default"
\ No newline at end of file
+Foo.default = "default"
diff --git a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/src/Foo.jl b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/src/Foo.jl
index 0cd72d4280..f1fd161198 100644
--- a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/src/Foo.jl
+++ b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/src/Foo.jl
@@ -2,7 +2,7 @@ module Foo
using Preferences
-set!(key, value) = @set_preferences!(key=>value)
+set!(key, value) = @set_preferences!(key => value)
get(key) = @load_preference(key)
end # module
diff --git a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/test/Project.toml b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/test/Project.toml
index ed0fb056c6..cc43ab0f6d 100644
--- a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/test/Project.toml
+++ b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/test/Project.toml
@@ -3,4 +3,4 @@ Preferences = "21216c6a-2e73-6563-6e65-726566657250"
Foo = "48898bec-3adb-11e9-02a6-a164ba74aeae"
[preferences]
-Foo.tree = "birch"
\ No newline at end of file
+Foo.tree = "birch"
diff --git a/test/test_packages/Sandbox_PreservePreferences/test/runtests.jl b/test/test_packages/Sandbox_PreservePreferences/test/runtests.jl
index e69de29bb2..1bb8bf6d7f 100644
--- a/test/test_packages/Sandbox_PreservePreferences/test/runtests.jl
+++ b/test/test_packages/Sandbox_PreservePreferences/test/runtests.jl
@@ -0,0 +1 @@
+# empty
diff --git a/test/test_packages/ShouldPreserveSemver/Manifest.toml b/test/test_packages/ShouldPreserveSemver/Manifest_MbedTLS.toml
similarity index 100%
rename from test/test_packages/ShouldPreserveSemver/Manifest.toml
rename to test/test_packages/ShouldPreserveSemver/Manifest_MbedTLS.toml
diff --git a/test/test_packages/ShouldPreserveSemver/Manifest_OpenSSL.toml b/test/test_packages/ShouldPreserveSemver/Manifest_OpenSSL.toml
new file mode 100644
index 0000000000..f1fe5150bd
--- /dev/null
+++ b/test/test_packages/ShouldPreserveSemver/Manifest_OpenSSL.toml
@@ -0,0 +1,258 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.9.0-DEV"
+manifest_format = "2.0"
+project_hash = "9af0d7a4d60a77b1a42f518d7da50edc4261ffcb"
+
+[[deps.ArgTools]]
+uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f"
+version = "1.1.1"
+
+[[deps.Arpack]]
+deps = ["BinaryProvider", "Libdl", "LinearAlgebra"]
+git-tree-sha1 = "07a2c077bdd4b6d23a40342a8a108e2ee5e58ab6"
+uuid = "7d9fca2a-8960-54d3-9f78-7d1dccf2cb97"
+version = "0.3.1"
+
+[[deps.Artifacts]]
+uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
+
+[[deps.Base64]]
+uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
+
+[[deps.BinaryProvider]]
+deps = ["Libdl", "Logging", "SHA"]
+git-tree-sha1 = "c7361ce8a2129f20b0e05a89f7070820cfed6648"
+uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
+version = "0.5.6"
+
+[[deps.CSTParser]]
+deps = ["Tokenize"]
+git-tree-sha1 = "c69698c3d4a7255bc1b4bc2afc09f59db910243b"
+uuid = "00ebfdb7-1f24-5e51-bd34-a7502290713f"
+version = "0.6.2"
+
+[[deps.CodecZlib]]
+deps = ["BinaryProvider", "Libdl", "TranscodingStreams"]
+git-tree-sha1 = "05916673a2627dd91b4969ff8ba6941bc85a960e"
+uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
+version = "0.6.0"
+
+[[deps.Compat]]
+deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"]
+git-tree-sha1 = "84aa74986c5b9b898b0d1acaf3258741ee64754f"
+uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
+version = "2.1.0"
+
+[[deps.CompilerSupportLibraries_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
+version = "0.5.2+0"
+
+[[deps.DataStructures]]
+deps = ["InteractiveUtils", "OrderedCollections"]
+git-tree-sha1 = "0809951a1774dc724da22d26e4289bbaab77809a"
+uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
+version = "0.17.0"
+
+[[deps.Dates]]
+deps = ["Printf"]
+uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
+
+[[deps.DelimitedFiles]]
+deps = ["Mmap"]
+git-tree-sha1 = "19b1417ff479c07e523fcbf2fd735a3fde3d1ab3"
+uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab"
+version = "1.9.0"
+
+[[deps.Distributed]]
+deps = ["Random", "Serialization", "Sockets"]
+uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
+
+[[deps.Downloads]]
+deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"]
+uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
+version = "1.6.0"
+
+[[deps.FileWatching]]
+uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee"
+
+[[deps.InteractiveUtils]]
+deps = ["Markdown"]
+uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
+
+[[deps.LibCURL]]
+deps = ["LibCURL_jll", "MozillaCACerts_jll"]
+uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21"
+version = "0.6.3"
+
+[[deps.LibCURL_jll]]
+deps = ["Artifacts", "LibSSH2_jll", "Libdl", "OpenSSL_jll", "Zlib_jll", "nghttp2_jll"]
+uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0"
+version = "8.9.1+0"
+
+[[deps.LibGit2]]
+deps = ["Base64", "NetworkOptions", "Printf", "SHA"]
+uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
+
+[[deps.LibSSH2_jll]]
+deps = ["Artifacts", "Libdl", "OpenSSL_jll"]
+uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8"
+version = "1.11.3+0"
+
+[[deps.Libdl]]
+uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
+
+[[deps.LightGraphs]]
+deps = ["Arpack", "Base64", "CodecZlib", "DataStructures", "DelimitedFiles", "Distributed", "LinearAlgebra", "Markdown", "Random", "SharedArrays", "SimpleTraits", "SparseArrays", "Statistics", "Test"]
+git-tree-sha1 = "e7e380a7c009019df1203bf400894aa04ee37ba0"
+uuid = "093fc24a-ae57-5d10-9952-331d41423f4d"
+version = "1.0.1"
+
+[[deps.LinearAlgebra]]
+deps = ["Libdl", "OpenBLAS_jll", "libblastrampoline_jll"]
+uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+
+[[deps.Logging]]
+uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
+
+[[deps.MacroTools]]
+deps = ["CSTParser", "Compat", "DataStructures", "Test", "Tokenize"]
+git-tree-sha1 = "d6e9dedb8c92c3465575442da456aec15a89ff76"
+uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
+version = "0.5.1"
+
+[[deps.Markdown]]
+deps = ["Base64"]
+uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
+
+[[deps.OpenSSL_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95"
+version = "3.0.15+1"
+
+[[deps.Mmap]]
+uuid = "a63ad114-7e13-5084-954f-fe012c677804"
+
+[[deps.MozillaCACerts_jll]]
+uuid = "14a3606d-f60d-562e-9121-12d972cd8159"
+version = "2024.11.26"
+
+[[deps.NetworkOptions]]
+uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
+version = "1.2.0"
+
+[[deps.OpenBLAS_jll]]
+deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"]
+uuid = "4536629a-c528-5b80-bd46-f80d51c5b363"
+version = "0.3.20+0"
+
+[[deps.OrderedCollections]]
+deps = ["Random", "Serialization", "Test"]
+git-tree-sha1 = "c4c13474d23c60d20a67b217f1d7f22a40edf8f1"
+uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
+version = "1.1.0"
+
+[[deps.Pkg]]
+deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "TOML", "Tar", "UUIDs", "p7zip_jll"]
+uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
+version = "1.8.0"
+
+[[deps.Printf]]
+deps = ["Unicode"]
+uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
+
+[[deps.REPL]]
+deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"]
+uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
+
+[[deps.Random]]
+deps = ["SHA", "Serialization"]
+uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+
+[[deps.SHA]]
+uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
+version = "0.7.0"
+
+[[deps.Serialization]]
+uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
+
+[[deps.SharedArrays]]
+deps = ["Distributed", "Mmap", "Random", "Serialization"]
+uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
+
+[[deps.SimpleTraits]]
+deps = ["InteractiveUtils", "MacroTools"]
+git-tree-sha1 = "05bbf4484b975782e5e54bb0750f21f7f2f66171"
+uuid = "699a6c99-e7fa-54fc-8d76-47d257e15c1d"
+version = "0.9.0"
+
+[[deps.Sockets]]
+uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
+
+[[deps.SparseArrays]]
+deps = ["Libdl", "LinearAlgebra", "Random", "Serialization", "SuiteSparse_jll"]
+uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
+
+[[deps.Statistics]]
+deps = ["LinearAlgebra", "SparseArrays"]
+git-tree-sha1 = "83850190e0f902ae1673d63ae349fc2a36dc6afb"
+uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
+version = "1.11"
+
+[[deps.SuiteSparse_jll]]
+deps = ["Artifacts", "Libdl", "Pkg", "libblastrampoline_jll"]
+uuid = "bea87d4a-7f5b-5778-9afe-8cc45184846c"
+version = "5.10.1+0"
+
+[[deps.TOML]]
+deps = ["Dates"]
+uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
+version = "1.0.0"
+
+[[deps.Tar]]
+deps = ["ArgTools", "SHA"]
+uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e"
+version = "1.10.0"
+
+[[deps.Test]]
+deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
+uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[[deps.Tokenize]]
+git-tree-sha1 = "dfcdbbfb2d0370716c815cbd6f8a364efb6f42cf"
+uuid = "0796e94c-ce3b-5d07-9a54-7f471281c624"
+version = "0.5.6"
+
+[[deps.TranscodingStreams]]
+deps = ["Random", "Test"]
+git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
+uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
+version = "0.9.5"
+
+[[deps.UUIDs]]
+deps = ["Random", "SHA"]
+uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
+
+[[deps.Unicode]]
+uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
+
+[[deps.Zlib_jll]]
+deps = ["Libdl"]
+uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
+version = "1.2.12+3"
+
+[[deps.libblastrampoline_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "8e850b90-86db-534c-a0d3-1478176c7d93"
+version = "5.1.1+0"
+
+[[deps.nghttp2_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d"
+version = "1.48.0+0"
+
+[[deps.p7zip_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0"
+version = "17.4.0+0"
diff --git a/test/test_packages/TestDepTrackingPath/test/runtests.jl b/test/test_packages/TestDepTrackingPath/test/runtests.jl
index e69de29bb2..1bb8bf6d7f 100644
--- a/test/test_packages/TestDepTrackingPath/test/runtests.jl
+++ b/test/test_packages/TestDepTrackingPath/test/runtests.jl
@@ -0,0 +1 @@
+# empty
diff --git a/test/test_packages/TestFailure/Project.toml b/test/test_packages/TestFailure/Project.toml
index 1033510237..7f4c5c01b8 100644
--- a/test/test_packages/TestFailure/Project.toml
+++ b/test/test_packages/TestFailure/Project.toml
@@ -7,4 +7,4 @@ version = "0.1.0"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[targets]
-test = ["Test"]
\ No newline at end of file
+test = ["Test"]
diff --git a/test/test_packages/TestThreads/Project.toml b/test/test_packages/TestThreads/Project.toml
new file mode 100644
index 0000000000..35e36aed33
--- /dev/null
+++ b/test/test_packages/TestThreads/Project.toml
@@ -0,0 +1,2 @@
+name = "TestThreads"
+uuid = "79df5fe7-ed23-44ca-b7b9-b3881e57664d"
diff --git a/test/test_packages/TestThreads/src/TestThreads.jl b/test/test_packages/TestThreads/src/TestThreads.jl
new file mode 100644
index 0000000000..11d357747f
--- /dev/null
+++ b/test/test_packages/TestThreads/src/TestThreads.jl
@@ -0,0 +1,2 @@
+module TestThreads
+end
diff --git a/test/test_packages/TestThreads/test/runtests.jl b/test/test_packages/TestThreads/test/runtests.jl
new file mode 100644
index 0000000000..cd19c368da
--- /dev/null
+++ b/test/test_packages/TestThreads/test/runtests.jl
@@ -0,0 +1,11 @@
+@assert haskey(ENV, "EXPECTED_NUM_THREADS_DEFAULT")
+@assert haskey(ENV, "EXPECTED_NUM_THREADS_INTERACTIVE")
+EXPECTED_NUM_THREADS_DEFAULT = parse(Int, ENV["EXPECTED_NUM_THREADS_DEFAULT"])
+EXPECTED_NUM_THREADS_INTERACTIVE = parse(Int, ENV["EXPECTED_NUM_THREADS_INTERACTIVE"])
+@assert Threads.nthreads() == EXPECTED_NUM_THREADS_DEFAULT
+@assert Threads.nthreads(:default) == EXPECTED_NUM_THREADS_DEFAULT
+if Threads.nthreads() == 1
+ @info "Convert me back to an assert once https://github.com/JuliaLang/julia/pull/57454 has landed" Threads.nthreads(:interactive) EXPECTED_NUM_THREADS_INTERACTIVE
+else
+ @assert Threads.nthreads(:interactive) == EXPECTED_NUM_THREADS_INTERACTIVE
+end
diff --git a/test/test_packages/WithSources/TestMonorepo/Project.toml b/test/test_packages/WithSources/TestMonorepo/Project.toml
new file mode 100644
index 0000000000..7c726b9389
--- /dev/null
+++ b/test/test_packages/WithSources/TestMonorepo/Project.toml
@@ -0,0 +1,17 @@
+name = "TestMonorepo"
+uuid = "864d8eef-2526-4817-933e-34008eadd182"
+authors = ["KristofferC "]
+version = "0.1.0"
+
+[deps]
+Unregistered = "dcb67f36-efa0-11e8-0cef-2fc465ed98ae"
+
+[extras]
+Example = "d359f271-ef68-451f-b4fc-6b43e571086c"
+
+[sources]
+Example = {url = "https://github.com/JuliaLang/Pkg.jl", subdir = "test/test_packages/Example"}
+Unregistered = {url = "https://github.com/00vareladavid/Unregistered.jl", rev = "1b7a462"}
+
+[targets]
+test = ["Example"]
diff --git a/test/test_packages/WithSources/TestMonorepo/src/TestMonorepo.jl b/test/test_packages/WithSources/TestMonorepo/src/TestMonorepo.jl
new file mode 100644
index 0000000000..9a4aa4f8f7
--- /dev/null
+++ b/test/test_packages/WithSources/TestMonorepo/src/TestMonorepo.jl
@@ -0,0 +1,6 @@
+module TestMonorepo
+using Unregistered
+
+greet() = print("Hello World!")
+
+end
diff --git a/test/test_packages/WithSources/TestMonorepo/test/runtests.jl b/test/test_packages/WithSources/TestMonorepo/test/runtests.jl
new file mode 100644
index 0000000000..81a7bcd223
--- /dev/null
+++ b/test/test_packages/WithSources/TestMonorepo/test/runtests.jl
@@ -0,0 +1,2 @@
+using Example
+using Unregistered
diff --git a/test/test_packages/WithSources/URLSourceInDevvedPackage/Project.toml b/test/test_packages/WithSources/URLSourceInDevvedPackage/Project.toml
new file mode 100644
index 0000000000..a73c636c7d
--- /dev/null
+++ b/test/test_packages/WithSources/URLSourceInDevvedPackage/Project.toml
@@ -0,0 +1,10 @@
+name = "URLSourceInDevvedPackage"
+uuid = "78d3b172-12ec-4a7f-9187-8bf78594552a"
+version = "0.1.0"
+authors = ["Kristoffer "]
+
+[deps]
+TestMonorepo = "864d8eef-2526-4817-933e-34008eadd182"
+
+[sources]
+TestMonorepo = {path = "../TestMonorepo"}
diff --git a/test/test_packages/WithSources/URLSourceInDevvedPackage/src/URLSourceInDevvedPackage.jl b/test/test_packages/WithSources/URLSourceInDevvedPackage/src/URLSourceInDevvedPackage.jl
new file mode 100644
index 0000000000..e1de92b8a8
--- /dev/null
+++ b/test/test_packages/WithSources/URLSourceInDevvedPackage/src/URLSourceInDevvedPackage.jl
@@ -0,0 +1,5 @@
+module URLSourceInDevvedPackage
+
+greet() = print("Hello World!")
+
+end # module URLSourceInDevvedPackage
diff --git a/test/test_packages/WithSources/URLSourceInDevvedPackage/test/runtests.jl b/test/test_packages/WithSources/URLSourceInDevvedPackage/test/runtests.jl
new file mode 100644
index 0000000000..7279d9d735
--- /dev/null
+++ b/test/test_packages/WithSources/URLSourceInDevvedPackage/test/runtests.jl
@@ -0,0 +1,2 @@
+using URLSourceInDevvedPackage
+using TestMonorepo
diff --git a/test/test_packages/WorkspacePathResolution/Project.toml b/test/test_packages/WorkspacePathResolution/Project.toml
new file mode 100644
index 0000000000..3550d64c98
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/Project.toml
@@ -0,0 +1,5 @@
+[workspace]
+projects = [
+ "SubProjectA",
+ "SubProjectB",
+]
diff --git a/test/test_packages/WorkspacePathResolution/SubProjectA/Project.toml b/test/test_packages/WorkspacePathResolution/SubProjectA/Project.toml
new file mode 100644
index 0000000000..e5aa2bbe50
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/SubProjectA/Project.toml
@@ -0,0 +1,9 @@
+name = "SubProjectA"
+uuid = "87654321-4321-4321-4321-210987654321"
+version = "0.1.0"
+
+[deps]
+SubProjectB = "12345678-1234-1234-1234-123456789012"
+
+[sources]
+SubProjectB = {path = "SubProjectB"}
diff --git a/test/test_packages/WorkspacePathResolution/SubProjectA/src/SubProjectA.jl b/test/test_packages/WorkspacePathResolution/SubProjectA/src/SubProjectA.jl
new file mode 100644
index 0000000000..89fb8e379d
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/SubProjectA/src/SubProjectA.jl
@@ -0,0 +1,7 @@
+module SubProjectA
+
+using SubProjectB
+
+greet() = "Hello from SubProjectA! " * SubProjectB.greet()
+
+end
diff --git a/test/test_packages/WorkspacePathResolution/SubProjectB/Project.toml b/test/test_packages/WorkspacePathResolution/SubProjectB/Project.toml
new file mode 100644
index 0000000000..8f8bf6fd07
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/SubProjectB/Project.toml
@@ -0,0 +1,3 @@
+name = "SubProjectB"
+uuid = "12345678-1234-1234-1234-123456789012"
+version = "0.1.0"
diff --git a/test/test_packages/WorkspacePathResolution/SubProjectB/src/SubProjectB.jl b/test/test_packages/WorkspacePathResolution/SubProjectB/src/SubProjectB.jl
new file mode 100644
index 0000000000..4486af34ba
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/SubProjectB/src/SubProjectB.jl
@@ -0,0 +1,5 @@
+module SubProjectB
+
+greet() = "Hello from SubProjectB!"
+
+end
diff --git a/test/test_packages/WorkspaceSourcesParent/Project.toml b/test/test_packages/WorkspaceSourcesParent/Project.toml
new file mode 100644
index 0000000000..5de751862b
--- /dev/null
+++ b/test/test_packages/WorkspaceSourcesParent/Project.toml
@@ -0,0 +1,6 @@
+name = "WorkspaceSourcesParent"
+uuid = "11111111-1111-1111-1111-111111111111"
+version = "0.1.0"
+
+[workspace]
+projects = ["docs"]
diff --git a/test/test_packages/WorkspaceSourcesParent/docs/Project.toml b/test/test_packages/WorkspaceSourcesParent/docs/Project.toml
new file mode 100644
index 0000000000..b1911c8d9c
--- /dev/null
+++ b/test/test_packages/WorkspaceSourcesParent/docs/Project.toml
@@ -0,0 +1,6 @@
+[deps]
+WorkspaceSourcesParent = "11111111-1111-1111-1111-111111111111"
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[sources]
+WorkspaceSourcesParent = {path = ".."}
diff --git a/test/test_packages/WorkspaceSourcesParent/src/WorkspaceSourcesParent.jl b/test/test_packages/WorkspaceSourcesParent/src/WorkspaceSourcesParent.jl
new file mode 100644
index 0000000000..b1823e4e3c
--- /dev/null
+++ b/test/test_packages/WorkspaceSourcesParent/src/WorkspaceSourcesParent.jl
@@ -0,0 +1,3 @@
+module WorkspaceSourcesParent
+greet() = "Hello from WorkspaceSourcesParent!"
+end
diff --git a/test/test_packages/WorkspaceTestInstantiate/test/Project.toml b/test/test_packages/WorkspaceTestInstantiate/test/Project.toml
index ce542a73d4..7709bd092c 100644
--- a/test/test_packages/WorkspaceTestInstantiate/test/Project.toml
+++ b/test/test_packages/WorkspaceTestInstantiate/test/Project.toml
@@ -1,2 +1,3 @@
[deps]
Example = "7876af07-990d-54b4-ab0e-23690620f79a"
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
diff --git a/test/test_packages/WorkspaceTestInstantiate/test/runtests.jl b/test/test_packages/WorkspaceTestInstantiate/test/runtests.jl
index 3e04fee8cc..4e976b4144 100644
--- a/test/test_packages/WorkspaceTestInstantiate/test/runtests.jl
+++ b/test/test_packages/WorkspaceTestInstantiate/test/runtests.jl
@@ -1 +1,5 @@
+using Test
+# Example is a test-only dep (not in root Project.toml).
+# Verify it was precompiled against the test project, not the parent.
+@test Base.isprecompiled(Base.identify_package("Example"))
using Example
diff --git a/test/test_packages/monorepo/packages/D/Project.toml b/test/test_packages/monorepo/packages/D/Project.toml
index a816c0ea74..b4469b5f58 100644
--- a/test/test_packages/monorepo/packages/D/Project.toml
+++ b/test/test_packages/monorepo/packages/D/Project.toml
@@ -1,4 +1,4 @@
name = "D"
uuid = "bf733257-898a-45a0-b2f2-c1c188bdd870"
version = "0.0.0"
-manifest = "../../Manifest.toml"
\ No newline at end of file
+manifest = "../../Manifest.toml"
diff --git a/test/test_packages/monorepo/test/runtests.jl b/test/test_packages/monorepo/test/runtests.jl
index 8cacea0deb..ef5dbaf141 100644
--- a/test/test_packages/monorepo/test/runtests.jl
+++ b/test/test_packages/monorepo/test/runtests.jl
@@ -1,4 +1,4 @@
using Test, A
@test A.test()
-@test A.testC()
\ No newline at end of file
+@test A.testC()
diff --git a/test/test_packages/sources_only_rev/Project.toml b/test/test_packages/sources_only_rev/Project.toml
new file mode 100644
index 0000000000..73a01c5d00
--- /dev/null
+++ b/test/test_packages/sources_only_rev/Project.toml
@@ -0,0 +1,5 @@
+[deps]
+Example = "7876af07-990d-54b4-ab0e-23690620f79a"
+
+[sources]
+Example = {rev = "ba3d6704f09330ae973773496a4212f85e0ffe45"}
diff --git a/test/utils.jl b/test/utils.jl
index ee19cb9bcb..3879c3353d 100644
--- a/test/utils.jl
+++ b/test/utils.jl
@@ -9,9 +9,9 @@ using TOML
using UUIDs
export temp_pkg_dir, cd_tempdir, isinstalled, write_build, with_current_env,
- with_temp_env, with_pkg_env, git_init_and_commit, copy_test_package,
- git_init_package, add_this_pkg, TEST_SIG, TEST_PKG, isolate, LOADED_DEPOT,
- list_tarball_files, recursive_rm_cov_files
+ with_temp_env, with_pkg_env, git_init_and_commit, copy_test_package,
+ git_init_package, add_this_pkg, TEST_SIG, TEST_PKG, isolate, LOADED_DEPOT,
+ list_tarball_files, recursive_rm_cov_files, copy_this_pkg_cache, make_file_url
const CACHE_DIRECTORY = realpath(mktempdir(; cleanup = true))
@@ -22,6 +22,18 @@ const REGISTRY_DIR = joinpath(REGISTRY_DEPOT, "registries", "General")
const GENERAL_UUID = UUID("23338594-aafe-5451-b93e-139f81909106")
+function copy_this_pkg_cache(new_depot)
+ for p in ("Pkg", "REPLExt")
+ subdir = joinpath("compiled", "v$(VERSION.major).$(VERSION.minor)")
+ source = joinpath(Base.DEPOT_PATH[1], subdir, p)
+ isdir(source) || continue # doesn't exist if using shipped Pkg (e.g. Julia CI)
+ dest = joinpath(new_depot, subdir, p)
+ mkpath(dirname(dest))
+ cp(source, dest)
+ end
+ return
+end
+
function check_init_reg()
isfile(joinpath(REGISTRY_DIR, "Registry.toml")) && return
mkpath(REGISTRY_DIR)
@@ -34,21 +46,24 @@ function check_init_reg()
write(tree_info_file, "git-tree-sha1 = " * repr(string(hash)))
else
Base.shred!(LibGit2.CachedCredentials()) do creds
- f = retry(delays = fill(5.0, 3), check=(s,e)->isa(e, Pkg.Types.PkgError)) do
- LibGit2.with(Pkg.GitTools.clone(
- stderr_f(),
- "https://github.com/JuliaRegistries/General.git",
- REGISTRY_DIR,
- credentials = creds)) do repo
+ f = retry(delays = fill(5.0, 3), check = (s, e) -> isa(e, Pkg.Types.PkgError)) do
+ LibGit2.with(
+ Pkg.GitTools.clone(
+ stderr_f(),
+ "https://github.com/JuliaRegistries/General.git",
+ REGISTRY_DIR,
+ credentials = creds
+ )
+ ) do repo
end
end
f() # retry returns a function that should be called
end
end
- isfile(joinpath(REGISTRY_DIR, "Registry.toml")) || error("Registry did not install properly")
+ return isfile(joinpath(REGISTRY_DIR, "Registry.toml")) || error("Registry did not install properly")
end
-function isolate(fn::Function; loaded_depot=false, linked_reg=true)
+function isolate(fn::Function; loaded_depot = false, linked_reg = true)
old_load_path = copy(LOAD_PATH)
old_depot_path = copy(DEPOT_PATH)
old_home_project = Base.HOME_PROJECT[]
@@ -57,7 +72,7 @@ function isolate(fn::Function; loaded_depot=false, linked_reg=true)
old_general_registry_url = Pkg.Registry.DEFAULT_REGISTRIES[1].url
old_general_registry_path = Pkg.Registry.DEFAULT_REGISTRIES[1].path
old_general_registry_linked = Pkg.Registry.DEFAULT_REGISTRIES[1].linked
- try
+ return try
# Clone/download the registry only once
check_init_reg()
@@ -70,9 +85,12 @@ function isolate(fn::Function; loaded_depot=false, linked_reg=true)
Pkg.Registry.DEFAULT_REGISTRIES[1].path = REGISTRY_DIR
Pkg.Registry.DEFAULT_REGISTRIES[1].linked = linked_reg
Pkg.REPLMode.TEST_MODE[] = false
- withenv("JULIA_PROJECT" => nothing,
- "JULIA_LOAD_PATH" => nothing,
- "JULIA_PKG_DEVDIR" => nothing) do
+ withenv(
+ "JULIA_PROJECT" => nothing,
+ "JULIA_LOAD_PATH" => nothing,
+ "JULIA_PKG_DEVDIR" => nothing,
+ "JULIA_DEPOT_PATH" => nothing
+ ) do
target_depot = realpath(mktempdir())
push!(LOAD_PATH, "@", "@v#.#", "@stdlib")
push!(DEPOT_PATH, target_depot)
@@ -91,7 +109,7 @@ function isolate(fn::Function; loaded_depot=false, linked_reg=true)
end
if !haskey(ENV, "CI") && target_depot !== nothing && isdir(target_depot)
try
- Base.rm(target_depot; force=true, recursive=true)
+ Base.rm(target_depot; force = true, recursive = true)
catch err
println("warning: isolate failed to clean up depot.\n $err")
end
@@ -127,7 +145,7 @@ function isolate_and_pin_registry(fn::Function; registry_url::String, registry_c
return nothing
end
-function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
+function temp_pkg_dir(fn::Function; rm = true, linked_reg = true)
old_load_path = copy(LOAD_PATH)
old_depot_path = copy(DEPOT_PATH)
old_home_project = Base.HOME_PROJECT[]
@@ -135,7 +153,7 @@ function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
old_general_registry_url = Pkg.Registry.DEFAULT_REGISTRIES[1].url
old_general_registry_path = Pkg.Registry.DEFAULT_REGISTRIES[1].path
old_general_registry_linked = Pkg.Registry.DEFAULT_REGISTRIES[1].linked
- try
+ return try
# Clone/download the registry only once
check_init_reg()
@@ -146,9 +164,12 @@ function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
Pkg.Registry.DEFAULT_REGISTRIES[1].url = nothing
Pkg.Registry.DEFAULT_REGISTRIES[1].path = REGISTRY_DIR
Pkg.Registry.DEFAULT_REGISTRIES[1].linked = linked_reg
- withenv("JULIA_PROJECT" => nothing,
- "JULIA_LOAD_PATH" => nothing,
- "JULIA_PKG_DEVDIR" => nothing) do
+ withenv(
+ "JULIA_PROJECT" => nothing,
+ "JULIA_LOAD_PATH" => nothing,
+ "JULIA_PKG_DEVDIR" => nothing,
+ "JULIA_DEPOT_PATH" => nothing
+ ) do
env_dir = realpath(mktempdir())
depot_dir = realpath(mktempdir())
try
@@ -159,8 +180,8 @@ function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
finally
if rm && !haskey(ENV, "CI")
try
- Base.rm(env_dir; force=true, recursive=true)
- Base.rm(depot_dir; force=true, recursive=true)
+ Base.rm(env_dir; force = true, recursive = true)
+ Base.rm(depot_dir; force = true, recursive = true)
catch err
# Avoid raising an exception here as it will mask the original exception
println(stderr_f(), "Exception in finally: $(sprint(showerror, err))")
@@ -181,12 +202,12 @@ function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
end
end
-function cd_tempdir(f; rm=true)
+function cd_tempdir(f; rm = true)
tmp = realpath(mktempdir())
cd(tmp) do
f(tmp)
end
- if rm && !haskey(ENV, "CI")
+ return if rm && !haskey(ENV, "CI")
try
Base.rm(tmp; force = true, recursive = true)
catch err
@@ -203,25 +224,25 @@ isinstalled(pkg::String) = Base.find_package(pkg) !== nothing
function write_build(path, content)
build_filename = joinpath(path, "deps", "build.jl")
mkpath(dirname(build_filename))
- write(build_filename, content)
+ return write(build_filename, content)
end
function with_current_env(f)
prev_active = Base.ACTIVE_PROJECT[]
Pkg.activate(".")
- try
+ return try
f()
finally
Base.ACTIVE_PROJECT[] = prev_active
end
end
-function with_temp_env(f, env_name::AbstractString="Dummy"; rm=true)
+function with_temp_env(f, env_name::AbstractString = "Dummy"; rm = true)
prev_active = Base.ACTIVE_PROJECT[]
env_path = joinpath(realpath(mktempdir()), env_name)
Pkg.generate(env_path)
Pkg.activate(env_path)
- try
+ return try
applicable(f, env_path) ? f(env_path) : f()
finally
Base.ACTIVE_PROJECT[] = prev_active
@@ -236,10 +257,10 @@ function with_temp_env(f, env_name::AbstractString="Dummy"; rm=true)
end
end
-function with_pkg_env(fn::Function, path::AbstractString="."; change_dir=false)
+function with_pkg_env(fn::Function, path::AbstractString = "."; change_dir = false)
prev_active = Base.ACTIVE_PROJECT[]
Pkg.activate(path)
- try
+ return try
if change_dir
cd(fn, path)
else
@@ -256,9 +277,9 @@ const TEST_SIG = LibGit2.Signature("TEST", "TEST@TEST.COM", round(time()), 0)
const TEST_PKG = (name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
function git_init_and_commit(path; msg = "initial commit")
- LibGit2.with(LibGit2.init(path)) do repo
+ return LibGit2.with(LibGit2.init(path)) do repo
LibGit2.add!(repo, "*")
- LibGit2.commit(repo, msg; author=TEST_SIG, committer=TEST_SIG)
+ LibGit2.commit(repo, msg; author = TEST_SIG, committer = TEST_SIG)
end
end
@@ -270,9 +291,22 @@ function git_init_package(tmp, path)
return pkgpath
end
-function copy_test_package(tmpdir::String, name::String; use_pkg=true)
+function ensure_test_package_user_writable(dir)
+ for (root, _, files) in walkdir(dir)
+ chmod(root, filemode(root) | 0o200 | 0o100)
+
+ for file in files
+ filepath = joinpath(root, file)
+ chmod(filepath, filemode(filepath) | 0o200)
+ end
+ end
+ return
+end
+
+function copy_test_package(tmpdir::String, name::String; use_pkg = true)
target = joinpath(tmpdir, name)
cp(joinpath(@__DIR__, "test_packages", name), target)
+ ensure_test_package_user_writable(target)
use_pkg || return target
# The known Pkg UUID, and whatever UUID we're currently using for testing
@@ -290,15 +324,15 @@ function copy_test_package(tmpdir::String, name::String; use_pkg=true)
return target
end
-function add_this_pkg(; platform=Base.BinaryPlatforms.HostPlatform())
- try
+function add_this_pkg(; platform = Base.BinaryPlatforms.HostPlatform())
+ return try
Pkg.respect_sysimage_versions(false)
pkg_dir = dirname(@__DIR__)
pkg_uuid = TOML.parsefile(joinpath(pkg_dir, "Project.toml"))["uuid"]
spec = Pkg.PackageSpec(
- name="Pkg",
- uuid=UUID(pkg_uuid),
- path=pkg_dir,
+ name = "Pkg",
+ uuid = UUID(pkg_uuid),
+ path = pkg_dir,
)
Pkg.develop(spec; platform)
finally
@@ -316,14 +350,16 @@ end
function show_output_if_command_errors(cmd::Cmd)
out = IOBuffer()
- proc = run(pipeline(cmd; stdout=out); wait = false)
+ err = IOBuffer()
+ proc = run(pipeline(cmd; stdout = out, stderr = err); wait = false)
wait(proc)
if !success(proc)
- seekstart(out)
+ seekstart(out); seekstart(err)
println(read(out, String))
+ println(read(err, String))
Base.pipeline_error(proc)
end
- return nothing
+ return true
end
function recursive_rm_cov_files(rootdir::String)
@@ -332,6 +368,18 @@ function recursive_rm_cov_files(rootdir::String)
endswith(file, ".cov") && rm(joinpath(root, file))
end
end
+ return
+end
+
+# Convert a path into a file URL.
+function make_file_url(path)
+ # Normalize Windows backslashes to forward slashes; if the path starts
+ # with a drive letter, an extra leading slash is needed in the file URL.
+ path = replace(path, "\\" => "/")
+ if !startswith(path, "/")
+ path = "/" * path
+ end
+ return "file://$(path)"
end
end
diff --git a/test/workspaces.jl b/test/workspaces.jl
index acb41be7bd..87654ccf4c 100644
--- a/test/workspaces.jl
+++ b/test/workspaces.jl
@@ -4,161 +4,286 @@ import ..Pkg # ensure we are using the correct Pkg
using Test
using TOML
using UUIDs
-if !isdefined(@__MODULE__, :Utils)
- include("utils.jl")
- using .Utils
-end
-
+using ..Utils
temp_pkg_dir() do project_path
- cd(project_path) do; with_temp_env() do
- name = "MonorepoSub"
- rm(name, force=true, recursive=true)
- Pkg.generate(name)
- cd("MonorepoSub") do
- Pkg.activate(".")
- # Add Example, Crayons, PrivatePackage to the "MonorepoSub" package
- Pkg.add("Example")
- Pkg.add(;name="Crayons", version="v4.0.3")
- Pkg.compat("Crayons", "=4.0.0, =4.0.1, =4.0.2, =4.0.3")
- Pkg.generate("PrivatePackage")
- Pkg.develop(path="PrivatePackage")
- d = TOML.parsefile("Project.toml")
- d["workspace"] = Dict("projects" => ["test", "docs", "benchmarks", "PrivatePackage"])
- abs_path = abspath("PrivatePackage") # TODO: Make relative after #3842 is fixed
- d["sources"] = Dict("PrivatePackage" => Dict("path" => abs_path))
- Pkg.Types.write_project(d, "Project.toml")
- write("src/MonorepoSub.jl", """
- module MonorepoSub
- using Example, Crayons, PrivatePackage
+ cd(project_path) do;
+ with_temp_env() do
+ name = "MonorepoSub"
+ rm(name, force = true, recursive = true)
+ Pkg.generate(name)
+ cd("MonorepoSub") do
+ Pkg.activate(".")
+ # Add Example, Crayons, PrivatePackage to the "MonorepoSub" package
+ Pkg.add("Example")
+ Pkg.add(; name = "Crayons", version = "v4.0.3")
+ Pkg.compat("Crayons", "=4.0.0, =4.0.1, =4.0.2, =4.0.3")
+ Pkg.generate("PrivatePackage")
+ Pkg.develop(path = "PrivatePackage")
+ d = TOML.parsefile("Project.toml")
+ d["workspace"] = Dict("projects" => ["test", "docs", "benchmarks", "PrivatePackage"])
+ d["sources"] = Dict("PrivatePackage" => Dict("path" => "PrivatePackage"))
+ Pkg.Types.write_project(d, "Project.toml")
+ write(
+ "src/MonorepoSub.jl", """
+ module MonorepoSub
+ using Example, Crayons, PrivatePackage
+ end
+ """
+ )
+
+ # Add some deps to PrivatePackage
+ Pkg.activate("PrivatePackage")
+ Pkg.add(; name = "Chairmarks", version = v"1.1.2")
+ @test !isfile("PrivatePackage/Manifest.toml")
+ d = TOML.parsefile("PrivatePackage/Project.toml")
+ d["workspace"] = Dict("projects" => ["test"])
+ Pkg.Types.write_project(d, "PrivatePackage/Project.toml")
+ write(
+ "PrivatePackage/src/PrivatePackage.jl", """
+ module PrivatePackage
+ using Chairmarks
+ end
+ """
+ )
+ io = IOBuffer()
+ Pkg.status(; io)
+ status = String(take!(io))
+ for pkg in ["Crayons v", "Example v", "TestSpecificPackage v"]
+ @test !occursin(pkg, status)
end
- """)
-
- # Add some deps to PrivatePackage
- Pkg.activate("PrivatePackage")
- Pkg.add(; name="Chairmarks", version=v"1.1.2")
- @test !isfile("PrivatePackage/Manifest.toml")
- d = TOML.parsefile("PrivatePackage/Project.toml")
- d["workspace"] = Dict("projects" => ["test"])
- Pkg.Types.write_project(d, "PrivatePackage/Project.toml")
- write("PrivatePackage/src/PrivatePackage.jl", """
- module PrivatePackage
- using Chairmarks
- end
- """)
- io = IOBuffer()
- Pkg.status(; io)
- status = String(take!(io))
- for pkg in ["Crayons v", "Example v", "TestSpecificPackage v"]
- @test !occursin(pkg, status)
- end
- @test occursin("Chairmarks v", status)
-
- # Make a test subproject in PrivatePackage
- # Note that this is a "nested subproject" since in this environment
- # PrivatePackage is a subproject of MonorepoSub
- mkdir("PrivatePackage/test")
- Pkg.activate("PrivatePackage/test")
- # This adds too many packages to the Project file...
- Pkg.add("Test")
- Pkg.develop(path="PrivatePackage")
- @test length(Pkg.project().dependencies) == 2
- write("PrivatePackage/test/runtests.jl", """
- using Test
- using PrivatePackage
- """)
- # A nested subproject should still use the root base manifest
- @test !isfile("PrivatePackage/test/Manifest.toml")
- # Test status shows deps in test-subproject + base (MonoRepoSub)
- io = IOBuffer()
- Pkg.status(; io)
- status = String(take!(io))
- for pkg in ["Crayons", "Example", "TestSpecificPackage"]
- @test !occursin(pkg, status)
- end
- @test occursin("Test v", status)
+ @test occursin("Chairmarks v", status)
- Pkg.status(; io, workspace=true)
- status = String(take!(io))
- for pkg in ["Crayons", "Example", "Test"]
- @test occursin(pkg, status)
- end
+ # Make a test subproject in PrivatePackage
+ # Note that this is a "nested subproject" since in this environment
+ # PrivatePackage is a subproject of MonorepoSub
+ mkdir("PrivatePackage/test")
+ Pkg.activate("PrivatePackage/test")
+ # This adds too many packages to the Project file...
+ Pkg.add("Test")
+ Pkg.develop(path = "PrivatePackage")
+ @test length(Pkg.project().dependencies) == 2
+ write(
+ "PrivatePackage/test/runtests.jl", """
+ using Test
+ using PrivatePackage
+ """
+ )
+ # A nested subproject should still use the root base manifest
+ @test !isfile("PrivatePackage/test/Manifest.toml")
+ # Test status shows deps in test-subproject + base (MonoRepoSub)
+ io = IOBuffer()
+ Pkg.status(; io)
+ status = String(take!(io))
+ for pkg in ["Crayons", "Example", "TestSpecificPackage"]
+ @test !occursin(pkg, status)
+ end
+ @test occursin("Test v", status)
+
+ Pkg.status(; io, workspace = true)
+ status = String(take!(io))
+ for pkg in ["Crayons", "Example", "Test"]
+ @test occursin(pkg, status)
+ end
+
+ # Add tests to MonorepoSub
+ mkdir("test")
+ Pkg.activate("test")
+ # Test specific deps
+ Pkg.add("Test")
+ Pkg.add("Crayons")
+ Pkg.compat("Crayons", "=4.0.1, =4.0.2, =4.0.3, =4.0.4")
+ Pkg.develop(; path = ".")
+ # Compat in base package should prevent updating to 4.0.4
+ Pkg.update()
+ @test Pkg.dependencies()[UUID("a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f")].version == v"4.0.3"
+ Pkg.generate("TestSpecificPackage")
+ Pkg.develop(path = "TestSpecificPackage")
+ d = TOML.parsefile("test/Project.toml")
+ d["sources"] = Dict("TestSpecificPackage" => Dict("path" => "../TestSpecificPackage"))
+ Pkg.Types.write_project(d, "test/Project.toml")
+
+ @test !isfile("test/Manifest.toml")
+ write(
+ "test/runtests.jl", """
+ using Test
+ using Crayons
+ using TestSpecificPackage
+ using MonorepoSub
+ """
+ )
- # Add tests to MonorepoSub
- mkdir("test")
- Pkg.activate("test")
- # Test specific deps
- Pkg.add("Test")
- Pkg.add("Crayons")
- Pkg.compat("Crayons", "=4.0.1, =4.0.2, =4.0.3, =4.0.4")
- Pkg.develop(; path=".")
- # Compat in base package should prevent updating to 4.0.4
- Pkg.update()
- @test Pkg.dependencies()[UUID("a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f")].version == v"4.0.3"
- Pkg.generate("TestSpecificPackage")
- Pkg.develop(path="TestSpecificPackage")
- d = TOML.parsefile("test/Project.toml")
- abs_pkg = abspath("TestSpecificPackage") # TODO: Make relative after #3842 is fixed
- d["sources"] = Dict("TestSpecificPackage" => Dict("path" => abs_pkg))
- Pkg.Types.write_project(d, "test/Project.toml")
-
- @test !isfile("test/Manifest.toml")
- write("test/runtests.jl", """
- using Test
- using Crayons
- using TestSpecificPackage
- using MonorepoSub
- """)
-
- Pkg.activate(".")
- env = Pkg.Types.EnvCache()
- hash_1 = Pkg.Types.workspace_resolve_hash(env)
- Pkg.activate("PrivatePackage")
- env = Pkg.Types.EnvCache()
- hash_2 = Pkg.Types.workspace_resolve_hash(env)
- Pkg.activate("test")
- env = Pkg.Types.EnvCache()
- hash_3 = Pkg.Types.workspace_resolve_hash(env)
- Pkg.activate("PrivatePackage/test")
- env = Pkg.Types.EnvCache()
- hash_4 = Pkg.Types.workspace_resolve_hash(env)
-
- @test hash_1 == hash_2 == hash_3 == hash_4
-
- # Test that the subprojects are working
- depot_path_string = join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":")
- withenv("JULIA_DEPOT_PATH" => depot_path_string) do
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="test" test/runtests.jl`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using MonorepoSub'`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage" -e 'using PrivatePackage'`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage/test" PrivatePackage/test/runtests.jl`))
-
- rm("Manifest.toml")
Pkg.activate(".")
- Pkg.resolve()
- # Resolve should have fixed the manifest so that everything above works from the existing project files
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="test" test/runtests.jl`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using MonorepoSub'`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage" -e 'using PrivatePackage'`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage/test" PrivatePackage/test/runtests.jl`))
+ env = Pkg.Types.EnvCache()
+ hash_1 = Pkg.Types.workspace_resolve_hash(env)
+ Pkg.activate("PrivatePackage")
+ env = Pkg.Types.EnvCache()
+ hash_2 = Pkg.Types.workspace_resolve_hash(env)
+ Pkg.activate("test")
+ env = Pkg.Types.EnvCache()
+ hash_3 = Pkg.Types.workspace_resolve_hash(env)
+ Pkg.activate("PrivatePackage/test")
+ env = Pkg.Types.EnvCache()
+ hash_4 = Pkg.Types.workspace_resolve_hash(env)
+
+ @test hash_1 == hash_2 == hash_3 == hash_4
+
+ # Test workspace option for update, pin, free
+ Pkg.activate(".")
+ # Chairmarks is only a dep of the PrivatePackage subproject, not the root
+ all_deps = Pkg.dependencies()
+ chairmarks_uuid = only([uuid for (uuid, info) in all_deps if info.name == "Chairmarks"])
+ @test all_deps[chairmarks_uuid].version == v"1.1.2"
+
+ # update without workspace should not touch Chairmarks (not a root dep)
+ Pkg.update()
+ Pkg.dependencies(chairmarks_uuid) do pkg
+ @test pkg.version == v"1.1.2"
+ end
+
+ # update with workspace=true should update Chairmarks from the subproject
+ Pkg.update(; workspace = true)
+ Pkg.dependencies(chairmarks_uuid) do pkg
+ @test pkg.version > v"1.1.2"
+ end
+
+ # Test that the subprojects are working
+ depot_path_string = join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":")
+ withenv("JULIA_DEPOT_PATH" => depot_path_string) do
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="test" test/runtests.jl`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using MonorepoSub'`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage" -e 'using PrivatePackage'`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage/test" PrivatePackage/test/runtests.jl`))
+
+ rm("Manifest.toml")
+ Pkg.activate(".")
+ Pkg.resolve()
+ # Resolve should have fixed the manifest so that everything above works from the existing project files
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="test" test/runtests.jl`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using MonorepoSub'`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage" -e 'using PrivatePackage'`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage/test" PrivatePackage/test/runtests.jl`))
+ end
end
end
- end end
+ end
end
@testset "test resolve with tree hash" begin
+ isolate() do
+ mktempdir() do dir
+ path = copy_test_package(dir, "WorkspaceTestInstantiate")
+ cd(path) do
+ with_current_env() do
+ withenv("JULIA_PKG_PRECOMPILE_AUTO" => "1") do
+ @test !isfile("Manifest.toml")
+ @test !isfile("test/Manifest.toml")
+ Pkg.test()
+ @test isfile("Manifest.toml")
+ @test !isfile("test/Manifest.toml")
+ rm(joinpath(DEPOT_PATH[1], "packages", "Example"); recursive = true)
+ Pkg.test()
+ end
+ end
+ end
+ end
+ end
+end
+
+@testset "workspace path resolution issue #4222" begin
+ isolate() do
+ mktempdir() do dir
+ path = copy_test_package(dir, "WorkspacePathResolution")
+ cd(path) do
+ with_current_env() do
+ # First resolve SubProjectB (non-root project) without existing Manifest
+ Pkg.activate("SubProjectB")
+ @test !isfile("Manifest.toml")
+ # Should be able to find SubProjectA and succeed
+ Pkg.update()
+ end
+ end
+ end
+ end
+end
+
+# Test that workspace child projects with [sources] pointing to parent work correctly
+# This was broken in 1.12.3 due to stale assertions after #4539
+@testset "workspace sources pointing to parent package" begin
mktempdir() do dir
- path = abspath(joinpath(@__DIR__, "../test", "test_packages", "WorkspaceTestInstantiate"))
- cp(path, joinpath(dir, "WorkspaceTestInstantiate"))
- cd(joinpath(dir, "WorkspaceTestInstantiate")) do
+ path = copy_test_package(dir, "WorkspaceSourcesParent")
+ cd(path) do
with_current_env() do
+ # Activate the docs subproject which has [sources] WorkspaceSourcesParent = {path = ".."}
+ Pkg.activate("docs")
@test !isfile("Manifest.toml")
- @test !isfile("test/Manifest.toml")
- Pkg.test()
+ # This should succeed without AssertionError
+ Pkg.instantiate()
@test isfile("Manifest.toml")
- @test !isfile("test/Manifest.toml")
- rm(joinpath(DEPOT_PATH[1], "packages", "Example"); recursive = true)
- Pkg.test()
+ # Verify the manifest has the correct path for the parent package
+ manifest = TOML.parsefile("Manifest.toml")
+ parent_entry = only(manifest["deps"]["WorkspaceSourcesParent"])
+ @test parent_entry["path"] == "."
+ # Verify the Project.toml sources path was NOT corrupted (issue #4575)
+ # The path should remain ".." (project-relative), not "." (manifest-relative)
+ project = TOML.parsefile("docs/Project.toml")
+ @test project["sources"]["WorkspaceSourcesParent"]["path"] == ".."
+ end
+ end
+ end
+end
+
+@testset "selective workspace instantiate" begin
+ mktempdir() do dir
+ path = copy_test_package(dir, "WorkspaceTestInstantiate")
+ cd(path) do
+ with_current_env() do
+ # Add Crayons dependency to root project to differentiate from subproject's Example
+ Pkg.activate(".")
+ Pkg.add("Crayons")
+
+ # The test subproject already has Example dependency
+ # Workspace structure is already set up in the test package
+
+ # Resolve to create full manifest
+ Pkg.resolve()
+ @test isfile("Manifest.toml")
+
+ # Verify manifest contains both dependencies
+ manifest = Pkg.Types.read_manifest("Manifest.toml")
+ example_uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a") # From test subproject
+ crayons_uuid = UUID("a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f") # From root project
+ @test haskey(manifest.deps, example_uuid)
+ @test haskey(manifest.deps, crayons_uuid)
+
+ # Clear package installations to test selective download
+ depot_path = first(Pkg.depots())
+ packages_dir = joinpath(depot_path, "packages")
+ for pkg_name in ["Example", "Crayons"]
+ pkg_dir = joinpath(packages_dir, pkg_name)
+ rm(pkg_dir, recursive = true, force = true)
+ end
+
+ # Test workspace=false only downloads root project deps (Crayons)
+ Pkg.instantiate(workspace = false)
+ example_installed = isdir(joinpath(packages_dir, "Example"))
+ crayons_installed = isdir(joinpath(packages_dir, "Crayons"))
+ @test crayons_installed # Should be installed (root project dependency)
+ @test !example_installed # Should not be installed with workspace=false
+
+ # Clear and test workspace=true downloads all deps
+ rm(joinpath(packages_dir, "Crayons"), recursive = true, force = true)
+ Pkg.instantiate(workspace = true)
+ example_installed = isdir(joinpath(packages_dir, "Example"))
+ crayons_installed = isdir(joinpath(packages_dir, "Crayons"))
+ @test crayons_installed # Should be installed
+ @test example_installed # Should be installed with workspace=true
+
+ # Test is_instantiated behavior
+ rm(joinpath(packages_dir, "Example"), recursive = true, force = true)
+ ctx = Pkg.Types.Context()
+ @test Pkg.Operations.is_instantiated(ctx.env, false) # Root project complete (has Crayons)
+ @test !Pkg.Operations.is_instantiated(ctx.env, true) # Workspace incomplete (missing Example)
end
end
end