From 2550b812ea337552fbb68de80f20bdb91f281001 Mon Sep 17 00:00:00 2001 From: Kristoffer Carlsson Date: Mon, 13 Nov 2017 14:41:07 +0100 Subject: [PATCH 1/3] internalize BinaryProvider --- ext/BinaryProvider/.codecov.yml | 1 + ext/BinaryProvider/.gitignore | 5 + ext/BinaryProvider/.travis.yml | 30 + ext/BinaryProvider/LICENSE.md | 22 + ext/BinaryProvider/README.md | 35 ++ ext/BinaryProvider/REQUIRE | 3 + ext/BinaryProvider/appveyor.yml | 48 ++ ext/BinaryProvider/src/BinDepsIntegration.jl | 37 ++ ext/BinaryProvider/src/BinaryPackage.jl | 151 +++++ ext/BinaryProvider/src/BinaryProvider.jl | 39 ++ ext/BinaryProvider/src/OutputCollector.jl | 362 +++++++++++ ext/BinaryProvider/src/PlatformEngines.jl | 543 +++++++++++++++++ ext/BinaryProvider/src/PlatformNames.jl | 212 +++++++ ext/BinaryProvider/src/Prefix.jl | 543 +++++++++++++++++ ext/BinaryProvider/src/Products.jl | 345 +++++++++++ ext/BinaryProvider/test/LibFoo.jl/.gitignore | 2 + ext/BinaryProvider/test/LibFoo.jl/README.md | 3 + .../test/LibFoo.jl/deps/build.jl | 37 ++ .../test/LibFoo.jl/src/LibFoo.jl | 38 ++ .../test/LibFoo.jl/test/runtests.jl | 5 + ext/BinaryProvider/test/output_tests/fail.sh | 6 + ext/BinaryProvider/test/output_tests/kill.sh | 10 + ext/BinaryProvider/test/output_tests/long.sh | 5 + .../test/output_tests/newlines.sh | 6 + .../test/output_tests/simple.sh | 8 + ext/BinaryProvider/test/runtests.jl | 575 ++++++++++++++++++ 26 files changed, 3071 insertions(+) create mode 100644 ext/BinaryProvider/.codecov.yml create mode 100644 ext/BinaryProvider/.gitignore create mode 100644 ext/BinaryProvider/.travis.yml create mode 100644 ext/BinaryProvider/LICENSE.md create mode 100644 ext/BinaryProvider/README.md create mode 100644 ext/BinaryProvider/REQUIRE create mode 100644 ext/BinaryProvider/appveyor.yml create mode 100644 ext/BinaryProvider/src/BinDepsIntegration.jl create mode 100644 ext/BinaryProvider/src/BinaryPackage.jl create mode 100644 ext/BinaryProvider/src/BinaryProvider.jl create mode 100644 
ext/BinaryProvider/src/OutputCollector.jl create mode 100644 ext/BinaryProvider/src/PlatformEngines.jl create mode 100644 ext/BinaryProvider/src/PlatformNames.jl create mode 100644 ext/BinaryProvider/src/Prefix.jl create mode 100644 ext/BinaryProvider/src/Products.jl create mode 100644 ext/BinaryProvider/test/LibFoo.jl/.gitignore create mode 100644 ext/BinaryProvider/test/LibFoo.jl/README.md create mode 100644 ext/BinaryProvider/test/LibFoo.jl/deps/build.jl create mode 100644 ext/BinaryProvider/test/LibFoo.jl/src/LibFoo.jl create mode 100644 ext/BinaryProvider/test/LibFoo.jl/test/runtests.jl create mode 100755 ext/BinaryProvider/test/output_tests/fail.sh create mode 100755 ext/BinaryProvider/test/output_tests/kill.sh create mode 100755 ext/BinaryProvider/test/output_tests/long.sh create mode 100755 ext/BinaryProvider/test/output_tests/newlines.sh create mode 100755 ext/BinaryProvider/test/output_tests/simple.sh create mode 100644 ext/BinaryProvider/test/runtests.jl diff --git a/ext/BinaryProvider/.codecov.yml b/ext/BinaryProvider/.codecov.yml new file mode 100644 index 0000000000000..69cb76019a474 --- /dev/null +++ b/ext/BinaryProvider/.codecov.yml @@ -0,0 +1 @@ +comment: false diff --git a/ext/BinaryProvider/.gitignore b/ext/BinaryProvider/.gitignore new file mode 100644 index 0000000000000..16a5e3a735f44 --- /dev/null +++ b/ext/BinaryProvider/.gitignore @@ -0,0 +1,5 @@ +*.jl.cov +*.jl.*.cov +*.jl.mem + +global_prefix diff --git a/ext/BinaryProvider/.travis.yml b/ext/BinaryProvider/.travis.yml new file mode 100644 index 0000000000000..b744962267abe --- /dev/null +++ b/ext/BinaryProvider/.travis.yml @@ -0,0 +1,30 @@ +# Documentation: http://docs.travis-ci.com/user/languages/julia/ +language: julia +os: + - linux + - osx +julia: + - 0.6 + - nightly +notifications: + email: false + +matrix: + include: + # Make sure to override to "wget" at least once + - julia: 0.6 + os: linux + env: BINARYPROVIDER_DOWNLOAD_ENGINE="wget" + +# Ironic. 
He could provide binaries for others but not himself... +addons: + apt: + packages: + - curl + - wget + - tar + - gzip + +after_success: + # push coverage results to Codecov + - julia -e 'cd(Pkg.dir("BinaryProvider")); Pkg.add("Coverage"); using Coverage; Codecov.submit(Codecov.process_folder())' diff --git a/ext/BinaryProvider/LICENSE.md b/ext/BinaryProvider/LICENSE.md new file mode 100644 index 0000000000000..3baf9cd204117 --- /dev/null +++ b/ext/BinaryProvider/LICENSE.md @@ -0,0 +1,22 @@ +The BinaryProvider.jl package is licensed under the MIT "Expat" License: + +> Copyright (c) 2017: SimonDanisch. +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all +> copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +> SOFTWARE. 
+> diff --git a/ext/BinaryProvider/README.md b/ext/BinaryProvider/README.md new file mode 100644 index 0000000000000..dbe4a5d488c52 --- /dev/null +++ b/ext/BinaryProvider/README.md @@ -0,0 +1,35 @@ +# BinaryProvider + +[![Travis Status](https://travis-ci.org/JuliaPackaging/BinaryProvider.jl.svg?branch=master)](https://travis-ci.org/JuliaPackaging/BinaryProvider.jl) + +[![Appveyor Status](https://ci.appveyor.com/api/projects/status/0sbp28iie07c5dn3/branch/master?svg=true)](https://ci.appveyor.com/project/staticfloat/binaryprovider-jl-fu5p5/branch/master) + +[![codecov.io](http://codecov.io/github/JuliaPackaging/BinaryProvider.jl/coverage.svg?branch=master)](http://codecov.io/github/JuliaPackaging/BinaryProvider.jl?branch=master) + +**staticfloat's third draft**: This package is intended to work alongside [`BinaryBuilder.jl`](https://github.com/JuliaPackaging/BinaryBuilder.jl); within this is all logic necessary to download and unpack tarballs into `Prefix`es. + +## Basic concepts + +Packages are installed to a `Prefix`; a folder that acts similar to the `/usr/local` directory on Unix-like systems, containing a `bin` folder for binaries, a `lib` folder for libraries, etc... `Prefix` objects can have tarballs `install()`'ed within them, `uninstall()`'ed from them, etc... + +`BinaryProvider` has the concept of a `Product`, the result of a package installation. `LibraryProduct` and `ExecutableProduct` are two example `Product` object types that can be used to keep track of the binary objects installed by an `install()` invocation. `Products` can check to see if they are already satisfied (e.g. whether a file exists, or is executable, or is `dlopen()`'able), allowing for very quick and easy `build.jl` construction. + +`BinaryProvider` also contains a platform abstraction layer for common operations like downloading and unpacking tarballs. 
The primary method you should be using to interact with these operations is through the `install()` method, however if you need more control, there are more fundamental methods such as `download_verify()`, or `unpack()`, or even the wittingly-named `download_verify_unpack()`. + +The method documentation within the `BinaryProvider` module should be considered the primary source of documentation for this package, usage examples are provided in the form of the `LibFoo.jl` mock package [within this repository](test/LibFoo.jl), as well as other packages that use this package for binary installation such as + +## Usage + +To download and install a package into a `Prefix`, the basic syntax is: +```julia +prefix = Prefix("./deps") +install(url, tarball_hash; prefix=prefix) +``` + +It is recommended to inspect examples for a fuller treatment of installation, the [`LibFoo.jl` package within this repository](test/LibFoo.jl) contains a [`deps/build.jl` file](test/LibFoo.jl/deps/build.jl) that may be instructive. + +To actually generate the tarballs that are installed by this package, check out the [`BinaryBuilder.jl` package](https://github.com/JuliaPackaging/BinaryBuilder.jl). + +## Miscellanea + +* This package contains a `run(::Cmd)` wrapper class named `OutputCollector` that captures the output of shell commands, and in particular, captures the `stdout` and `stderr` streams separately, colorizing, buffering and timestamping appropriately to provide seamless printing of shell output in a consistent and intuitive way. Critically, it also allows for saving of the captured streams to log files, a very useful feature for [`BinaryBuilder.jl`](https://github.com/JuliaPackaging/BinaryBuilder.jl), which makes extensive use of this class, however all commands run by `BinaryProvider.jl` also use this same mechanism to provide coloring of `stderr`. 
\ No newline at end of file diff --git a/ext/BinaryProvider/REQUIRE b/ext/BinaryProvider/REQUIRE new file mode 100644 index 0000000000000..f8981b77cb007 --- /dev/null +++ b/ext/BinaryProvider/REQUIRE @@ -0,0 +1,3 @@ +julia 0.6 +SHA +Compat 0.27.0 diff --git a/ext/BinaryProvider/appveyor.yml b/ext/BinaryProvider/appveyor.yml new file mode 100644 index 0000000000000..11f663d64327c --- /dev/null +++ b/ext/BinaryProvider/appveyor.yml @@ -0,0 +1,48 @@ +environment: + matrix: + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.6/julia-0.6-latest-win32.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.6/julia-0.6-latest-win64.exe" + - JULIA_URL: "https://julialangnightlies-s3.julialang.org/bin/winnt/x86/julia-latest-win32.exe" + - JULIA_URL: "https://julialangnightlies-s3.julialang.org/bin/winnt/x64/julia-latest-win64.exe" + +matrix: + allow_failures: + - JULIA_URL: "https://julialangnightlies-s3.julialang.org/bin/winnt/x86/julia-latest-win32.exe" + - JULIA_URL: "https://julialangnightlies-s3.julialang.org/bin/winnt/x64/julia-latest-win64.exe" + +branches: + only: + - master + - /release-.*/ + +notifications: + - provider: Email + on_build_success: false + on_build_failure: false + on_build_status_changed: false + +install: + - ps: "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12" +# If there's a newer build queued for the same PR, cancel this one + - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod ` + https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | ` + Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { ` + throw "There are newer queued builds for this pull request, failing early." 
} +# Download most recent Julia Windows binary + - ps: (new-object net.webclient).DownloadFile( + $env:JULIA_URL, + "C:\projects\julia-binary.exe") +# Run installer silently, output to C:\projects\julia + - C:\projects\julia-binary.exe /S /D=C:\projects\julia + +build_script: +# Need to convert from shallow to complete for Pkg.clone to work + - IF EXIST .git\shallow (git fetch --unshallow) + - C:\projects\julia\bin\julia -e "versioninfo(); + Pkg.clone(pwd(), \"BinaryProvider\"); Pkg.build(\"BinaryProvider\")" + +test_script: + - C:\projects\julia\bin\julia -e "Pkg.test(\"BinaryProvider\", coverage=true)" + +after_test: + - C:\projects\julia\bin\julia -e "cd(Pkg.dir(\"BinaryProvider\")); Pkg.add(\"Coverage\"); using Coverage; Codecov.submit(process_folder())" diff --git a/ext/BinaryProvider/src/BinDepsIntegration.jl b/ext/BinaryProvider/src/BinDepsIntegration.jl new file mode 100644 index 0000000000000..5a9cadda85142 --- /dev/null +++ b/ext/BinaryProvider/src/BinDepsIntegration.jl @@ -0,0 +1,37 @@ +# This file contains the ingredients to create a PackageManager for BinDeps +import BinDeps: Binaries, can_use, package_available, bindir, libdir, + generate_steps, LibraryDependency, provider, provides +import Base: show + +type BP <: Binaries + url::String + hash::String + prefix::Prefix +end + +show(io::IO, p::BP) = write(io, "BinaryProvider for $(p.url)") + +# We are cross-platform baby, and we never say no to a party +can_use(::Type{BP}) = true +package_available(p::BP) = true +libdir(p::BP, dep) = @static if is_windows() + joinpath(p.prefix, "bin") +else + joinpath(p.prefix, "lib") +end + +# We provide our own overload of provides() for BP +macro BP_provides(url, hash, dep, opts...) + return quote + prefix = Prefix(joinpath(dirname(@__FILE__), "usr")) + activate(prefix) + return provides(BP, ($url, $hash, prefix), $(esc(dep)), $(opts...)) + end +end +provider(::Type{BP}, data; opts...) = BP(data...) 
+ +function generate_steps(dep::LibraryDependency, p::BP, opts) + () -> begin + install(p.url, p.hash; prefix=p.prefix, verbose=true) + end +end diff --git a/ext/BinaryProvider/src/BinaryPackage.jl b/ext/BinaryProvider/src/BinaryPackage.jl new file mode 100644 index 0000000000000..00b294b6f3f83 --- /dev/null +++ b/ext/BinaryProvider/src/BinaryPackage.jl @@ -0,0 +1,151 @@ +# Package objects provide a _slightly_ higher-level API for dealing with +# installing/uninstalling tarballs to a Prefix. +export BinaryPackage, install, uninstall, satisfied + +""" +A `BinaryPackage` collects all the information needed to download and install a +tarball containing binary objects; it has the `url` to download from, the +`hash` to verify package integrity, a list of `products` to check for proper +functioning after installation, and a list of `dependencies` that must also be +installed before this package can be used. + +There exist `install()`, `uninstall()` and `satisfied()` methods for +`BinaryPackage` objects, similar to the lower-level versions that take direct +`url` and `hash` arguments. +""" +immutable BinaryPackage + url::String + hash::String + platform::Platform + products::Vector{Product} + dependencies::Vector{BinaryPackage} + + function BinaryPackage(url::AbstractString, + hash::AbstractString, + platform::Platform, + products::Vector{Product}=Product[], + dependencies::Vector{BinaryPackage}=BinaryPackage[]) + return new(url, hash, platform, products, dependencies) + end +end + +function pkg_name(pkg::BinaryPackage) + name = basename(pkg.url) + if endswith(name, ".tar.gz") + return name[1:end-7] + end + return name +end + +""" +`install(pkg::BinaryPackage; verbose::Bool = false, kwargs...)` + +A thin wrapper over the main `install(url, hash; ...)` method. Installs all +of `pkg`'s dependencies, then installs `pkg`, but only if it is not already +satisfied. +""" +function install(pkg::BinaryPackage; verbose::Bool = false, kwargs...)
+ name = pkg_name(pkg) + + # If we are already satisfied, don't do nuthin' + if satisfied(pkg) + if verbose + info("Not installing $(name), as it is already satisfied") + end + return true + end + + # Begin by installing all the dependencies if they are not already + for dep in pkg.dependencies + # TODO: We may want to handle this through `Pkg3` operations + install(dep; verbose=verbose, kwargs...) + end + + # Finally, install ourselves + install(pkg.url, pkg.hash; verbose=verbose, kwargs...) + + # Check to see if we are actually satisfied + if !satisfied(pkg; verbose=verbose) + warn("$(name) did not satisfy itself after installation!") + return false + end + + return true +end + +""" +manifest_path(pkg::BinaryPackage; prefix::Prefix = global_prefix(), + verbose::Bool = false) + +Discovers the manifest path for the given `BinaryPackage` within the given +`Prefix`. First attempts to guess from the `url` what the manifest file would +have been named, if that doesn't work, will search for manifests that contain +any of the `products` that are within `pkg`. If neither approach works, throws +an error. +""" +function manifest_path(pkg::BinaryPackage; prefix::Prefix = global_prefix(), + verbose::Bool = false) + name = pkg_name(pkg) + # First, see if we can auto-guess the manifest file path: + manifest_path = manifest_from_url(pkg.url, prefix=prefix) + if isfile(manifest_path) + if verbose + info("Correctly auto-guessed manifest path $(manifest_path)") + end + return manifest_path + end + + if verbose + info("Could not auto-guess manifest path for $(name)") + end + + # Otherwise, let's try to guess from our products + if isempty(pkg.products) + msg = """ + Cannot find manifest path for package $(name) with unguessable manifest + file and no products. 
+ """ + error(replace(strip(msg),"\n", " ")) + end + + for product in pkg.products + product_path = locate(product, platform=pkg.platform) + if product_path != nothing + try + manifest_path = manifest_for_file(product_path; prefix=prefix) + relmani = relpath(manifest_path, prefix.path) + relprod = relpath(product_path, prefix.path) + info("Found $(relmani) for product $(relprod)") + return manifest_path + end + end + end + + error("Cannot find manifest path for package $(name)") +end + + +""" +uninstall(pkg::BinaryPackage; prefix::Prefix = global_prefix, + verbose::Bool = false) + +Uninstall `pkg` from the given `prefix` by automatically determining the +manifest path created when the package was installed. Throws an error if +this `pkg` was not installed in the first place. +""" +function uninstall(pkg::BinaryPackage; prefix::Prefix = global_prefix, + verbose::Bool = false) + # Find the manifest path for this pkg, then uninstall it + manipath = manifest_path(pkg; prefix=prefix, verbose=verbose) + uninstall(manipath; verbose=verbose) +end + +""" +`satisfied(pkg::BinaryPackage; verbose::Bool = false)` + +Returns `true` if all products defined within `pkg` are satisfied. 
+""" +function satisfied(pkg::BinaryPackage; verbose::Bool = false) + s = p -> satisfied(p; platform=pkg.platform, verbose=verbose) + return all(s(p) for p in pkg.products) +end diff --git a/ext/BinaryProvider/src/BinaryProvider.jl b/ext/BinaryProvider/src/BinaryProvider.jl new file mode 100644 index 0000000000000..2e7d7dc67d803 --- /dev/null +++ b/ext/BinaryProvider/src/BinaryProvider.jl @@ -0,0 +1,39 @@ +module BinaryProvider + +using Compat + +# Include our subprocess running functionality +include("OutputCollector.jl") +# External utilities such as downloading/decompressing tarballs +include("PlatformEngines.jl") +# Platform naming +include("PlatformNames.jl") +# Everything related to file/path management +include("Prefix.jl") +# Abstraction of "needing" a file, that would trigger an install +include("Products.jl") +# Abstraction of bundled binary package +include("BinaryPackage.jl") + +# BinDeps support, disabled for now because I don't particularly want to force +# users to install BinDeps to install this package.
That seems counter-productive +#include("BinDepsIntegration.jl") + + +function __init__() + global global_prefix + + # Initialize our global_prefix + global_prefix = Prefix(joinpath(dirname(@__FILE__), "../", "global_prefix")) + activate(global_prefix) + + # Find the right download/compression engines for this platform + probe_platform_engines!() + + # If we're on a julia that's too old, then fixup the color mappings + if !haskey(Base.text_colors, :default) + Base.text_colors[:default] = Base.color_normal + end +end + +end # module diff --git a/ext/BinaryProvider/src/OutputCollector.jl b/ext/BinaryProvider/src/OutputCollector.jl new file mode 100644 index 0000000000000..b0e44d616e603 --- /dev/null +++ b/ext/BinaryProvider/src/OutputCollector.jl @@ -0,0 +1,362 @@ +# In this file, we define a helper class that will run subprocesses, collecting +# the stdout and stderr, timestamped such that we can merge the two streams +# intelligently after the fact, or keep them separate for proper analysis. +import Base: wait, merge + +export OutputCollector, merge, stdout, stderr, tail, tee + +immutable LineStream + pipe::Pipe + lines::Vector{Tuple{Float64,String}} + task::Task +end + +""" + readuntil_many(s::IO, delims) + +Given a collection of delimiter characters, read from `s` until one of those +delimiters is reached, or we reach the end of `s`. +""" +function readuntil_many(s::IO, delims) + out = IOBuffer() + while !eof(s) + c = read(s, Char) + write(out, c) + if c in delims + break + end + end + return String(take!(out)) +end + +""" +`LineStream(pipe::Pipe)` + +Given a `Pipe` that has been initialized by `spawn()`, create an async Task to +read in lines as they come in and annotate the time the line was captured for +later replay/merging with other simultaneously captured streams. +""" +function LineStream(pipe::Pipe, event::Condition) + # We always have to close() the input half of the stream before we can + # read() from it. 
I don't know why, and this is honestly kind of annoying + close(pipe.in) + + lines = Tuple{Float64,String}[] + task = @async begin + # Read lines in until we can't anymore + while !eof(pipe) + # Push this line onto our lines, then notify() the event + line = chomp(readuntil_many(pipe, ['\n', '\r'])) + push!(lines, (time(), line)) + notify(event) + end + end + + # Create a second task that runs after the first just to notify() + # This ensures that anybody that's listening to the event but gated on our + # being alive (e.g. `tee()`) can die alongside us gracefully as well. + @async begin + wait(task) + notify(event) + end + return LineStream(pipe, lines, task) +end + +""" +`alive(s::LineStream)` + +Returns `true` if the task owned by this `LineStream` is still processing +output from an underlying `Pipe`. +""" +function alive(s::LineStream) + return !(s.task.state in [:done, :failed]) +end + + +""" +OutputCollector + +A `run()` wrapper class that captures subprocess `stdout` and `stderr` streams +independently, resynthesizing and colorizing the streams appropriately. +""" +type OutputCollector + cmd::Base.AbstractCmd + P::Base.AbstractPipe + stdout_linestream::LineStream + stderr_linestream::LineStream + event::Condition + verbose::Bool + done::Bool + + extra_tasks::Vector{Task} + + function OutputCollector(cmd, P, out_ls, err_ls, event, verbose) + return new(cmd, P, out_ls, err_ls, event, verbose, false, Task[]) + end +end + +""" +`OutputCollector(cmd::AbstractCmd; verbose::Bool = false)` + +Run `cmd`, and collect the output such that `stdout` and `stderr` are captured +independently, but with the time of each line recorded such that they can be +stored/analyzed independently, but replayed synchronously.
+""" +function OutputCollector(cmd::Base.AbstractCmd; verbose::Bool=false, tee_stream=STDOUT) + # First, launch the command + out_pipe = Pipe() + err_pipe = Pipe() + P = try + spawn(cmd, (DevNull, out_pipe, err_pipe)) + catch + warn("Could not spawn $(cmd)") + rethrow() + end + + # Next, start siphoning off the first couple lines of output and error + event = Condition() + out_ls = LineStream(out_pipe, event) + err_ls = LineStream(err_pipe, event) + + # Finally, wrap this up in an object so that we can merge stdout and stderr + # back together again at the end + self = OutputCollector(cmd, P, out_ls, err_ls, event, verbose) + + # If we're set as verbose, then start reading ourselves out to stdout + if verbose + tee(self; stream = tee_stream) + end + + return self +end + +""" +`wait(collector::OutputCollector)` + +Wait for the command and all line streams within an `OutputCollector` to finish +their respective tasks and be ready for full merging. Return the success of +the underlying process. Prints out the last 10 lines of the process if it does +not complete successfully unless the OutputCollector was created as `verbose`. +""" +function wait(collector::OutputCollector) + # If we've already done this song and dance before, then don't do it again + if collector.done + return success(collector.P) + end + + wait(collector.P) + wait(collector.stdout_linestream.task) + wait(collector.stderr_linestream.task) + + # Also wait on any extra tasks we've jimmied onto the end of this guy + for t in collector.extra_tasks + wait(t) + end + + # From this point on, we are actually done! 
+ collector.done = true + + # If we failed, then tail the output, unless we've been tee()'ing it out + # this whole time + if !success(collector.P) && !collector.verbose + print(tail(collector; colored=Base.have_color)) + end + + # Shout to the world how we've done + return success(collector.P) +end + +""" +`merge(collector::OutputCollector; colored::Bool = false)` + +Merge the stdout and stderr streams of the `OutputCollector` on a per-line +basis, returning a single string containing all collected lines, interleaved by +capture time. If `colored` is set to true, embeds terminal color codes to +print `stderr` in red. +""" +function merge(collector::OutputCollector; colored::Bool = false) + # First, wait for things to be done. No incomplete mergings here yet. + wait(collector) + + # We copy here so that you can `merge()` more than once, if you want. + stdout_lines = copy(collector.stdout_linestream.lines) + stderr_lines = copy(collector.stderr_linestream.lines) + output = IOBuffer() + + # Write out an stdout line, optionally with color, and pop off that line + function write_line(lines, should_color, color) + if should_color && colored + print(output, color) + end + t, line = shift!(lines) + println(output, line) + end + + # These help us keep track of colorizing the output + out_color = Base.text_colors[:default] + err_color = Base.text_colors[:red] + last_line_stderr = false + + # Merge stdout and stderr + while !isempty(stdout_lines) && !isempty(stderr_lines) + # Figure out if stdout's timestamp is earlier than stderr's + if stdout_lines[1][1] < stderr_lines[1][1] + write_line(stdout_lines, last_line_stderr, out_color) + last_line_stderr = false + else + write_line(stderr_lines, !last_line_stderr, err_color) + last_line_stderr = true + end + end + + # Now drain whichever one still has data within it + while !isempty(stdout_lines) + write_line(stdout_lines, last_line_stderr, out_color) + last_line_stderr = false + end + while !isempty(stderr_lines) + 
write_line(stderr_lines, !last_line_stderr, err_color) + last_line_stderr = true + end + + # Clear text colors at the end, if we need to + if last_line_stderr && colored + print(output, Base.text_colors[:default]) + end + + # Return our ill-gotten goods + return String(output) +end + +""" +`stdout(collector::OutputCollector)` + +Returns all stdout lines collected by this collector so far. +""" +function stdout(collector::OutputCollector) + return join([l[2] * "\n" for l in collector.stdout_linestream.lines], "") +end + +""" +`stderr(collector::OutputCollector)` + +Returns all stderr lines collected by this collector so far. +""" +function stderr(collector::OutputCollector) + return join([l[2] * "\n" for l in collector.stderr_linestream.lines], "") +end + +""" +`tail(collector::OutputCollector; len::Int = 10, colored::Bool = false)` + +Write out the last `len` lines, optionally writing colored lines. +""" +function tail(collector::OutputCollector; len::Int = 10, colored::Bool = false) + out = merge(collector; colored=colored) + + idx = length(out) + for line_idx in 1:len + idx = findprev(out, '\n', idx-1) + if idx <= 0 + break + end + end + + return out[idx+1:end] +end + +""" +`tee(c::OutputCollector; colored::Bool = false)` + +Spawn a background task to incrementally output lines from `collector` to the +standard output, optionally colored. 
+""" +function tee(c::OutputCollector; colored::Bool = Base.have_color, stream=STDOUT) + tee_task = @async begin + out_idx = 1 + err_idx = 1 + out_lines = c.stdout_linestream.lines + err_lines = c.stderr_linestream.lines + + # Helper function to print out the next line of stdout/stderr + function print_next_line() + timestr = Libc.strftime("[%T] ", time()) + # We know we have data, so figure out if it's for stdout or stderr + if length(out_lines) >= out_idx + print_color(:default, stream, timestr; bold=true) + if length(err_lines) >= err_idx + # If we've got input waiting from both lines, then output + # the one with the lowest capture time + if out_lines[out_idx][1] < err_lines[err_idx][1] + # Print the out line as it's older + println(stream, out_lines[out_idx][2]) + out_idx += 1 + else + # Print the err line as it's older + print_color(:red, stream, err_lines[err_idx][2]) + println(stream) + err_idx += 1 + end + else + # Print the out line that is the only one waiting + println(stream, out_lines[out_idx][2]) + out_idx += 1 + end + else length(err_lines) > err_idx + # Print the err line that is the only one waiting + print_color(:default, stream, timestr; bold=true) + print_color(:red, stream, err_lines[err_idx][2]) + println(stream) + err_idx += 1 + end + end + + # First thing, wait for some input. This avoids us trying to inspect + # the liveliness of the linestreams before they've even started. + wait(c.event) + + while alive(c.stdout_linestream) || alive(c.stderr_linestream) + if length(out_lines) >= out_idx || length(err_lines) >= err_idx + # If we have data to output, then do so + print_next_line() + else + # Otherwise, wait for more input + wait(c.event) + end + end + + # Drain the rest of stdout and stderr + while length(out_lines) >= out_idx || length(err_lines) >= err_idx + print_next_line() + end + end + + # Let the collector know that he might have to wait on this `tee()` to + # finish its business as well.
+ push!(c.extra_tasks, tee_task) + + return tee_task +end + +""" +`print_color(color::Symbol, msg::AbstractString; bold::Bool = false)` + +Functionally identical to `Base.print_with_color` except that this works +identically across Julia 0.5 and 0.6. +""" +function print_color(color::Symbol, out::IO, msg::AbstractString; bold::Bool=false) + # Engage the color, and optionally the boldness + print(out, Base.text_colors[color]) + if bold + print(out, "\e[1m") + end + + # Print the message + print(out, msg) + + # Disengage the color, and optionally the boldness + if bold + print(out, "\e[22m") + end + print(out, Base.text_colors[:normal]) +end diff --git a/ext/BinaryProvider/src/PlatformEngines.jl b/ext/BinaryProvider/src/PlatformEngines.jl new file mode 100644 index 0000000000000..89c5c3fd499ea --- /dev/null +++ b/ext/BinaryProvider/src/PlatformEngines.jl @@ -0,0 +1,543 @@ +# In this file, we setup the `gen_download_cmd()`, `gen_unpack_cmd()` and +# `gen_package_cmd()` functions by providing methods to probe the environment +# and determine the most appropriate platform binaries to call. + +export gen_download_cmd, gen_unpack_cmd, gen_package_cmd, gen_list_tarball_cmd, + parse_tarball_listing, gen_sh_cmd, parse_7z_list, parse_tar_list, + download_verify_unpack, download_verify, unpack + +""" +`gen_download_cmd(url::AbstractString, out_path::AbstractString)` + +Return a `Cmd` that will download resource located at `url` and store it at +the location given by `out_path`. + +This method is initialized by `probe_platform_engines()`, which should be +automatically called upon first import of `BinaryProvider`. +""" +gen_download_cmd = (url::AbstractString, out_path::AbstractString) -> + error("Call `probe_platform_engines()` before `gen_download_cmd()`") + +""" +`gen_unpack_cmd(tarball_path::AbstractString, out_path::AbstractString)` + +Return a `Cmd` that will unpack the given `tarball_path` into the given +`out_path`. 
If `out_path` is not already a directory, it will be created. + +This method is initialized by `probe_platform_engines()`, which should be +automatically called upon first import of `BinaryProvider`. +""" +gen_unpack_cmd = (tarball_path::AbstractString, out_path::AbstractString) -> + error("Call `probe_platform_engines()` before `gen_unpack_cmd()`") + +""" +`gen_package_cmd(in_path::AbstractString, tarball_path::AbstractString)` + +Return a `Cmd` that will package up the given `in_path` directory into a +tarball located at `tarball_path`. + +This method is initialized by `probe_platform_engines()`, which should be +automatically called upon first import of `BinaryProvider`. +""" +gen_package_cmd = (in_path::AbstractString, tarball_path::AbstractString) -> + error("Call `probe_platform_engines()` before `gen_package_cmd()`") + +""" +`gen_list_tarball_cmd(tarball_path::AbstractString)` + +Return a `Cmd` that will list the files contained within the tarball located at +`tarball_path`. The list will not include directories contained within the +tarball. + +This method is initialized by `probe_platform_engines()`, which should be +automatically called upon first import of `BinaryProvider`. +""" +gen_list_tarball_cmd = (tarball_path::AbstractString) -> + error("Call `probe_platform_engines()` before `gen_list_tarball_cmd()`") + +""" +`parse_tarball_listing(output::AbstractString)` + +Parses the result of `gen_list_tarball_cmd()` into something useful. + +This method is initialized by `probe_platform_engines()`, which should be +automatically called upon first import of `BinaryProvider`. +""" +parse_tarball_listing = (output::AbstractString) -> + error("Call `probe_platform_engines()` before `parse_tarball_listing()`") + +""" +`gen_sh_cmd(cmd::Cmd)` + +Runs a command using `sh`. On Unices, this will default to the first `sh` +found on the `PATH`, however on Windows if that is not found it will fall back +to the `sh` provided by the `busybox.exe` shipped with Julia. 
+ +This method is initialized by `probe_platform_engines()`, which should be +automatically called upon first import of `BinaryProvider`. +""" +gen_sh_cmd = (cmd::Cmd) -> + error("Call `probe_platform_engines()` before `gen_sh_cmd()`") + + +""" +`probe_cmd(cmd::Cmd; verbose::Bool = false)` + +Returns `true` if the given command executes successfully, `false` otherwise. +""" +function probe_cmd(cmd::Cmd; verbose::Bool = false) + if verbose + info("Probing $(cmd.exec[1]) as a possibility...") + end + try + success(cmd) + if verbose + info(" Probe successful for $(cmd.exec[1])") + end + return true + catch + return false + end +end + +""" +`probe_platform_engines!(;verbose::Bool = false)` + +Searches the environment for various tools needed to download, unpack, and +package up binaries. Searches for a download engine to be used by +`gen_download_cmd()` and a compression engine to be used by `gen_unpack_cmd()`, +`gen_package_cmd()`, `gen_list_tarball_cmd()` and `parse_tarball_listing()`, as +well as a `sh` execution engine for `gen_sh_cmd()`. Running this function +will set the global functions to their appropriate implementations given the +environment this package is running on. + +This probing function will automatically search for download engines using a +particular ordering; if you wish to override this ordering and use one over all +others, set the `BINARYPROVIDER_DOWNLOAD_ENGINE` environment variable to its +name, and it will be the only engine searched for. For example, put: + + ENV["BINARYPROVIDER_DOWNLOAD_ENGINE"] = "fetch" + +within your `~/.juliarc.jl` file to force `fetch` to be used over `curl`. If +the given override does not match any of the download engines known to this +function, a warning will be printed and the typical ordering will be performed. + +Similarly, if you wish to override the compression engine used, set the +`BINARYPROVIDER_COMPRESSION_ENGINE` environment variable to its name (e.g. 
`7z` +or `tar`) and it will be the only engine searched for. If the given override +does not match any of the compression engines known to this function, a warning +will be printed and the typical searching will be performed. + +If `verbose` is `true`, print out the various engines as they are searched. +""" +function probe_platform_engines!(;verbose::Bool = false) + global gen_download_cmd, gen_list_tarball_cmd, gen_package_cmd + global gen_unpack_cmd, parse_tarball_listing, gen_sh_cmd + + # download_engines is a list of (test_cmd, download_opts_functor) + # The probulator will check each of them by attempting to run `$test_cmd`, + # and if that works, will set the global download functions appropriately. + const download_engines = [ + (`curl --help`, (url, path) -> `curl -C - -\# -f -o $path -L $url`), + (`wget --help`, (url, path) -> `wget -c -O $path $url`), + (`fetch --help`, (url, path) -> `fetch -f $path $url`), + ] + + # 7z is rather intensely verbose. We also want to try running not only + # `7z` but also a direct path to the `7z.exe` bundled with Julia on + # windows, so we create generator functions to spit back functors to invoke + # the correct 7z given the path to the executable: + unpack_7z = (exe7z) -> begin + return (tarball_path, out_path) -> + pipeline(`$exe7z x $(tarball_path) -y -so`, + `$exe7z x -si -y -ttar -o$(out_path)`) + end + package_7z = (exe7z) -> begin + return (in_path, tarball_path) -> + pipeline(`$exe7z a -ttar -so a.tar "$(joinpath(".",in_path,"*"))"`, + `$exe7z a -si $(tarball_path)`) + end + list_7z = (exe7z) -> begin + return (path) -> + pipeline(`$exe7z x $path -so`, `$exe7z l -ttar -y -si`) + end + + # Tar is rather less verbose, and we don't need to search multiple places + # for it, so just rely on PATH to have `tar` available for us: + unpack_tar = (tarball_path, out_path) -> + `tar xzf $(tarball_path) --directory=$(out_path)` + package_tar = (in_path, tarball_path) -> + `tar -czvf $tarball_path -C $(in_path) .` + list_tar 
= (in_path) -> `tar tzf $in_path` + + # compression_engines is a list of (test_cmd, unpack_opts_functor, + # package_opts_functor, list_opts_functor, parse_functor). The probulator + # will check each of them by attempting to run `$test_cmd`, and if that + # works, will set the global compression functions appropriately. + gen_7z = (p) -> (unpack_7z(p), package_7z(p), list_7z(p), parse_7z_list) + const compression_engines = Tuple[ + (`tar --help`, unpack_tar, package_tar, list_tar, parse_tar_list), + ] + + # sh_engines is just a list of Cmds-as-paths + const sh_engines = [ + `sh` + ] + + # For windows, we need to tweak a few things, as the tools available differ + @static if is_windows() + # For download engines, we will most likely want to use powershell. + # Let's generate a functor to return the necessary powershell magics + # to download a file, given a path to the powershell executable + psh_download = (psh_path) -> begin + return (url, path) -> begin + webclient_code = """ + [System.Net.ServicePointManager]::SecurityProtocol = + [System.Net.SecurityProtocolType]::Tls12; + \$webclient = (New-Object System.Net.Webclient); + \$webclient.DownloadFile(\"$url\", \"$path\") + """ + replace(webclient_code, "\n", " ") + return `$psh_path -NoProfile -Command "$webclient_code"` + end + end + + # We want to search both the `PATH`, and the direct path for powershell + psh_path = "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell" + prepend!(download_engines, [ + (`$psh_path -Help`, psh_download(psh_path)) + ]) + prepend!(download_engines, [ + (`powershell -Help`, psh_download(`powershell`)) + ]) + + # We greatly prefer `7z` as a compression engine on Windows + prepend!(compression_engines, [(`7z --help`, gen_7z("7z")...)]) + + # On windows, we bundle 7z with Julia, so try invoking that directly + const exe7z = joinpath(JULIA_HOME, "7z.exe") + prepend!(compression_engines, [(`$exe7z --help`, gen_7z(exe7z)...)]) + + # And finally, we want to look for sh as busybox 
as well: + const busybox = joinpath(JULIA_HOME, "busybox.exe") + prepend!(sh_engines, [(`$busybox sh`)]) + end + + # Allow environment override + if haskey(ENV, "BINARYPROVIDER_DOWNLOAD_ENGINE") + engine = ENV["BINARYPROVIDER_DOWNLOAD_ENGINE"] + dl_ngs = filter(e -> e[1].exec[1] == engine, download_engines) + if isempty(dl_ngs) + all_ngs = join([d[1].exec[1] for d in download_engines], ", ") + warn_msg = "Ignoring BINARYPROVIDER_DOWNLOAD_ENGINE as its value " + warn_msg *= "of `$(engine)` doesn't match any known valid engines." + warn_msg *= " Try one of `$(all_ngs)`." + warn(warn_msg) + else + # If BINARYPROVIDER_DOWNLOAD_ENGINE matches one of our download engines, + # then restrict ourselves to looking only at that engine + download_engines = dl_ngs + end + end + + if haskey(ENV, "BINARYPROVIDER_COMPRESSION_ENGINE") + engine = ENV["BINARYPROVIDER_COMPRESSION_ENGINE"] + comp_ngs = filter(e -> e[1].exec[1] == engine, compression_engines) + if isempty(comp_ngs) + all_ngs = join([c[1].exec[1] for c in compression_engines], ", ") + warn_msg = "Ignoring BINARYPROVIDER_COMPRESSION_ENGINE as its " + warn_msg *= "value of `$(engine)` doesn't match any known valid " + warn_msg *= "engines. Try one of `$(all_ngs)`." 
+ warn(warn_msg) + else + # If BINARYPROVIDER_COMPRESSION_ENGINE matches one of our download + # engines, then restrict ourselves to looking only at that engine + compression_engines = comp_ngs + end + end + + download_found = false + compression_found = false + sh_found = false + + if verbose + info("Probing for download engine...") + end + + # Search for a download engine + for (test, dl_func) in download_engines + if probe_cmd(`$test`; verbose=verbose) + # Set our download command generator + gen_download_cmd = dl_func + download_found = true + + if verbose + info("Found download engine $(test.exec[1])") + end + break + end + end + + if verbose + info("Probing for compression engine...") + end + + # Search for a compression engine + for (test, unpack, package, list, parse) in compression_engines + if probe_cmd(`$test`; verbose=verbose) + # Set our compression command generators + gen_unpack_cmd = unpack + gen_package_cmd = package + gen_list_tarball_cmd = list + parse_tarball_listing = parse + + if verbose + info("Found compression engine $(test.exec[1])") + end + + compression_found = true + break + end + end + + if verbose + info("Probing for sh engine...") + end + + for path in sh_engines + if probe_cmd(`$path --help`; verbose=verbose) + gen_sh_cmd = (cmd) -> `$path -c $cmd` + if verbose + info("Found sh engine $(path.exec[1])") + end + sh_found = true + break + end + end + + + # Build informative error messages in case things go sideways + errmsg = "" + if !download_found + errmsg *= "No download engines found. We looked for: " + errmsg *= join([d[1].exec[1] for d in download_engines], ", ") + errmsg *= ". Install one and ensure it is available on the path.\n" + end + + if !compression_found + errmsg *= "No compression engines found. We looked for: " + errmsg *= join([c[1].exec[1] for c in compression_engines], ", ") + errmsg *= ". Install one and ensure it is available on the path.\n" + end + + if !sh_found + errmsg *= "No sh engines found. 
We looked for: " + errmsg *= join([b.exec[1] for b in sh_engines], ", ") + errmsg *= ". Install one and ensure it is available on the path.\n" + end + + # Error out if we couldn't find something + if !download_found || !compression_found || !sh_found + error(errmsg) + end +end + +""" +`parse_7z_list(output::AbstractString)` + +Given the output of `7z l`, parse out the listed filenames. This funciton used +by `list_tarball_files`. +""" +function parse_7z_list(output::AbstractString) + lines = [chomp(l) for l in split(output, "\n")] + # Remove extraneous "\r" for windows platforms + for idx in 1:length(lines) + if endswith(lines[idx], '\r') + lines[idx] = lines[idx][1:end-1] + end + end + + # Find index of " Name". (can't use `findfirst(generator)` until this is + # closed: https://github.com/JuliaLang/julia/issues/16884 + header_row = find(contains(l, " Name") && contains(l, " Attr") for l in lines)[1] + name_idx = search(lines[header_row], "Name")[1] + attr_idx = search(lines[header_row], "Attr")[1] - 1 + + # Filter out only the names of files, ignoring directories + lines = [l[name_idx:end] for l in lines if length(l) > name_idx && l[attr_idx] != 'D'] + if isempty(lines) + return [] + end + + # Extract within the bounding lines of ------------ + bounds = [i for i in 1:length(lines) if all([c for c in lines[i]] .== '-')] + lines = lines[bounds[1]+1:bounds[2]-1] + + # Eliminate `./` prefix, if it exists + for idx in 1:length(lines) + if startswith(lines[idx], "./") || startswith(lines[idx], ".\\") + lines[idx] = lines[idx][3:end] + end + end + + return lines +end + +""" +`parse_7z_list(output::AbstractString)` + +Given the output of `tar -t`, parse out the listed filenames. This funciton +used by `list_tarball_files`. 
+""" +function parse_tar_list(output::AbstractString) + lines = [chomp(l) for l in split(output, "\n")] + + # Drop empty lines and and directories + lines = [l for l in lines if !isempty(l) && !endswith(l, '/')] + + # Eliminate `./` prefix, if it exists + for idx in 1:length(lines) + if startswith(lines[idx], "./") || startswith(lines[idx], ".\\") + lines[idx] = lines[idx][3:end] + end + end + + return lines +end + +""" + download(url::AbstractString, dest::AbstractString; + verbose::Bool = false) + +Download file located at `url`, store it at `dest`, continuing if `dest` +already exists and the server and download engine support it. +""" +function download(url::AbstractString, dest::AbstractString; + verbose::Bool = false) + download_cmd = gen_download_cmd(url, dest) + if verbose + info("Downloading $(url) to $(dest)...") + end + oc = OutputCollector(download_cmd; verbose=verbose) + try + if !wait(oc) + error() + end + catch + error("Could not download $(url) to $(dest)") + end +end + +""" + download_verify(url::AbstractString, hash::AbstractString; + verbose::Bool = false, force::Bool = false) + +Download file located at `url`, verify it matches the given `hash`, and throw +an error if anything goes wrong. If `dest` already exists, just verify it. If +`force` is set to `true`, overwrite the given file if it exists but does not +match the given `hash`. +""" +function download_verify(url::AbstractString, hash::AbstractString, + dest::AbstractString; verbose::Bool = false, + force::Bool = false) + # Whether the file existed in the first place + file_existed = false + + if isfile(dest) + file_existed = true + if verbose + info("Destination file $(dest) already exists, verifying...") + end + + # verify download, if it passes, return happy. If it fails, (and + # `force` is `true`, re-download!) 
+ try + verify(dest, hash; verbose=verbose) + return true + catch + if !force + rethrow() + end + if verbose + info("Verification failed, re-downloading...") + end + end + end + + # Download the file, optionally continuing + download(url, dest; verbose=verbose) + + # If it worked, then yay! + try + return verify(dest, hash; verbose=verbose) + catch + # If the file already existed, it's possible the initially downloaded chunk + # was bad. If verification fails after downloading, auto-delete the file + # and start over from scratch. + if file_existed + if verbose + msg = strip(""" + Continued download did not yield change in file size, restarting + from scratch...""") + info(msg) + end + rm(dest; force=true) + + # Download and verify from scratch + download(url, dest; verbose=verbose) + return verify(dest, hash; verbose=verbose) + else + # If it didn't verify properly and we didn't resume, something is + # very wrong and we must complain mightily. + rethrow() + end + end +end + +""" +`unpack(tarball_path::AbstractString, dest::AbstractString; + verbose::Bool = false)` + +Unpack tarball located at file `tarball_path` into directory `dest`. +""" +function unpack(tarball_path::AbstractString, dest::AbstractString; + verbose::Bool = false) + # unpack into dest + try mkpath(dest) end + oc = OutputCollector(gen_unpack_cmd(tarball_path, dest); verbose=verbose) + try + if !wait(oc) + error() + end + catch + error("Could not unpack $(tarball_path) into $(dest)") + end +end + + +""" +`download_verify_unpack(url::AbstractString, hash::AbstractString, + dest::AbstractString; verbose::Bool = false)` + +Helper method to download tarball located at `url`, verify it matches the +given `hash`, then unpack it into folder `dest`. In general, the method +`install()` should be used to download and install tarballs into a `Prefix`; +this method should only be used if the extra functionality of `install()` is +undesired. 
+""" +function download_verify_unpack(url::AbstractString, + hash::AbstractString, + dest::AbstractString; + verbose::Bool = false) + # First, download tarball to temporary path and verify it + tarball_path = "$(tempname())-download.tar.gz" + download_verify(url, hash, tarball_path) + + try + unpack(tarball_path, dest; verbose=verbose) + finally + # Clear out the tarball path no matter what + rm(tarball_path) + end +end + diff --git a/ext/BinaryProvider/src/PlatformNames.jl b/ext/BinaryProvider/src/PlatformNames.jl new file mode 100644 index 0000000000000..2b1598012b529 --- /dev/null +++ b/ext/BinaryProvider/src/PlatformNames.jl @@ -0,0 +1,212 @@ +export supported_platforms, platform_key, platform_dlext, valid_dl_path, + arch, wordsize, triplet, Platform, Linux, MacOS, Windows + +abstract type Platform end + +struct Linux <: Platform + arch::Symbol + libc::Symbol + + function Linux(arch::Symbol, libc::Symbol=:glibc) + if !in(arch, [:i686, :x86_64, :aarch64, :powerpc64le, :ppc64le, :armv7l]) + throw(ArgumentError("Unsupported architecture '$arch' for Linux")) + end + if libc !== :glibc && libc !== :musl + throw(ArgumentError("Unsupported libc '$libc' for Linux")) + end + if arch === :ppc64le + arch = :powerpc64le + end + new(arch, libc) + end +end + +struct MacOS <: Platform + + function MacOS(arch::Symbol) + if arch !== :x86_64 + throw(ArgumentError("Unsupported architecture '$arch' for macOS")) + end + new() + end + MacOS() = new() +end + +struct Windows <: Platform + arch::Symbol + + function Windows(arch::Symbol) + if arch !== :i686 && arch !== :x86_64 + throw(ArgumentError("Unsupported architecture '$arch' for Windows")) + end + new(arch) + end +end + +""" + arch(platform) + +Get the architecture for the given `Platform` object as a `Symbol`. 
+ +# Examples +```jldoctest +julia> arch(Linux(:aarch64)) +:aarch64 + +julia> arch(MacOS()) +:x86_64 +``` +""" +arch(p::Platform) = p.arch +arch(m::MacOS) = :x86_64 + +""" + wordsize(platform) + +Get the word size for the given `Platform` object. + +# Examples +```jldoctest +julia> wordsize(Linux(:arm7vl)) +32 + +julia> wordsize(MacOS()) +64 +``` +""" +wordsize(l::Linux) = arch(l) === :i686 || arch(l) === :armv7l ? 32 : 64 +wordsize(w::Windows) = arch(w) === :i686 ? 32 : 64 +wordsize(m::MacOS) = 64 + +""" + triplet(platform) + +Get the target triplet for the given `Platform` object as a `String`. + +# Examples +```jldoctest +julia> triplet(MacOS()) +"x86_64-apple-darwin14" + +julia> triplet(Windows(:i686)) +"i686-w64-mingw32" + +julia> triplet(Linux(:armv7l)) +"arm-linux-gnueabihf" +``` +""" +function triplet(l::Linux) + c = l.libc === :glibc ? "gnu" : "musl" # Currently only glibc and musl are recognized + if arch(l) === :armv7l + string("arm-linux-", c, "eabihf") + else + string(arch(l), "-linux-", c) + end +end +triplet(w::Windows) = string(arch(w), "-w64-mingw32") +triplet(m::MacOS) = "x86_64-apple-darwin14" + +""" + supported_platforms() + +Return the list of supported platforms as an array of `Platform`s. +""" +function supported_platforms() + return [ + Linux(:i686), + Linux(:x86_64), + Linux(:aarch64), + Linux(:armv7l), + Linux(:powerpc64le), + MacOS(), + Windows(:i686), + Windows(:x86_64), + ] +end + +# Compat doesn't use the Base definitions for whatever terrible reason, so we'll overload +# both, ensuring the user gets our definitions regardless of whether they use Sys.is* or +# Compat.Sys.is*. 
+if isdefined(Base.Sys, :isapple) + Base.Sys.isapple(p::Platform) = p isa MacOS + Base.Sys.islinux(p::Platform) = p isa Linux + Base.Sys.iswindows(p::Platform) = p isa Windows +end +Compat.Sys.isapple(p::Platform) = p isa MacOS +Compat.Sys.islinux(p::Platform) = p isa Linux +Compat.Sys.iswindows(p::Platform) = p isa Windows + +""" + platform_key(machine::AbstractString = Sys.MACHINE) + +Returns the platform key for the current platform, or any other though the +the use of the `machine` parameter. +""" +function platform_key(machine::AbstractString = Sys.MACHINE) + # First, off, if `machine` is literally one of the values of our mapping + # above, just return the relevant key + for key in supported_platforms() + if machine == triplet(key) + return key + end + end + + # Otherwise, try to parse the machine into one of our keys + if startswith(machine, "x86_64-apple-darwin") + return MacOS() + end + if ismatch(r"x86_64-(pc-)?(unknown-)?linux-gnu", machine) + return Linux(:x86_64) + end + if ismatch(r"i\d86-(pc-)?(unknown-)?linux-gnu", machine) + return Linux(:i686) + end + if ismatch(r"aarch64-(pc-)?(unknown-)?linux-gnu", machine) + return Linux(:aarch64) + end + if ismatch(r"armv7l-(pc-)?(unknown-)?linux-gnueabihf", machine) + return Linux(:armv7l) + end + if ismatch(r"powerpc64le-(pc-)?(unknown-)?linux-gnu", machine) + return Linux(:powerpc64le) + end + + throw(ArgumentError("Platform `$(machine)` is not an officially supported platform")) +end + + +""" + platform_dlext(platform::Platform = platform_key()) + +Return the dynamic library extension for the given platform, defaulting to the +currently running platform. E.g. returns "so" for a Linux-based platform, +"dll" for a Windows-based platform, etc... 
+""" +platform_dlext(l::Linux) = "so" +platform_dlext(m::MacOS) = "dylib" +platform_dlext(w::Windows) = "dll" +platform_dlext() = platform_dlext(platform_key()) + +""" + valid_dl_path(path::AbstractString, platform::Platform) + +Return `true` if the given `path` ends in a valid dynamic library filename. +E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns +`false` for a path like `"libbar.so.f.a"`. +""" +function valid_dl_path(path::AbstractString, platform::Platform) + const dlext_regexes = Dict( + # On Linux, libraries look like `libnettle.so.6.3.0` + "so" => r"^(.*).so(\.[\d]+){0,3}$", + # On OSX, libraries look like `libnettle.6.3.dylib` + "dylib" => r"^(.*).dylib$", + # On Windows, libraries look like `libnettle-6.dylib` + "dll" => r"^(.*).dll$" + ) + + # Given a platform, find the dlext regex that matches it + dlregex = dlext_regexes[platform_dlext(platform)] + + # Return whether or not that regex matches the basename of the given path + return ismatch(dlregex, basename(path)) +end diff --git a/ext/BinaryProvider/src/Prefix.jl b/ext/BinaryProvider/src/Prefix.jl new file mode 100644 index 0000000000000..176b7b2f23b38 --- /dev/null +++ b/ext/BinaryProvider/src/Prefix.jl @@ -0,0 +1,543 @@ +## This file contains functionality related to the actual layout of the files +# on disk. Things like the name of where downloads are stored, and what +# environment variables must be updated to, etc... +import Base: convert, joinpath, show +using SHA + +export Prefix, bindir, libdir, includedir, logdir, activate, deactivate, + extract_platform_key, install, uninstall, manifest_from_url, + manifest_for_file, list_tarball_files, verify, temp_prefix, package + +""" + temp_prefix(func::Function) + +Create a temporary prefix, passing the prefix into the user-defined function so +that build/packaging operations can occur within the temporary prefix, which is +then cleaned up after all operations are finished. 
If the path provided exists +already, it will be deleted. + +Usage example: + + out_path = abspath("./libfoo") + temp_prefix() do p + # + + # tarball up the built package + tarball_path, tarball_hash = package(p, out_path) + end +""" +function temp_prefix(func::Function) + # Helper function to create a docker-mountable temporary directory + function _tempdir() + @static if is_apple() + # Docker, on OSX at least, can only mount from certain locations by + # default, so we ensure all our temporary directories live within + # those locations so that they are accessible by Docker. + return "/tmp" + else + return tempdir() + end + end + + mktempdir(_tempdir()) do path + prefix = Prefix(path) + + # Run the user function + func(prefix) + end +end + +# This is the default prefix that things get saved to, it is initialized within +# __init__() on first module load. +global_prefix = nothing +immutable Prefix + path::String + + """ + Prefix(path::AbstractString) + + A `Prefix` represents a binary installation location. There is a default + global `Prefix` (available at `BinaryProvider.global_prefix`) that packages + are installed into by default, however custom prefixes can be created + trivially by simply constructing a `Prefix` with a given `path` to install + binaries into, likely including folders such as `bin`, `lib`, etc... + """ + function Prefix(path::AbstractString) + # Canonicalize immediately, create the overall prefix, then return + path = abspath(path) + mkpath(path) + return new(path) + end +end + +# Make it easy to bandy about prefixes as paths. There has got to be a better +# way to do this, but it's hackin' time, so just go with the flow. +joinpath(prefix::Prefix, args...) = joinpath(prefix.path, args...) +joinpath(s::AbstractString, prefix::Prefix, args...) = joinpath(s, prefix.path, args...) 
+ +convert(::Type{AbstractString}, prefix::Prefix) = prefix.path +show(io::IO, prefix::Prefix) = show(io, "Prefix($(prefix.path))") + +""" + split_PATH(PATH::AbstractString = ENV["PATH"]) + +Splits a string such as the `PATH` environment variable into a list of strings +according to the path separation rules for the current platform. +""" +function split_PATH(PATH::AbstractString = ENV["PATH"]) + @static if is_windows() + return split(PATH, ";") + else + return split(PATH, ":") + end +end + +""" + join_PATH(PATH::Vector{AbstractString}) + +Given a list of strings, return a joined string suitable for the `PATH` +environment variable appropriate for the current platform. +""" +function join_PATH{S<:AbstractString}(paths::Vector{S}) + @static if is_windows() + return join(paths, ";") + else + return join(paths, ":") + end +end + +""" + bindir(prefix::Prefix) + +Returns the binary directory for the given `prefix`. +""" +function bindir(prefix::Prefix) + return joinpath(prefix, "bin") +end + +""" + libdir(prefix::Prefix) + +Returns the library directory for the given `prefix` (not ethat this differs +between unix systems and windows systems). +""" +function libdir(prefix::Prefix) + @static if is_windows() + return joinpath(prefix, "bin") + else + return joinpath(prefix, "lib") + end +end + +""" + includedir(prefix::Prefix) + +Returns the include directory for the given `prefix` +""" +function includedir(prefix::Prefix) + return joinpath(prefix, "include") +end + +""" + logdir(prefix::Prefix) + +Returns the logs directory for the given `prefix`. +""" +function logdir(prefix::Prefix) + return joinpath(prefix, "logs") +end + +""" + activate(prefix::Prefix) + +Prepends paths to environment variables so that binaries and libraries are +available to Julia. 
+""" +function activate(prefix::Prefix) + # Add to PATH + paths = split_PATH() + if !(bindir(prefix) in paths) + prepend!(paths, [bindir(prefix)]) + end + ENV["PATH"] = join_PATH(paths) + + # Add to DL_LOAD_PATH + if !(libdir(prefix) in Libdl.DL_LOAD_PATH) + prepend!(Libdl.DL_LOAD_PATH, [libdir(prefix)]) + end + return nothing +end + +""" + activate(func::Function, prefix::Prefix) + +Prepends paths to environment variables so that binaries and libraries are +available to Julia, calls the user function `func`, then `deactivate()`'s +the `prefix`` again. +""" +function activate(func::Function, prefix::Prefix) + activate(prefix) + func() + deactivate(prefix) +end + +""" + deactivate(prefix::Prefix) + +Removes paths added to environment variables by `activate()` +""" +function deactivate(prefix::Prefix) + # Remove from PATH + paths = split_PATH() + filter!(p -> p != bindir(prefix), paths) + ENV["PATH"] = join_PATH(paths) + + # Remove from DL_LOAD_PATH + filter!(p -> p != libdir(prefix), Libdl.DL_LOAD_PATH) + return nothing +end + +""" + extract_platform_key(path::AbstractString) + +Given the path to a tarball, return the platform key of that tarball. If none +can be found, prints a warning and return the current platform suffix. +""" +function extract_platform_key(path::AbstractString) + if endswith(path, ".tar.gz") + path = path[1:end-7] + end + idx = rsearch(path, '.') + if idx == 0 + warn("Could not extract the platform key of $(path); continuing...") + return platform_key() + end + return platform_key(path[idx+1:end]) +end + +""" + install(tarball_url::AbstractString, + hash::AbstractString; + prefix::Prefix = global_prefix, + force::Bool = false, + ignore_platform::Bool = false, + verbose::Bool = false) + +Given a `prefix`, a `tarball_url` and a `hash`, download that tarball into the +prefix, verify its integrity with the `hash`, and install it into the `prefix`. +Also save a manifest of the files into the prefix for uninstallation later. 
+""" +function install(tarball_url::AbstractString, + hash::AbstractString; + prefix::Prefix = global_prefix, + force::Bool = false, + ignore_platform::Bool = false, + verbose::Bool = false) + # Get the platform key from the tarball and complain if it doesn't match + # the platform we're currently running on + platform = extract_platform_key(tarball_url) + if !ignore_platform && platform_key() != platform + msg = "Will not install a tarball of platform $(platform) on a system " + msg *= "of platform $(platform_key()) unless `ignore_platform` is " + msg *= "explicitly set to `true`." + error(msg) + end + + # Create the downloads directory if it does not already exist + tarball_path = joinpath(prefix, "downloads", basename(tarball_url)) + try mkpath(dirname(tarball_path)) end + + # Check to see if we're "installing" from a file + if isfile(tarball_url) + # If we are, just verify it's already downloaded properly + tarball_path = tarball_url + + verify(tarball_path, hash; verbose=verbose) + else + # If not, actually download it + if verbose + info("Downloading $(tarball_url) to $(tarball_path)") + end + download_verify(tarball_url, hash, tarball_path; verbose=verbose) + end + + if verbose + info("Installing $(tarball_path) into $(prefix.path)") + end + + # First, get list of files that are contained within the tarball + file_list = list_tarball_files(tarball_path) + + # Check to see if any files are already present + for file in file_list + if isfile(joinpath(prefix, file)) + if !force + msg = "$(file) already exists and would be overwritten while " + msg *= "installing $(basename(tarball_path))\n" + msg *= "Will not overwrite unless `force = true` is set." 
+ error(msg) + else + if verbose + info("$(file) already exists, force-removing") + end + rm(file; force=true) + end + end + end + + # Unpack the tarball into prefix + unpack(tarball_path, prefix.path; verbose=verbose) + + # Save installation manifest + manifest_path = manifest_from_url(tarball_path, prefix=prefix) + mkpath(dirname(manifest_path)) + open(manifest_path, "w") do f + write(f, join(file_list, "\n")) + end + + return true +end + +""" + uninstall(manifest::AbstractString; verbose::Bool = false) + +Uninstall a package from a prefix by providing the `manifest_path` that was +generated during `install()`. To find the `manifest_file` for a particular +installed file, use `manifest_for_file(file_path; prefix=prefix)`. +""" +function uninstall(manifest::AbstractString; + verbose::Bool = false) + # Complain if this manifest file doesn't exist + if !isfile(manifest) + error("Manifest path $(manifest) does not exist") + end + + prefix_path = dirname(dirname(manifest)) + if verbose + relmanipath = relpath(manifest, prefix_path) + info("Removing files installed by $(relmanipath)") + end + + # Remove every file listed within the manifest file + for path in [chomp(l) for l in readlines(manifest)] + delpath = joinpath(prefix_path, path) + if !isfile(delpath) + if verbose + info(" $delpath does not exist, but ignoring") + end + else + if verbose + delrelpath = relpath(delpath, prefix_path) + info(" $delrelpath removed") + end + rm(delpath; force=true) + end + end + + if verbose + info(" $(relmanipath) removed") + end + rm(manifest; force=true) + return true +end + +""" + manifest_from_url(url::AbstractString; prefix::Prefix = global_prefix()) + +Returns the file path of the manifest file for the tarball located at `url`. 
+""" +function manifest_from_url(url::AbstractString; + prefix::Prefix = global_prefix()) + # Given an URL, return an autogenerated manifest name + return joinpath(prefix, "manifests", basename(url)[1:end-7] * ".list") +end + +""" + manifest_for_file(path::AbstractString; prefix::Prefix = global_prefix) + +Returns the manifest file containing the installation receipt for the given +`path`, throws an error if it cannot find a matching manifest. +""" +function manifest_for_file(path::AbstractString; + prefix::Prefix = global_prefix) + if !isfile(path) + error("File $(path) does not exist") + end + + search_path = relpath(path, prefix.path) + if startswith(search_path, "..") + error("Cannot search for paths outside of the given Prefix!") + end + + manidir = joinpath(prefix, "manifests") + for fname in [f for f in readdir(manidir) if endswith(f, ".list")] + manifest_path = joinpath(manidir, fname) + if search_path in [chomp(l) for l in readlines(manifest_path)] + return manifest_path + end + end + + error("Could not find $(search_path) in any manifest files") +end + +""" + list_tarball_files(path::AbstractString; verbose::Bool = false) + +Given a `.tar.gz` filepath, list the compressed contents. +""" +function list_tarball_files(path::AbstractString; verbose::Bool = false) + if !isfile(path) + error("Tarball path $(path) does not exist") + end + + # Run the listing command, then parse the output + oc = OutputCollector(gen_list_tarball_cmd(path); verbose=verbose) + try + if !wait(oc) + error() + end + catch + error("Could not list contents of tarball $(path)") + end + return parse_tarball_listing(stdout(oc)) +end + +""" + verify(path::String, hash::String; verbose::Bool) + +Given a file `path` and a `hash`, calculate the SHA256 of the file and compare +it to `hash`. If an error occurs, `verify()` will throw an error. This method +caches verification results in a `"\$(path).sha256"` file to accelerate re- +verification of files that have been previously verified. 
If no `".sha256"`
+file exists, a full verification will be done and the file will be created,
+with the calculated hash being stored within the `".sha256"` file. If a
+`".sha256"` file does exist, its contents are checked to ensure that the hash
+contained within matches the given `hash` parameter, and its modification time
+shows that the file located at `path` has not been modified since the last
+verification.
+"""
+function verify(path::AbstractString, hash::AbstractString; verbose::Bool = false)
+    if length(hash) != 64
+        msg = "Hash must be 256 bits (64 characters) long, "
+        msg *= "given hash is $(length(hash)) characters long"
+        error(msg)
+    end
+
+    # First, check to see if the hash cache is consistent
+    hash_path = "$(path).sha256"
+
+    # First, it must exist
+    if isfile(hash_path)
+        # Next, it must contain the same hash as what we're verifying against
+        if readstring(open(hash_path, "r")) == hash
+            # Next, it must be no older than the actual path
+            if stat(hash_path).mtime >= stat(path).mtime
+                # If all of that is true, then we're good!
+ if verbose + info("Hash cache is consistent, returning true") + end + return true + else + if verbose + info("File has been modified, hash cache invalidated") + end + end + else + if verbose + info("Hash has changed, hash cache invalidated") + end + end + else + if verbose + info("No hash cache found") + end + end + + open(path) do file + calc_hash = bytes2hex(sha256(file)) + if verbose + info("Calculated hash $calc_hash for file $path") + end + + if calc_hash != hash + msg = "Hash Mismatch!\n" + msg *= " Expected sha256: $hash\n" + msg *= " Calculated sha256: $calc_hash" + error(msg) + end + end + + # Save a hash cache if everything worked out fine + open(hash_path, "w") do file + write(file, hash) + end + + return true +end + +""" + package(prefix::Prefix, tarball_base::AbstractString, + platform::Platform = platform_key(), verbose::Bool = false) + +Build a tarball of the `prefix`, storing the tarball at `tarball_base` plus a +platform-dependent suffix and a file extension (defaults to the current +platform, but overridable through the `platform` argument. Runs an `audit()` +on the `prefix`, to ensure that libraries can be `dlopen()`'ed, that all +dependencies are located within the prefix, etc... See the `audit()` +documentation for a full list of the audit steps. + +Returns the full path to and the hash of the generated tarball. +""" +function package(prefix::Prefix, + tarball_base::AbstractString; + platform::Platform = platform_key(), + verbose::Bool = false, + force::Bool = false) + # First calculate the output path given our tarball_base and platform + out_path = try + "$(tarball_base).$(triplet(platform)).tar.gz" + catch + error("Platform key `$(platform)` not recognized") + end + + if isfile(out_path) + if force + if verbose + info("$(out_path) already exists, force-overwriting...") + end + rm(out_path; force=true) + else + msg = replace(strip(""" + $(out_path) already exists, refusing to package into it without + `force` being set to `true`. 
+ """), "\n", " ") + error(msg) + end + end + + withenv("GZIP" => "-9") do + package_cmd = gen_package_cmd(prefix.path, out_path) + oc = OutputCollector(package_cmd; verbose=verbose) + + # Actually run the `tar` command + try + if !wait(oc) + error() + end + catch + # If we made a boo-boo, fess up. Remember that the `oc` will auto- + # `tail()` failing commands. + error("Packaging of $(prefix.path) did not complete successfully") + end + end + + # Also spit out the hash of the archive file + hash = open(out_path, "r") do f + return bytes2hex(sha256(f)) + end + if verbose + info("SHA256 of $(basename(out_path)): $(hash)") + end + + return out_path, hash +end diff --git a/ext/BinaryProvider/src/Products.jl b/ext/BinaryProvider/src/Products.jl new file mode 100644 index 0000000000000..73e9445fc80a0 --- /dev/null +++ b/ext/BinaryProvider/src/Products.jl @@ -0,0 +1,345 @@ +export Product, LibraryProduct, FileProduct, ExecutableProduct, satisfied, locate, @write_deps_file + +""" +A `Product` is an expected result after building or installation of a package. +""" +abstract Product + +""" +A `LibraryProduct` is a special kind of `Product` that not only needs to exist, +but needs to be `dlopen()`'able. You must know which directory the library +will be installed to, and its name, e.g. to build a `LibraryProduct` that +refers to `"/lib/libnettle.so"`, the "directory" would be "/lib", and the +"libname" would be "libnettle". +""" +immutable LibraryProduct <: Product + dir_path::String + libname::String + + """ + `LibraryProduct(prefix::Prefix, libname::AbstractString)` + + Declares a `LibraryProduct` that points to a library located within the + `libdir` of the given `Prefix`, with a name containing `libname`. 
As an + example, given that `libdir(prefix)` is equal to `usr/lib`, and `libname` + is equal to `libnettle`, this would be satisfied by the following paths: + + usr/lib/libnettle.so + usr/lib/libnettle.so.6 + usr/lib/libnettle.6.dylib + usr/lib/libnettle-6.dll + + Libraries matching the search pattern are rejected if they are not + `dlopen()`'able. + """ + function LibraryProduct(prefix::Prefix, libname::AbstractString) + return LibraryProduct(libdir(prefix), libname) + end + + """ + `LibraryProduct(dir_path::AbstractString, libname::AbstractString)` + + For finer-grained control over `LibraryProduct` locations, you may directly + pass in the `dir_path` instead of auto-inferring it from `libdir(prefix)`. + """ + function LibraryProduct(dir_path::AbstractString, libname::AbstractString) + return new(dir_path, libname) + end +end + +""" +locate(lp::LibraryProduct; verbose::Bool = false, + platform::Platform = platform_key()) + +If the given library exists (under any reasonable name) and is `dlopen()`able, +(assuming it was built for the current platform) return its location. Note +that the `dlopen()` test is only run if the current platform matches the given +`platform` keyword argument, as cross-compiled libraries cannot be `dlopen()`ed +on foreign platforms. 
+""" +function locate(lp::LibraryProduct; verbose::Bool = false, + platform::Platform = platform_key()) + if !isdir(lp.dir_path) + if verbose + info("Directory $(lp.dir_path) does not exist!") + end + return nothing + end + for f in readdir(lp.dir_path) + # Skip any names that aren't a valid dynamic library for the given + # platform (note this will cause problems if something compiles a `.so` + # on OSX, for instance) + if !valid_dl_path(f, platform) + continue + end + + if verbose + info("Found a valid dl path $(f) while looking for $(lp.libname)") + end + + # If we found something that is a dynamic library, let's check to see + # if it matches our libname: + if startswith(basename(f), lp.libname) + dl_path = abspath(joinpath(lp.dir_path), f) + if verbose + info("$(dl_path) matches our search criteria of $(lp.libname)") + end + + # If it does, try to `dlopen()` it if the current platform is good + if platform == platform_key() + hdl = Libdl.dlopen_e(dl_path) + if hdl == C_NULL + if verbose + info("$(dl_path) cannot be dlopen'ed") + end + else + # Hey! It worked! Yay! + Libdl.dlclose(hdl) + return dl_path + end + else + # If the current platform doesn't match, then just trust in our + # cross-compilers and go with the flow + return dl_path + end + end + end + + if verbose + info("Could not locate $(lp.libname) inside $(lp.dir_path)") + end + return nothing +end + +""" +An `ExecutableProduct` is a `Product` that represents an executable file. + +On all platforms, an ExecutableProduct checks for existence of the file. On +non-Windows platforms, it will check for the executable bit being set. On +Windows platforms, it will check that the file ends with ".exe", (adding it on +automatically, if it is not already present). 
+"""
+immutable ExecutableProduct <: Product
+    path::AbstractString
+
+    """
+    `ExecutableProduct(prefix::Prefix, binname::AbstractString)`
+
+    Declares an `ExecutableProduct` that points to an executable located within
+    the `bindir` of the given `Prefix`, named `binname`.
+    """
+    function ExecutableProduct(prefix::Prefix, binname::AbstractString)
+        return ExecutableProduct(joinpath(bindir(prefix), binname))
+    end
+
+    """
+    `ExecutableProduct(binpath::AbstractString)`
+
+    For finer-grained control over `ExecutableProduct` locations, you may directly
+    pass in the full `binpath` instead of auto-inferring it from `bindir(prefix)`.
+    """
+    function ExecutableProduct(binpath::AbstractString)
+        return new(binpath)
+    end
+end
+
+"""
+`locate(ep::ExecutableProduct; platform::Platform = platform_key(),
+        verbose::Bool = false)`
+
+If the given executable file exists and is executable, return its path.
+
+On all platforms, an ExecutableProduct checks for existence of the file. On
+non-Windows platforms, it will check for the executable bit being set. On
+Windows platforms, it will check that the file ends with ".exe", (adding it on
+automatically, if it is not already present).
+"""
+function locate(ep::ExecutableProduct; platform::Platform = platform_key(),
+                verbose::Bool = false)
+    # On windows, we always slap an .exe onto the end if it doesn't already
+    # exist, as Windows won't execute files that don't have a .exe at the end.
+ path = if platform isa Windows && !endswith(ep.path, ".exe") + "$(ep.path).exe" + else + ep.path + end + + if !isfile(path) + if verbose + info("$(ep.path) does not exist, reporting unsatisfied") + end + return nothing + end + + # If the file is not executable, fail out (unless we're on windows since + # windows doesn't honor these permissions on its filesystems) + @static if !is_windows() + if uperm(path) & 0x1 == 0 + if verbose + info("$(path) is not executable, reporting unsatisfied") + end + return nothing + end + end + + return path +end + +""" +A `FileProduct` represents a file that simply must exist to be satisfied. +""" +immutable FileProduct <: Product + path::AbstractString +end + +""" +locate(fp::FileProduct; platform::Platform = platform_key(), + verbose::Bool = false) + +If the given file exists, return its path. The platform argument is ignored +here, but included for uniformity. +""" +function locate(fp::FileProduct; platform::Platform = platform_key(), + verbose::Bool = false) + if isfile(fp.path) + if verbose + info("FileProduct $(fp.path) does not exist") + end + return fp.path + end + return nothing +end + + +""" +`satisfied(p::Product; platform::Platform = platform_key(), + verbose::Bool = false)` + +Given a `Product`, return `true` if that `Product` is satisfied, e.g. whether +a file exists that matches all criteria setup for that `Product`. +""" +function satisfied(p::Product; platform::Platform = platform_key(), + verbose::Bool = false) + return locate(p; platform=platform, verbose=verbose) != nothing +end + + +""" +`@write_deps_file(products...)` + +Helper macro to generate a `deps.jl` file out of a mapping of variable name +to `Product` objects. Call using something like: + + fooifier = ExecutableProduct(...) + libbar = LibraryProduct(...) + @write_deps_file fooifier libbar + +If any `Product` object cannot be satisfied (e.g. `LibraryProduct` objects must +be `dlopen()`-able, `FileProduct` objects must exist on the filesystem, etc...) 
+this macro will error out. Ensure that you have used `install()` to install +the binaries you wish to write a `deps.jl` file for, and, optionally that you +have used `activate()` on the `Prefix` in which the binaries were installed so +as to make sure that the binaries are locatable. + +The result of this macro call is a `deps.jl` file containing variables named +the same as the keys of the passed-in dictionary, holding the full path to the +installed binaries. Given the example above, it would contain code similar to: + + global const fooifier = "/deps/usr/bin/fooifier" + global const libbar = "/deps/usr/lib/libbar.so" + +This file is intended to be `include()`'ed from within the `__init__()` method +of your package. Note that all files are checked for consistency on package +load time, and if an error is discovered, package loading will fail, asking +the user to re-run `Pkg.build("package_name")`. +""" +macro write_deps_file(capture...) + # props to @tshort for his macro wizardry + const names = :($(capture)) + const products = esc(Expr(:tuple, capture...)) + + # We have to create this dummy_source, because we cannot, in a single line, + # have both `@__FILE__` and `__source__` interpreted by the same julia. + const dummy_source = VERSION >= v"0.7.0-" ? __source__.file : "" + + return quote + # First pick up important pieces of information from the call-site + const source = VERSION >= v"0.7.0-" ? $("$(dummy_source)") : @__FILE__ + const depsjl_path = joinpath(dirname(source), "deps.jl") + const package_name = basename(dirname(dirname(source))) + const platform = platform_key() + escape_path = path -> replace(path, "\\", "\\\\") + + const rebuild = strip(""" + Please re-run Pkg.build(\\\"$(package_name)\\\"), and restart Julia. + """) + + # Begin by ensuring that we can satisfy every product RIGHT NOW + for product in $(products) + # Check to make sure that we've passed in the right kind of + # objects, e.g. 
subclasses of `Product` + if !(typeof(product) <: Product) + msg = "Cannot @write_deps_file for $product, which is " * + "of type $(typeof(product)), which is not a " * + "subtype of `Product`!" + error(msg) + end + + if !satisfied(product; verbose=true) + error("$product is not satisfied, cannot generate deps.jl!") + end + end + + # If things look good, let's generate the `deps.jl` file + open(depsjl_path, "w") do depsjl_file + # First, dump the preamble + println(depsjl_file, strip(""" + ## This file autogenerated by BinaryProvider.@write_deps_file. + ## Do not edit. + """)) + + # Next, spit out the paths of all our products + for idx in 1:$(length(capture)) + product = $(products)[idx] + name = $(names)[idx] + + # Escape the location so that e.g. Windows platforms are happy + # with the backslashes in a string literal + escaped_path = escape_path(locate(product, platform=platform)) + println(depsjl_file, strip(""" + const $(name) = \"$(escaped_path)\" + """)) + end + + # Next, generate a function to check they're all on the up-and-up + println(depsjl_file, "function check_deps()") + + for idx in 1:$(length(capture)) + product = $(products)[idx] + name = $(names)[idx] + + # Add a `global $(name)` + println(depsjl_file, " global $(name)"); + + # Check that any file exists + println(depsjl_file, """ + if !isfile($(name)) + error("\$($(name)) does not exist, $(rebuild)") + end + """) + + # For Library products, check that we can dlopen it: + if typeof(product) <: LibraryProduct + println(depsjl_file, """ + if Libdl.dlopen_e($(name)) == C_NULL + error("\$($(name)) cannot be opened, $(rebuild)") + end + """) + end + end + + # Close the `check_deps()` function + println(depsjl_file, "end") + end + end +end diff --git a/ext/BinaryProvider/test/LibFoo.jl/.gitignore b/ext/BinaryProvider/test/LibFoo.jl/.gitignore new file mode 100644 index 0000000000000..8adc8c2a1fbfe --- /dev/null +++ b/ext/BinaryProvider/test/LibFoo.jl/.gitignore @@ -0,0 +1,2 @@ +deps/usr +deps/deps.jl 
diff --git a/ext/BinaryProvider/test/LibFoo.jl/README.md b/ext/BinaryProvider/test/LibFoo.jl/README.md new file mode 100644 index 0000000000000..cd6e50071ec27 --- /dev/null +++ b/ext/BinaryProvider/test/LibFoo.jl/README.md @@ -0,0 +1,3 @@ +# LibFoo.jl + +Example Julia Package, showing off how to install binaries provided through `BinaryProvider.jl`. Look at `deps/build.jl` for an example of how to download and install binaries, then generate a `deps/deps.jl` file that contains the paths to desired binary objects. Look at `src/LibFoo.jl` for an example of how to load the `deps/deps.jl` file, and how to use the binary objects stored within. \ No newline at end of file diff --git a/ext/BinaryProvider/test/LibFoo.jl/deps/build.jl b/ext/BinaryProvider/test/LibFoo.jl/deps/build.jl new file mode 100644 index 0000000000000..a3451ab02665a --- /dev/null +++ b/ext/BinaryProvider/test/LibFoo.jl/deps/build.jl @@ -0,0 +1,37 @@ +using BinaryProvider + +# BinaryProvider support +const prefix = Prefix(joinpath(dirname(@__FILE__),"usr")) +const platform = platform_key() + +# These are the two binary objects we care about +libfoo = LibraryProduct(prefix, "libfoo") +fooifier = ExecutableProduct(prefix, "fooifier") + +# This is where we download things from, for different platforms +const bin_prefix = "https://github.com/staticfloat/small_bin/raw/74b7fd81e3fbc8963b14b0ebbe5421e270d8bdcf" +const download_info = Dict( + Linux(:i686) => ("$bin_prefix/libfoo.i686-linux-gnu.tar.gz", "1398353bcbbd88338189ece9c1d6e7c508df120bc4f93afbaed362a9f91358ff"), + Linux(:x86_64) => ("$bin_prefix/libfoo.x86_64-linux-gnu.tar.gz", "b9d57a6e032a56b1f8641771fa707523caa72f1a2e322ab99eeeb011f13ad9f3"), + Linux(:aarch64) => ("$bin_prefix/libfoo.aarch64-linux-gnu.tar.gz", "19d9da0e6e7fb506bf4889eb91e936fda43493a39cd4fd7bd5d65506cede6f95"), + Linux(:armv7l) => ("$bin_prefix/libfoo.arm-linux-gnueabihf.tar.gz", "8e33c1a0e091e6e5b8fcb902e5d45329791bb57763ee9cbcde49c1ec9bd8532a"), + Linux(:ppc64le) => 
("$bin_prefix/libfoo.powerpc64le-linux-gnu.tar.gz", "b48a64d48be994ec99b1a9fb60e0af7f4415a57596518cb90a340987b79fad81"),
+    MacOS() => ("$bin_prefix/libfoo.x86_64-apple-darwin14.tar.gz", "661b71edb433ab334b0fef70db3b5c45d35f2b3bee0d244f54875f1ec899c10f"),
+    Windows(:i686) => ("$bin_prefix/libfoo.i686-w64-mingw32.tar.gz", "3d4a8d4bf0169007a42d809a1d560083635b1540a1bc4a42108841dcb6d2aaea"),
+    Windows(:x86_64) => ("$bin_prefix/libfoo.x86_64-w64-mingw32.tar.gz", "2d08fbc9a534cd021f36b6bbe86ddabb2dafbedeb589581240aa4a8c5b896055"),
+)
+if platform in keys(download_info)
+    # Grab the url and tarball hash for this particular platform
+    url, tarball_hash = download_info[platform]
+
+    # Build a BinaryPackage from the metadata, and install it
+    binpkg = BinaryPackage(url, tarball_hash, platform, [libfoo, fooifier])
+    install(binpkg; prefix=prefix, force=true, verbose=true)
+
+    # Finally, write out a deps file containing paths to libfoo and fooifier
+    @write_deps_file libfoo fooifier
+else
+    error("Your platform $(Sys.MACHINE) is not recognized, we cannot install Libfoo!")
+end
+
+
diff --git a/ext/BinaryProvider/test/LibFoo.jl/src/LibFoo.jl b/ext/BinaryProvider/test/LibFoo.jl/src/LibFoo.jl
new file mode 100644
index 0000000000000..c6c969d7ec143
--- /dev/null
+++ b/ext/BinaryProvider/test/LibFoo.jl/src/LibFoo.jl
@@ -0,0 +1,38 @@
+__precompile__()
+module LibFoo
+
+# Load in `deps.jl`, complaining if it does not exist
+const depsjl_path = joinpath(dirname(@__FILE__), "..", "deps", "deps.jl")
+if !isfile(depsjl_path)
+    error("LibFoo not installed properly, run Pkg.build(\"LibFoo\"), restart Julia and try again")
+end
+include(depsjl_path)
+
+# Module initialization function
+function __init__()
+    # Always check your dependencies from `deps.jl`
+    check_deps()
+end
+
+# Export our two super-useful functions
+export call_fooifier, call_libfoo
+
+
+# Function to call the `fooifier` binary with the given arguments
+function call_fooifier(a, b)
+    global fooifier
+    return parse(Float64,
readchomp(`$fooifier $a $b`)) +end + +# Function to call into the `libfoo` shared library with the given arguments +function call_libfoo(a, b) + global libfoo + + hdl = Libdl.dlopen_e(libfoo) + @assert hdl != C_NULL "Could not open $libfoo" + foo = Libdl.dlsym_e(hdl, :foo) + @assert foo != C_NULL "Could not find foo() within $libfoo" + return ccall(foo, Cdouble, (Cdouble, Cdouble), a, b) +end + +end #module LibFoo \ No newline at end of file diff --git a/ext/BinaryProvider/test/LibFoo.jl/test/runtests.jl b/ext/BinaryProvider/test/LibFoo.jl/test/runtests.jl new file mode 100644 index 0000000000000..bf11bf06fa998 --- /dev/null +++ b/ext/BinaryProvider/test/LibFoo.jl/test/runtests.jl @@ -0,0 +1,5 @@ +using Base.Test +using LibFoo + +@test call_fooifier(2.2, 1.1) ≈ 2*2.2^2 - 1.1 +@test call_libfoo(2.2, 1.1) ≈ 2*2.2^2 - 1.1 \ No newline at end of file diff --git a/ext/BinaryProvider/test/output_tests/fail.sh b/ext/BinaryProvider/test/output_tests/fail.sh new file mode 100755 index 0000000000000..6e3c35eb4f8e1 --- /dev/null +++ b/ext/BinaryProvider/test/output_tests/fail.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +echo "1" +sleep 1 +echo "2" >&2 +exit 1 \ No newline at end of file diff --git a/ext/BinaryProvider/test/output_tests/kill.sh b/ext/BinaryProvider/test/output_tests/kill.sh new file mode 100755 index 0000000000000..e0251a960fcb5 --- /dev/null +++ b/ext/BinaryProvider/test/output_tests/kill.sh @@ -0,0 +1,10 @@ +#!/bin/sh + +SELF=$$ +(sleep 3; kill $SELF) & + +# Count 1, 2, 3 then kill yourself +for i in $(seq 1 10); do + echo $i + sleep 2 +done \ No newline at end of file diff --git a/ext/BinaryProvider/test/output_tests/long.sh b/ext/BinaryProvider/test/output_tests/long.sh new file mode 100755 index 0000000000000..e724aaddff662 --- /dev/null +++ b/ext/BinaryProvider/test/output_tests/long.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +for i in $(seq 1 100); do + echo $i +done \ No newline at end of file diff --git a/ext/BinaryProvider/test/output_tests/newlines.sh 
b/ext/BinaryProvider/test/output_tests/newlines.sh new file mode 100755 index 0000000000000..ad58e0fdda112 --- /dev/null +++ b/ext/BinaryProvider/test/output_tests/newlines.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +# Ideally, all three of these will give the same result according to lines +printf "marco\npolo\n" +printf "marco\rpolo\r" +printf "marco\r\npolo\r\n" diff --git a/ext/BinaryProvider/test/output_tests/simple.sh b/ext/BinaryProvider/test/output_tests/simple.sh new file mode 100755 index 0000000000000..a61ca06966e4b --- /dev/null +++ b/ext/BinaryProvider/test/output_tests/simple.sh @@ -0,0 +1,8 @@ +#!/bin/sh +echo 1 +sleep 1 +echo 2 >&2 +sleep 1 +echo 3 +sleep 1 +echo 4 \ No newline at end of file diff --git a/ext/BinaryProvider/test/runtests.jl b/ext/BinaryProvider/test/runtests.jl new file mode 100644 index 0000000000000..faa32d889968b --- /dev/null +++ b/ext/BinaryProvider/test/runtests.jl @@ -0,0 +1,575 @@ +using BinaryProvider +using Compat +using Compat.Test +using SHA + +# The platform we're running on +const platform = platform_key() + +# Useful command to launch `sh` on any platform +const sh = gen_sh_cmd + +# Output of a few scripts we are going to run +const simple_out = "1\n2\n3\n4\n" +const long_out = join(["$(idx)\n" for idx in 1:100], "") +const newlines_out = join(["marco$d\npolo$d\n" for d in ("","\r","\r\n")], "") + +# Explicitly probe platform engines in verbose mode to get coverage and make +# CI debugging easier +BinaryProvider.probe_platform_engines!(;verbose=true) + +@testset "OutputCollector" begin + cd("output_tests") do + # Collect the output of `simple.sh`` + oc = OutputCollector(sh(`./simple.sh`)) + + # Ensure we can wait on it and it exited properly + @test wait(oc) + + # Ensure further waits are fast and still return 0 + let + tstart = time() + @test wait(oc) + @test time() - tstart < 0.1 + end + + # Test that we can merge properly + @test merge(oc) == simple_out + + # Test that merging twice works + @test merge(oc) == simple_out + + # 
Test that `tail()` gives the same output as well + @test tail(oc) == simple_out + + # Test that colorization works + let + red = Base.text_colors[:red] + def = Base.text_colors[:default] + gt = "1\n$(red)2\n$(def)3\n4\n" + @test merge(oc; colored=true) == gt + @test tail(oc; colored=true) == gt + end + + # Test that we can grab stdout and stderr separately + @test stdout(oc) == "1\n3\n4\n" + @test stderr(oc) == "2\n" + end + + # Next test a much longer output program + cd("output_tests") do + oc = OutputCollector(sh(`./long.sh`)) + + # Test that it worked, we can read it, and tail() works + @test wait(oc) + @test merge(oc) == long_out + @test tail(oc; len=10) == join(["$(idx)\n" for idx in 91:100], "") + end + + # Next, test a command that fails + cd("output_tests") do + oc = OutputCollector(sh(`./fail.sh`)) + + @test !wait(oc) + @test merge(oc) == "1\n2\n" + end + + # Next, test a command that kills itself (NOTE: This doesn't work on windows. sigh.) + @static if !is_windows() + cd("output_tests") do + oc = OutputCollector(sh(`./kill.sh`)) + + @test !wait(oc) + @test stdout(oc) == "1\n2\n" + end + end + + # Next, test reading the output of a pipeline() + grepline = pipeline(sh(`-c 'printf "Hello\nWorld\nJulia"'`), `grep ul`) + oc = OutputCollector(grepline) + + @test wait(oc) + @test merge(oc) == "Julia\n" + + # Next, test that \r and \r\n are treated like \n + cd("output_tests") do + oc = OutputCollector(sh(`./newlines.sh`)) + + @test wait(oc) + @test stdout(oc) == newlines_out + end + + # Next, test that tee'ing to a stream works + cd("output_tests") do + ios = IOBuffer() + oc = OutputCollector(sh(`./simple.sh`); tee_stream=ios, verbose=true) + @test wait(oc) + @test merge(oc) == simple_out + seekstart(ios) + println(readstring(ios)) + end +end + +@testset "PlatformNames" begin + # Ensure the platform type constructors are well behaved + @test_throws ArgumentError Linux(:not_a_platform) + @test_throws ArgumentError MacOS(:i686) + @test_throws ArgumentError 
Windows(:armv7l) + @test_throws ArgumentError Linux(:x86_64, :crazy_libc) + + # Test that our platform_dlext stuff works + @test platform_dlext(Linux(:x86_64)) == platform_dlext(Linux(:i686)) + @test platform_dlext(Windows(:x86_64)) == platform_dlext(Windows(:i686)) + @test platform_dlext(MacOS()) != platform_dlext(Linux(:armv7l)) + + # Test some valid dynamic library paths + @test valid_dl_path("libfoo.so.1.2.3", Linux(:x86_64)) + @test valid_dl_path("libfoo-1.dll", Windows(:x86_64)) + @test valid_dl_path("libfoo.1.2.3.dylib", MacOS()) + @test !valid_dl_path("libfoo.dylib", Linux(:x86_64)) + @test !valid_dl_path("libfoo.so", Windows(:x86_64)) + + # Make sure the platform_key() with explicit triplet works or doesn't + @test platform_key("x86_64-linux-gnu") == Linux(:x86_64) + @test platform_key("i686-unknown-linux-gnu") == Linux(:i686) + @test platform_key("x86_64-apple-darwin14") == MacOS() + @test platform_key("armv7l-pc-linux-gnueabihf") == Linux(:armv7l) + @test platform_key("aarch64-unknown-linux-gnu") == Linux(:aarch64) + @test platform_key("powerpc64le-linux-gnu") == Linux(:ppc64le) + @test platform_key("x86_64-w64-mingw32") == Windows(:x86_64) + @test platform_key("i686-w64-mingw32") == Windows(:i686) + @test_throws ArgumentError platform_key("invalid-triplet-yo") + @test_throws ArgumentError platform_key("aarch64-unknown-gnueabihf") + @test_throws ArgumentError platform_key("x86_64-w32-mingw64") + + # Test that we can indeed ask if something is linux or windows, etc... 
+ @test Compat.Sys.islinux(Linux(:aarch64)) + @test !Compat.Sys.islinux(Windows(:x86_64)) + @test Compat.Sys.iswindows(Windows(:i686)) + @test !Compat.Sys.iswindows(Linux(:x86_64)) + @test Compat.Sys.isapple(MacOS()) + @test !Compat.Sys.isapple(Linux(:ppc64le)) + + # Test that every supported platform is _something_ + if isdefined(Base.Sys, :isapple) + isbasesomething(p) = Sys.islinux(p) || Sys.iswindows(p) || Sys.isapple(p) + @test all(isbasesomething, supported_platforms()) + end + issomething(p) = Compat.Sys.islinux(p) || Compat.Sys.iswindows(p) || + Compat.Sys.isapple(p) + @test all(issomething, supported_platforms()) + + @test wordsize(Linux(:i686)) == wordsize(Linux(:armv7l)) == 32 + @test wordsize(MacOS()) == wordsize(Linux(:aarch64)) == 64 + + @test triplet(Windows(:i686)) == "i686-w64-mingw32" + @test triplet(Linux(:x86_64, :musl)) == "x86_64-linux-musl" + @test triplet(Linux(:armv7l, :musl)) == "arm-linux-musleabihf" + @test triplet(Linux(:x86_64)) == "x86_64-linux-gnu" + @test triplet(Linux(:armv7l)) == "arm-linux-gnueabihf" + @test triplet(MacOS()) == "x86_64-apple-darwin14" +end + +@testset "Prefix" begin + mktempdir() do temp_dir + prefix = Prefix(temp_dir) + + # Test that it's taking the absolute path + @test prefix.path == abspath(temp_dir) + + # Test that `bindir()`, `libdir()` and `includedir()` all work + for dir in unique([bindir(prefix), libdir(prefix), includedir(prefix)]) + @test !isdir(dir) + mkpath(dir) + end + + # Create a little script within the bindir to ensure we can run it + ppt_path = joinpath(bindir(prefix), "prefix_path_test.sh") + open(ppt_path, "w") do f + write(f, "#!/bin/sh\n") + write(f, "echo yolo\n") + end + chmod(ppt_path, 0o775) + + # Test that activation adds certain paths to our environment variables + activate(prefix) + + # PATH[1] should be "/bin" now + @test BinaryProvider.split_PATH()[1] == bindir(prefix) + @test Libdl.DL_LOAD_PATH[1] == libdir(prefix) + + # Test we can run the script we dropped within this prefix. 
Once again, + # something about Windows | busybox | Julia won't pick this up even though + # the path clearly points to the file. :( + @static if !is_windows() + @test success(sh(`$(ppt_path)`)) + @test success(sh(`prefix_path_test.sh`)) + end + + # Now deactivate and make sure that all traces are gone + deactivate(prefix) + @test BinaryProvider.split_PATH()[1] != bindir(prefix) + @test Libdl.DL_LOAD_PATH[1] != libdir(prefix) + end +end + +@testset "Products" begin + temp_prefix() do prefix + # Test that basic satisfication is not guaranteed + e_path = joinpath(bindir(prefix), "fooifier") + l_path = joinpath(libdir(prefix), "libfoo.$(Libdl.dlext)") + e = ExecutableProduct(prefix, "fooifier") + ef = FileProduct(e_path) + l = LibraryProduct(prefix, "libfoo") + lf = FileProduct(l_path) + + @test !satisfied(e; verbose=true) + @test !satisfied(ef; verbose=true) + @test !satisfied(l, verbose=true) + @test !satisfied(lf, verbose=true) + + # Test that simply creating a file that is not executable doesn't + # satisfy an Executable Product (and say it's on Linux so it doesn't + # complain about the lack of an .exe extension) + mkpath(bindir(prefix)) + touch(e_path) + @test satisfied(ef, verbose=true) + @static if !is_windows() + # Windows doesn't care about executable bit, grumble grumble + @test !satisfied(e, verbose=true, platform=Linux(:x86_64)) + end + + # Make it executable and ensure this does satisfy the Executable + chmod(e_path, 0o777) + @test satisfied(e, verbose=true, platform=Linux(:x86_64)) + + # Remove it and add a `$(path).exe` version to check again, this + # time saying it's a Windows executable + rm(e_path; force=true) + touch("$(e_path).exe") + chmod("$(e_path).exe", 0o777) + @test locate(e, platform=Windows(:x86_64)) == "$(e_path).exe" + + # Test that simply creating a library file doesn't satisfy it if we are + # testing something that matches the current platform's dynamic library + # naming scheme, because it must be `dlopen()`able. 
+ mkpath(libdir(prefix)) + touch(l_path) + @test satisfied(lf, verbose=true) + @test !satisfied(l, verbose=true) + + # But if it is from a different platform, simple existence will be + # enough to satisfy a LibraryProduct + @static if is_windows() + l_path = joinpath(libdir(prefix), "libfoo.so") + touch(l_path) + @test satisfied(l, verbose=true, platform=Linux(:x86_64)) + else + l_path = joinpath(libdir(prefix), "libfoo.dll") + touch(l_path) + @test satisfied(l, verbose=true, platform=Windows(:x86_64)) + end + end + + # Ensure that the test suite thinks that these libraries are foreign + # so that it doesn't try to `dlopen()` them: + foreign_platform = @static if platform_key() == Linux(:aarch64) + # Arbitrary architecture that is not dlopen()'able + Linux(:ppc64le) + else + # If we're not Linux(:aarch64), then say the libraries are + Linux(:aarch64) + end + + # Test for valid library name permutations + for ext in ["1.so", "so", "so.1", "so.1.2", "so.1.2.3"] + temp_prefix() do prefix + l_path = joinpath(libdir(prefix), "libfoo.$ext") + l = LibraryProduct(prefix, "libfoo") + mkdir(dirname(l_path)) + touch(l_path) + @test satisfied(l; verbose=true, platform=foreign_platform) + end + end + + # Test for invalid library name permutations + for ext in ["so.1.2.3a", "so.1.a"] + temp_prefix() do prefix + l_path = joinpath(libdir(prefix), "libfoo.$ext") + l = LibraryProduct(prefix, "libfoo") + mkdir(dirname(l_path)) + touch(l_path) + @test !satisfied(l; verbose=true, platform=foreign_platform) + end + end +end + +@testset "Packaging" begin + # Clear out previous build products + for f in readdir(".") + if !endswith(f, ".tar.gz") || !endswith(f, ".sha256") + continue + end + rm(f; force=true) + end + + # Gotta set this guy up beforehand + tarball_path = nothing + tarball_hash = nothing + + temp_prefix() do prefix + # Create random files + mkpath(bindir(prefix)) + mkpath(libdir(prefix)) + bar_path = joinpath(bindir(prefix), "bar.sh") + open(bar_path, "w") do f + write(f, 
"#!/bin/sh\n") + write(f, "echo yolo\n") + end + baz_path = joinpath(libdir(prefix), "baz.so") + open(baz_path, "w") do f + write(f, "this is not an actual .so\n") + end + + # Next, package it up as a .tar.gz file + tarball_path, tarball_hash = package(prefix, "./libfoo"; verbose=true) + @test isfile(tarball_path) + + # Check that we are calculating the hash properly + tarball_hash_check = open(tarball_path, "r") do f + bytes2hex(sha256(f)) + end + @test tarball_hash_check == tarball_hash + + # Test that packaging into a file that already exists fails + @test_throws ErrorException package(prefix, "./libfoo") + end + + # Test that we can inspect the contents of the tarball + contents = list_tarball_files(tarball_path) + const libdir_name = is_windows() ? "bin" : "lib" + @test joinpath("bin", "bar.sh") in contents + @test joinpath(libdir_name, "baz.so") in contents + + # Install it within a new Prefix + temp_prefix() do prefix + # Install the thing + @test install(tarball_path, tarball_hash; prefix=prefix, verbose=true) + + # Ensure we can use it + bar_path = joinpath(bindir(prefix), "bar.sh") + baz_path = joinpath(libdir(prefix), "baz.so") + + # Ask for the manifest that contains these files to ensure it works + manifest_path = manifest_for_file(bar_path; prefix=prefix) + @test isfile(manifest_path) + manifest_path = manifest_for_file(baz_path; prefix=prefix) + @test isfile(manifest_path) + + # Ensure that manifest_for_file doesn't work on nonexistant files + @test_throws ErrorException manifest_for_file("nonexistant"; prefix=prefix) + + # Ensure that manifest_for_file doesn't work on orphan files + orphan_path = joinpath(bindir(prefix), "orphan_file") + touch(orphan_path) + @test isfile(orphan_path) + @test_throws ErrorException manifest_for_file(orphan_path; prefix=prefix) + + # Ensure that trying to install again over our existing files is an error + @test_throws ErrorException install(tarball_path, tarball_path; prefix=prefix) + + # Ensure we can uninstall this 
tarball + @test uninstall(manifest_path; verbose=true) + @test !isfile(bar_path) + @test !isfile(baz_path) + @test !isfile(manifest_path) + + # Ensure that we don't want to install tarballs from other platforms + cp(tarball_path, "./libfoo_juliaos64.tar.gz") + @test_throws ArgumentError install("./libfoo_juliaos64.tar.gz", tarball_hash; prefix=prefix) + rm("./libfoo_juliaos64.tar.gz"; force=true) + + # Ensure that hash mismatches throw errors + fake_hash = reverse(tarball_hash) + @test_throws ErrorException install(tarball_path, fake_hash; prefix=prefix) + end + + rm(tarball_path; force=true) + rm("$(tarball_path).sha256"; force=true) +end + +@testset "Verification" begin + temp_prefix() do prefix + foo_path = joinpath(prefix, "foo") + open(foo_path, "w") do file + write(file, "test") + end + foo_hash = bytes2hex(sha256("test")) + + # Check that verifying with the right hash works + info("This should say; no hash cache found") + @test verify(foo_path, foo_hash; verbose=true) + + # Check that it created a .sha256 file + @test isfile("$(foo_path).sha256") + + # Check that it verifies the second time around properly + info("This should say; hash cache is consistent") + @test verify(foo_path, foo_hash; verbose=true) + + # Sleep for imprecise filesystems + sleep(2) + + # Get coverage of messing with different parts of the verification chain + touch(foo_path) + info("This should say; file has been modified") + @test verify(foo_path, foo_hash; verbose=true) + @test_throws ErrorException verify(foo_path, "0"^32; verbose=true) + touch(foo_path) + @test verify(foo_path, foo_hash; verbose=true) + open("$(foo_path).sha256", "w") do file + write(file, "this is not the right hash") + end + info("This should say; hash has changed") + @test verify(foo_path, foo_hash; verbose=true) + end +end + +# Use `build_libfoo_tarball.jl` in the BinDeps2.jl repository to generate more of these +const bin_prefix = 
"https://github.com/staticfloat/small_bin/raw/74b7fd81e3fbc8963b14b0ebbe5421e270d8bdcf" +const libfoo_downloads = Dict( + Linux(:i686) => ("$bin_prefix/libfoo.i686-linux-gnu.tar.gz", "1398353bcbbd88338189ece9c1d6e7c508df120bc4f93afbaed362a9f91358ff"), + Linux(:x86_64) => ("$bin_prefix/libfoo.x86_64-linux-gnu.tar.gz", "b9d57a6e032a56b1f8641771fa707523caa72f1a2e322ab99eeeb011f13ad9f3"), + Linux(:aarch64) => ("$bin_prefix/libfoo.aarch64-linux-gnu.tar.gz", "19d9da0e6e7fb506bf4889eb91e936fda43493a39cd4fd7bd5d65506cede6f95"), + Linux(:armv7l) => ("$bin_prefix/libfoo.arm-linux-gnueabihf.tar.gz", "8e33c1a0e091e6e5b8fcb902e5d45329791bb57763ee9cbcde49c1ec9bd8532a"), + Linux(:ppc64le) => ("$bin_prefix/libfoo.powerpc64le-linux-gnu.tar.gz", "b48a64d48be994ec99b1a9fb60e0af7f4415a57596518cb90a340987b79fad81"), + MacOS() => ("$bin_prefix/libfoo.x86_64-apple-darwin14.tar.gz", "661b71edb433ab334b0fef70db3b5c45d35f2b3bee0d244f54875f1ec899c10f"), + Windows(:i686) => ("$bin_prefix/libfoo.i686-w64-mingw32.tar.gz", "3d4a8d4bf0169007a42d809a1d560083635b1540a1bc4a42108841dcb6d2aaea"), + Windows(:x86_64) => ("$bin_prefix/libfoo.x86_64-w64-mingw32.tar.gz", "2d08fbc9a534cd021f36b6bbe86ddabb2dafbedeb589581240aa4a8c5b896055"), +) + +# Test manually downloading and using libfoo +@testset "Downloading" begin + temp_prefix() do prefix + if !haskey(libfoo_downloads, platform) + warn("Platform $platform does not have a libfoo download, skipping download tests") + else + # Test a good download works + url, hash = libfoo_downloads[platform] + @test install(url, hash; prefix=prefix, verbose=true) + + fooifier = ExecutableProduct(prefix, "fooifier") + libfoo = LibraryProduct(prefix, "libfoo") + + @test satisfied(fooifier; verbose=true) + @test satisfied(libfoo; verbose=true) + + fooifier_path = locate(fooifier) + libfoo_path = locate(libfoo) + + + # We know that foo(a, b) returns 2*a^2 - b + result = 2*2.2^2 - 1.1 + + # Test that we can invoke fooifier + @test !success(`$fooifier_path`) + @test 
success(`$fooifier_path 1.5 2.0`) + @test parse(Float64,readchomp(`$fooifier_path 2.2 1.1`)) ≈ result + + # Test that we can dlopen() libfoo and invoke it directly + hdl = Libdl.dlopen_e(libfoo_path) + @test hdl != C_NULL + foo = Libdl.dlsym_e(hdl, :foo) + @test foo != C_NULL + @test ccall(foo, Cdouble, (Cdouble, Cdouble), 2.2, 1.1) ≈ result + Libdl.dlclose(hdl) + + # Test uninstallation + @test uninstall(manifest_from_url(url; prefix=prefix); verbose=true) + + # Test that download_verify_unpack() works + download_verify_unpack(url, hash, prefix.path) + @test satisfied(fooifier; verbose=true) + @test satisfied(libfoo; verbose=true) + + # Test that download_verify twice in a row works, and that mucking + # with the file causes a redownload if `force` is true: + tmpfile = joinpath(prefix, "libfoo.tar.gz") + @test download_verify(url, hash, tmpfile; verbose=true) + @test download_verify(url, hash, tmpfile; verbose=true) + + # We sleep for at least a second here so that filesystems with low + # precision in their mtime implementations don't get confused + sleep(2) + + open(tmpfile, "w") do f + write(f, "hehehehe") + end + + @test_throws ErrorException download_verify(url, hash, tmpfile; verbose=true) + @test download_verify(url, hash, tmpfile; verbose=true, force=true) + + end + + # Test a bad download fails properly + bad_url = "http://localhost:1/this_is_not_a_file.x86_64-linux-gnu.tar.gz" + bad_hash = "0"^64 + @test_throws ErrorException install(bad_url, bad_hash; prefix=prefix, verbose=true) + end +end + +# Test the same as the above, but using BinaryPackage abstraction +@testset "BinaryPackage" begin + temp_prefix() do prefix + if !haskey(libfoo_downloads, platform) + warn("Platform $platform does not have a libfoo download, skipping download tests") + else + url, hash = libfoo_downloads[platform] + fooifier = ExecutableProduct(prefix, "fooifier") + libfoo = LibraryProduct(prefix, "libfoo") + binpkg = BinaryPackage(url, hash, platform, [fooifier, libfoo]) + + # 
Test installation and uninstallation + @test install(binpkg; prefix=prefix, verbose=true) + @test uninstall(binpkg; prefix=prefix, verbose=true) + + # Now test that we can uninstall even if we don't have the right `url`: + @test install(binpkg; prefix=prefix, verbose=true) + binpkg2 = BinaryPackage("fakeurl", hash, platform, [fooifier, libfoo]) + @test uninstall(binpkg2; prefix=prefix, verbose=true) + + # Test that we can't uninstall from the wrong prefix + temp_prefix() do wrong_prefix + @test_throws ErrorException uninstall(binpkg; prefix=wrong_prefix, verbose=true) + end + + # Test that we can't guess a manifest path from a package with the wrong + # url and no products: + binpkg3 = BinaryPackage("fakeurl", hash, platform) + @test_throws ErrorException uninstall(binpkg3; prefix=prefix, verbose=true) + end + end +end + +# Test installation and failure modes of the bundled LibFoo.jl +@testset "LibFoo.jl" begin + const color="--color=$(Base.have_color ? "yes" : "no")" + cd("LibFoo.jl") do + rm("./deps/deps.jl"; force=true) + rm("./deps/usr"; force=true, recursive=true) + + # Install `libfoo` and build the `deps.jl` file for `LibFoo.jl` + run(`$(Base.julia_cmd()) $(color) deps/build.jl`) + + # Ensure `deps.jl` was actually created + @test isfile("deps/deps.jl") + end + + cd("LibFoo.jl/test") do + # Now, run `LibFoo.jl`'s tests, adding `LibFoo.jl` to the LOAD_PATH + # so that the tests can pick up the `LibFoo` module + withenv("JULIA_LOAD_PATH"=>joinpath(pwd(),"..","src")) do + run(`$(Base.julia_cmd()) $(color) runtests.jl`) + end + end +end From 4cbe4610bba3410ba2fb9af96104db066325c536 Mon Sep 17 00:00:00 2001 From: Kristoffer Carlsson Date: Thu, 16 Nov 2017 14:00:19 +0100 Subject: [PATCH 2/3] small fixes to BinaryProvider --- ext/BinaryProvider/src/BinaryProvider.jl | 12 +++++------- ext/BinaryProvider/src/PlatformNames.jl | 12 ------------ ext/BinaryProvider/src/Products.jl | 2 +- 3 files changed, 6 insertions(+), 20 deletions(-) diff --git 
a/ext/BinaryProvider/src/BinaryProvider.jl b/ext/BinaryProvider/src/BinaryProvider.jl index 2e7d7dc67d803..e2b1943ee4aeb 100644 --- a/ext/BinaryProvider/src/BinaryProvider.jl +++ b/ext/BinaryProvider/src/BinaryProvider.jl @@ -1,7 +1,5 @@ module BinaryProvider -using Compat - # Include our subprocess running funtionality include("OutputCollector.jl") # External utilities such as downloading/decompressing tarballs @@ -24,16 +22,16 @@ function __init__() global global_prefix # Initialize our global_prefix - global_prefix = Prefix(joinpath(dirname(@__FILE__), "../", "global_prefix")) - activate(global_prefix) + # global_prefix = Prefix(joinpath(dirname(@__FILE__), "../", "global_prefix")) + # activate(global_prefix) # Find the right download/compression engines for this platform probe_platform_engines!() # If we're on a julia that's too old, then fixup the color mappings - if !haskey(Base.text_colors, :default) - Base.text_colors[:default] = Base.color_normal - end + # if !haskey(Base.text_colors, :default) + # Base.text_colors[:default] = Base.color_normal + # end end end # module diff --git a/ext/BinaryProvider/src/PlatformNames.jl b/ext/BinaryProvider/src/PlatformNames.jl index 2b1598012b529..c998e39bcab65 100644 --- a/ext/BinaryProvider/src/PlatformNames.jl +++ b/ext/BinaryProvider/src/PlatformNames.jl @@ -124,18 +124,6 @@ function supported_platforms() ] end -# Compat doesn't use the Base definitions for whatever terrible reason, so we'll overload -# both, ensuring the user gets our definitions regardless of whether they use Sys.is* or -# Compat.Sys.is*. 
-if isdefined(Base.Sys, :isapple) - Base.Sys.isapple(p::Platform) = p isa MacOS - Base.Sys.islinux(p::Platform) = p isa Linux - Base.Sys.iswindows(p::Platform) = p isa Windows -end -Compat.Sys.isapple(p::Platform) = p isa MacOS -Compat.Sys.islinux(p::Platform) = p isa Linux -Compat.Sys.iswindows(p::Platform) = p isa Windows - """ platform_key(machine::AbstractString = Sys.MACHINE) diff --git a/ext/BinaryProvider/src/Products.jl b/ext/BinaryProvider/src/Products.jl index 73e9445fc80a0..2264b8ccf65ee 100644 --- a/ext/BinaryProvider/src/Products.jl +++ b/ext/BinaryProvider/src/Products.jl @@ -3,7 +3,7 @@ export Product, LibraryProduct, FileProduct, ExecutableProduct, satisfied, locat """ A `Product` is an expected result after building or installation of a package. """ -abstract Product +abstract type Product end """ A `LibraryProduct` is a special kind of `Product` that not only needs to exist, From 2ef7b9f73350d7c09c31373f03bdf4d52208b3b1 Mon Sep 17 00:00:00 2001 From: Kristoffer Carlsson Date: Thu, 16 Nov 2017 14:00:42 +0100 Subject: [PATCH 3/3] install packages async and use tarballs if possible --- src/Operations.jl | 147 ++++++++++++++++++++++++++++++++++------------ src/Pkg3.jl | 4 ++ 2 files changed, 113 insertions(+), 38 deletions(-) diff --git a/src/Operations.jl b/src/Operations.jl index e9f2c24d2b034..2cb40d24dc6d0 100644 --- a/src/Operations.jl +++ b/src/Operations.jl @@ -5,7 +5,7 @@ using Base: LibGit2 using Base: Pkg using Pkg3.TerminalMenus using Pkg3.Types -import Pkg3: depots +import Pkg3: depots, BinaryProvider, USE_LIBGIT2_FOR_ALL_DOWNLOADS, NUM_CONCURRENT_DOWNLOADS const SlugInt = UInt32 # max p = 4 const chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" @@ -13,8 +13,8 @@ const nchars = SlugInt(length(chars)) const max_p = floor(Int, log(nchars, typemax(SlugInt) >>> 8)) function slug(x::SlugInt, p::Int) - 1 ≤ p ≤ max_p || # otherwise previous steps are wrong - error("invalid slug size: $p (need 1 ≤ p ≤ $max_p)") + 1 ≤ p ≤ 
max_p || # otherwise previous steps are wrong + error("invalid slug size: $p (need 1 ≤ p ≤ $max_p)") return sprint() do io for i = 1:p x, d = divrem(x, nchars) @@ -74,7 +74,7 @@ function package_env_info(pkg::String, env::EnvCache = EnvCache(); verb::String if haskey(paths, uuid) for path in paths[uuid] info′ = parse_toml(path, "package.toml") - option *= " – $(info′["repo"])" + option *= " – $(info′["repo"])" break end else @@ -218,6 +218,13 @@ end const refspecs = ["+refs/*:refs/remotes/cache/*"] +function get_archive_url_for_version(url::String, version) + if (m = match(r"https://github.com/(.*?)/(.*?).git", url)) != nothing + return "https://github.com/$(m.captures[1])/$(m.captures[2])/archive/v$(version).tar.gz" + end + return nothing +end + function install( env::EnvCache, uuid::UUID, @@ -229,41 +236,71 @@ function install( # returns path to version & if it's newly installed version_path = find_installed(uuid, hash) ispath(version_path) && return version_path, false - upstream_dir = joinpath(depots()[1], "upstream") - ispath(upstream_dir) || mkpath(upstream_dir) - repo_path = joinpath(upstream_dir, string(uuid)) - repo = ispath(repo_path) ? 
LibGit2.GitRepo(repo_path) : begin - info("Cloning [$uuid] $name") - LibGit2.clone(urls[1], repo_path, isbare=true) - end - git_hash = LibGit2.GitHash(hash.bytes) - for i = 2:length(urls) - try LibGit2.GitObject(repo, git_hash) - break # object was found, we can stop + http_download_successful = true + if !USE_LIBGIT2_FOR_ALL_DOWNLOADS && version != nothing + for url in urls + archive_url = get_archive_url_for_version(url, version) + if archive_url != nothing + path = joinpath(tempdir(), name * "_" * randstring(6) * ".tar.gz") + url_success = true + try + cmd = BinaryProvider.gen_download_cmd(archive_url, path); + run(cmd, (DevNull, DevNull, DevNull)) + catch e + e isa InterruptException && rethrow(e) + url_success = false + end + url_success || continue + http_download_successful = true + dir = joinpath(tempdir(), randstring(12)) + mkpath(dir) + cmd = BinaryProvider.gen_unpack_cmd(path, dir); + run(cmd, (DevNull, DevNull, DevNull)) + dirs = readdir(dir) + # 7z on Win might create this spurious file + filter!(x -> x != "pax_global_header", dirs) + @assert length(dirs) == 1 + !isdir(version_path) && mkpath(version_path) + mv(joinpath(dir, dirs[1]), version_path; remove_destination=true) + Base.rm(path; force = true) + break # object was found, we can stop + end + end + end + if !http_download_successful || USE_LIBGIT2_FOR_ALL_DOWNLOADS + upstream_dir = joinpath(depots()[1], "upstream") + ispath(upstream_dir) || mkpath(upstream_dir) + repo_path = joinpath(upstream_dir, string(uuid)) + repo = ispath(repo_path) ? 
LibGit2.GitRepo(repo_path) : begin + # info("Cloning [$uuid] $name") + LibGit2.clone(urls[1], repo_path, isbare=true) + end + git_hash = LibGit2.GitHash(hash.bytes) + for i = 2:length(urls) + try LibGit2.GitObject(repo, git_hash) + break # object was found, we can stop + catch err + err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow(err) + end + url = urls[i] + LibGit2.fetch(repo, remoteurl=url, refspecs=refspecs) + end + tree = try + LibGit2.GitObject(repo, git_hash) catch err err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow(err) + error("$name: git object $(string(hash)) could not be found") end - url = urls[i] - info("Updating $name $(repr(url))") - LibGit2.fetch(repo, remoteurl=url, refspecs=refspecs) - end - tree = try - LibGit2.GitObject(repo, git_hash) - catch err - err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow(err) - error("$name: git object $(string(hash)) could not be found") - end - tree isa LibGit2.GitTree || - error("$name: git object $(string(hash)) should be a tree, not $(typeof(tree))") - mkpath(version_path) - opts = LibGit2.CheckoutOptions( - checkout_strategy = LibGit2.Consts.CHECKOUT_FORCE, - target_directory = Base.unsafe_convert(Cstring, version_path) - ) - h = string(hash)[1:16] - vstr = version != nothing ? 
"v$version [$h]" : "[$h]" - info("Installing $name $vstr") - LibGit2.checkout_tree(repo, tree, options=opts) + tree isa LibGit2.GitTree || + error("$name: git object $(string(hash)) should be a tree, not $(typeof(tree))") + mkpath(version_path) + opts = LibGit2.CheckoutOptions( + checkout_strategy = LibGit2.Consts.CHECKOUT_FORCE, + target_directory = Base.unsafe_convert(Cstring, version_path) + ) + h = string(hash)[1:16] + LibGit2.checkout_tree(repo, tree, options=opts) + end return version_path, true end @@ -318,11 +355,44 @@ function apply_versions(env::EnvCache, pkgs::Vector{PackageSpec})::Vector{UUID} names, hashes, urls = version_data(env, pkgs) # install & update manifest new_versions = UUID[] - for pkg in pkgs + + jobs = Channel(NUM_CONCURRENT_DOWNLOADS); + results = Channel(NUM_CONCURRENT_DOWNLOADS); + @schedule begin + for pkg in pkgs + put!(jobs, pkg) + end + end + + for i in 1:NUM_CONCURRENT_DOWNLOADS + @schedule begin + for pkg in jobs + uuid = pkg.uuid + version = pkg.version::VersionNumber + name, hash = names[uuid], hashes[uuid] + try + version_path, new = install(env, uuid, name, hash, urls[uuid], version) + put!(results, (pkg, version_path, version, hash, true)) + catch e + put!(results, e) + end + end + end + end + + max_name = maximum(strwidth(names[pkg.uuid]) for pkg in pkgs) + + for _ in 1:length(pkgs) + r = take!(results) + r isa Exception && cmderror("Error when installing packages:\n", sprint(Base.showerror, r)) + pkg, path, version, hash, new = r + if new + vstr = version != nothing ? 
"v$version" : "[$h]" + new && info("Installed $(rpad(names[pkg.uuid] * " ", max_name + 2, "─")) $vstr") + end uuid = pkg.uuid version = pkg.version::VersionNumber name, hash = names[uuid], hashes[uuid] - path, new = install(env, uuid, name, hash, urls[uuid], version) update_manifest(env, uuid, name, hash, version) new && push!(new_versions, uuid) end @@ -508,3 +578,4 @@ function up(env::EnvCache, pkgs::Vector{PackageSpec}) end end # module + diff --git a/src/Pkg3.jl b/src/Pkg3.jl index 393136837d1e6..267db347e0fcb 100644 --- a/src/Pkg3.jl +++ b/src/Pkg3.jl @@ -3,7 +3,11 @@ module Pkg3 const DEPOTS = [joinpath(homedir(), ".julia")] depots() = DEPOTS +const USE_LIBGIT2_FOR_ALL_DOWNLOADS = false +const NUM_CONCURRENT_DOWNLOADS = 8 + # load snapshotted dependencies +include("../ext/BinaryProvider/src/BinaryProvider.jl") include("../ext/TOML/src/TOML.jl") include("../ext/TerminalMenus/src/TerminalMenus.jl")