From c464748b45f6c87686d0966f7f8e1e47cd18b036 Mon Sep 17 00:00:00 2001 From: Aria Beingessner Date: Wed, 13 Sep 2023 14:59:42 -0400 Subject: [PATCH] feat(windows-sign): add unstable ssl.com windows signing ssldotcom-windows-sign=true --- cargo-dist/src/backend/ci/github.rs | 5 + cargo-dist/src/config.rs | 35 +- cargo-dist/src/init.rs | 9 + cargo-dist/src/tasks.rs | 9 +- cargo-dist/templates/ci/github_ci.yml.j2 | 64 +- cargo-dist/tests/integration-tests.rs | 68 + .../axolotlsay_ssldotcom_windows_sign.snap | 1741 +++++++++++++++++ ...xolotlsay_ssldotcom_windows_sign_prod.snap | 1741 +++++++++++++++++ 8 files changed, 3668 insertions(+), 4 deletions(-) create mode 100644 cargo-dist/tests/snapshots/axolotlsay_ssldotcom_windows_sign.snap create mode 100644 cargo-dist/tests/snapshots/axolotlsay_ssldotcom_windows_sign_prod.snap diff --git a/cargo-dist/src/backend/ci/github.rs b/cargo-dist/src/backend/ci/github.rs index 5a8652e0e..2c782ee28 100644 --- a/cargo-dist/src/backend/ci/github.rs +++ b/cargo-dist/src/backend/ci/github.rs @@ -9,6 +9,7 @@ use tracing::warn; use crate::{ backend::{diff_files, templates::TEMPLATE_CI_GITHUB}, + config::ProductionMode, errors::DistResult, DistGraph, SortedMap, SortedSet, TargetTriple, }; @@ -39,6 +40,8 @@ pub struct GithubCiInfo { pub publish_jobs: Vec, /// whether to create the release or assume an existing one pub create_release: bool, + /// \[unstable\] whether to add ssl.com windows binary signing + pub ssldotcom_windows_sign: Option, } impl GithubCiInfo { @@ -55,6 +58,7 @@ impl GithubCiInfo { .unwrap_or(&self_dist_version); let fail_fast = dist.fail_fast; let create_release = dist.create_release; + let ssldotcom_windows_sign = dist.ssldotcom_windows_sign.clone(); // Figure out what builds we need to do let mut needs_global_build = false; @@ -123,6 +127,7 @@ impl GithubCiInfo { pr_run_mode, global_task, create_release, + ssldotcom_windows_sign, } } diff --git a/cargo-dist/src/config.rs b/cargo-dist/src/config.rs index d14c827c1..73bd3b0ca 100644 --- a/cargo-dist/src/config.rs +++ b/cargo-dist/src/config.rs @@ -261,6 +261,11 @@ pub struct DistMetadata { #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "create-release")] pub create_release: Option, + + /// \[unstable\] Whether we should sign windows binaries with ssl.com + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "ssldotcom-windows-sign")] + pub ssldotcom_windows_sign: Option, } impl DistMetadata { @@ -293,6 +298,7 @@ impl DistMetadata { create_release: _, pr_run_mode: _, allow_dirty: _, + ssldotcom_windows_sign: _, } = self; if let Some(include) = include { for include in include { @@ -332,8 +338,9 @@ impl DistMetadata { publish_jobs, publish_prereleases, create_release, - pr_run_mode: _, + pr_run_mode, allow_dirty, + ssldotcom_windows_sign, } = self; // Check for global settings on local packages @@ -366,6 +373,12 @@ impl DistMetadata { if publish_prereleases.is_some() { warn!("package.metadata.dist.publish-prereleases is set, but this is only accepted in workspace.metadata (value is being ignored): {}", package_manifest_path); } + if pr_run_mode.is_some() { + warn!("package.metadata.dist.pr-run-mode is set, but this is only accepted in workspace.metadata (value is being ignored): {}", package_manifest_path); + } + if ssldotcom_windows_sign.is_some() { + warn!("package.metadata.dist.ssldotcom-windows-sign is set, but this is only accepted in workspace.metadata (value is being ignored): {}", package_manifest_path); + } // Merge non-global settings if 
installers.is_none() { @@ -792,6 +805,26 @@ impl DirtyMode { } } +/// For features that can be generated in "test" or "production" mode +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +pub enum ProductionMode { + /// test mode + #[serde(rename = "test")] + Test, + /// production mode + #[serde(rename = "prod")] + Prod, +} + +impl std::fmt::Display for ProductionMode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ProductionMode::Test => "test".fmt(f), + ProductionMode::Prod => "prod".fmt(f), + } + } +} + pub(crate) fn parse_metadata_table( manifest_path: &Utf8Path, metadata_table: Option<&serde_json::Value>, diff --git a/cargo-dist/src/init.rs b/cargo-dist/src/init.rs index d8b41ed28..fca5ebeb4 100644 --- a/cargo-dist/src/init.rs +++ b/cargo-dist/src/init.rs @@ -225,6 +225,7 @@ fn get_new_dist_metadata( create_release: None, pr_run_mode: None, allow_dirty: None, + ssldotcom_windows_sign: None, } }; @@ -684,6 +685,7 @@ fn apply_dist_to_metadata(metadata: &mut toml_edit::Item, meta: &DistMetadata) { create_release, pr_run_mode, allow_dirty, + ssldotcom_windows_sign, } = &meta; apply_optional_value( @@ -861,6 +863,13 @@ fn apply_dist_to_metadata(metadata: &mut toml_edit::Item, meta: &DistMetadata) { allow_dirty.as_ref(), ); + apply_optional_value( + table, + "ssldotcom-windows-sign", + "", + ssldotcom_windows_sign.as_ref().map(|p| p.to_string()), + ); + // Finalize the table table .decor_mut() diff --git a/cargo-dist/src/tasks.rs b/cargo-dist/src/tasks.rs index a7ec40519..12fef57d2 100644 --- a/cargo-dist/src/tasks.rs +++ b/cargo-dist/src/tasks.rs @@ -62,7 +62,7 @@ use tracing::{info, warn}; use crate::backend::ci::github::GithubCiInfo; use crate::backend::ci::CiInfo; -use crate::config::DirtyMode; +use crate::config::{DirtyMode, ProductionMode}; use crate::{ backend::{ installer::{ @@ -154,8 +154,10 @@ pub struct DistGraph { pub merge_tasks: bool, /// Whether failing tasks should make us give up on all other tasks pub fail_fast: bool, - /// Whether to creat a github release or edit an existing draft + /// Whether to create a github release or edit an existing draft pub create_release: bool, + /// \[unstable\] if Some, sign binaries with ssl.com + pub ssldotcom_windows_sign: Option, /// The desired cargo-dist version for handling this project pub desired_cargo_dist_version: Option, /// The desired rust toolchain for handling this project @@ -614,6 +616,7 @@ impl<'pkg_graph> DistGraphBuilder<'pkg_graph> { precise_builds, merge_tasks, fail_fast, + ssldotcom_windows_sign, // Processed elsewhere // // FIXME?: this is the last vestige of us actually needing to keep workspace_metadata @@ -665,6 +668,7 @@ impl<'pkg_graph> DistGraphBuilder<'pkg_graph> { let merge_tasks = merge_tasks.unwrap_or(false); let fail_fast = fail_fast.unwrap_or(false); let create_release = create_release.unwrap_or(true); + let ssldotcom_windows_sign = ssldotcom_windows_sign.clone(); let mut packages_with_mismatched_features = vec![]; // Compute/merge package configs let mut package_metadata = vec![]; @@ -720,6 +724,7 @@ impl<'pkg_graph> DistGraphBuilder<'pkg_graph> { fail_fast, merge_tasks, create_release, + ssldotcom_windows_sign, desired_cargo_dist_version, desired_rust_toolchain, tools, diff --git a/cargo-dist/templates/ci/github_ci.yml.j2 b/cargo-dist/templates/ci/github_ci.yml.j2 index fc73ccb6f..5e07fd82e 100644 --- a/cargo-dist/templates/ci/github_ci.yml.j2 +++ b/cargo-dist/templates/ci/github_ci.yml.j2 @@ -177,6 +177,66 @@ jobs: path: ${{ 
steps.cargo-dist.outputs.paths }} {{%- endif %}} +{{%- if ssldotcom_windows_sign %}} + + # Sign Windows artifacts with ssl.com + sign-windows-artifacts: + needs: + - plan + - upload-local-artifacts + {{%- if global_task %}} + - upload-global-artifacts + {{%- endif %}} + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SIGN_DIR_IN: target/distrib/sign-input + SIGN_DIR_OUT: target/distrib/sign-output + steps: + # Get all the artifacts for the signing tasks to use + - name: Fetch local artifacts + uses: actions/download-artifact@v3 + with: + name: artifacts + path: target/distrib/ + # Only try to sign files that the tool can handle + - name: Select Signable Artifacts + run: | + mkdir -p "$SIGN_DIR_IN" + mkdir -p "$SIGN_DIR_OUT" + for file in target/distrib/*.{msi,ps1}; do + [[ -e $file ]] && mv "$file" "$SIGN_DIR_IN" && echo "signing $file"; + done + # Sign the files + - name: Sign Artifacts with CodeSignTool + uses: ssldotcom/esigner-codesign@develop + with: + command: batch_sign + username: ${{ secrets.SSLDOTCOM_USERNAME }} + password: ${{ secrets.SSLDOTCOM_PASSWORD }} + credential_id: ${{ secrets.SSLDOTCOM_CREDENTIAL_ID }} + totp_secret: ${{ secrets.SSLDOTCOM_TOTP_SECRET }} + dir_path: ${{ env.SIGN_DIR_IN }} + output_path: ${{ env.SIGN_DIR_OUT }} + # Set this to TEST for testing (sandbox) and PROD for production + environment_name: {{%- if ssldotcom_windows_sign == "test" %}} TEST {{%- else %}} PROD {{%- endif %}} + # Regenerate checksum files for things that have been signed + - name: Regenerate Checksums + run: | + pushd "$SIGN_DIR_OUT" + for filename in *; do + echo "checksuming $filename" + sha256sum --binary "$filename" > "$filename.sha256" + done + popd + # Upload the result, overwriting old files + - name: "Upload artifacts" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: ${{ env.SIGN_DIR_OUT }} +{{%- endif %}} + should-publish: needs: - plan @@ -184,13 +244,15 @@ jobs: {{%- if global_task %}} - upload-global-artifacts {{%- endif %}} + {{%- if ssldotcom_windows_sign %}} + - sign-windows-artifacts + {{%- endif %}} if: ${{ needs.plan.outputs.publishing == 'true' }} runs-on: ubuntu-latest steps: - name: print tag run: echo "ok we're publishing!" 
- {{%- if 'homebrew' in publish_jobs and tap %}} publish-homebrew-formula: diff --git a/cargo-dist/tests/integration-tests.rs b/cargo-dist/tests/integration-tests.rs index 97461e2ef..39fb355aa 100644 --- a/cargo-dist/tests/integration-tests.rs +++ b/cargo-dist/tests/integration-tests.rs @@ -126,6 +126,74 @@ create-release = false }) } +#[test] +fn axolotlsay_ssldotcom_windows_sign() -> Result<(), miette::Report> { + let test_name = _function_name!(); + AXOLOTLSAY.run_test(|ctx| { + let dist_version = ctx.tools.cargo_dist.version().unwrap(); + ctx.patch_cargo_toml(format!(r#" +[workspace.metadata.dist] +cargo-dist-version = "{dist_version}" +installers = ["shell", "powershell", "msi"] +targets = ["x86_64-unknown-linux-gnu", "x86_64-apple-darwin", "x86_64-pc-windows-msvc", "aarch64-apple-darwin"] +ci = ["github"] +ssldotcom-windows-sign = "test" +unix-archive = ".tar.gz" +windows-archive = ".tar.gz" + +[package.metadata.wix] +upgrade-guid = "B36177BE-EA4D-44FB-B05C-EDDABDAA95CA" +path-guid = "BFD25009-65A4-4D1E-97F1-0030465D90D6" + +"# + ))?; + + // Run generate to make sure stuff is up to date before running other commands + let ci_result = ctx.cargo_dist_generate(test_name)?; + let ci_snap = ci_result.check_all()?; + // Do usual build+plan checks + let main_result = ctx.cargo_dist_build_and_plan(test_name)?; + let main_snap = main_result.check_all(ctx, ".cargo/bin/")?; + // snapshot all + main_snap.join(ci_snap).snap(); + Ok(()) + }) +} + +#[test] +fn axolotlsay_ssldotcom_windows_sign_prod() -> Result<(), miette::Report> { + let test_name = _function_name!(); + AXOLOTLSAY.run_test(|ctx| { + let dist_version = ctx.tools.cargo_dist.version().unwrap(); + ctx.patch_cargo_toml(format!(r#" +[workspace.metadata.dist] +cargo-dist-version = "{dist_version}" +installers = ["shell", "powershell", "msi"] +targets = ["x86_64-unknown-linux-gnu", "x86_64-apple-darwin", "x86_64-pc-windows-msvc", "aarch64-apple-darwin"] +ci = ["github"] +ssldotcom-windows-sign = "prod" +unix-archive = ".tar.gz" +windows-archive = ".tar.gz" + +[package.metadata.wix] +upgrade-guid = "B36177BE-EA4D-44FB-B05C-EDDABDAA95CA" +path-guid = "BFD25009-65A4-4D1E-97F1-0030465D90D6" + +"# + ))?; + + // Run generate to make sure stuff is up to date before running other commands + let ci_result = ctx.cargo_dist_generate(test_name)?; + let ci_snap = ci_result.check_all()?; + // Do usual build+plan checks + let main_result = ctx.cargo_dist_build_and_plan(test_name)?; + let main_snap = main_result.check_all(ctx, ".cargo/bin/")?; + // snapshot all + main_snap.join(ci_snap).snap(); + Ok(()) + }) +} + #[test] fn akaikatana_basic() -> Result<(), miette::Report> { let test_name = _function_name!(); diff --git a/cargo-dist/tests/snapshots/axolotlsay_ssldotcom_windows_sign.snap b/cargo-dist/tests/snapshots/axolotlsay_ssldotcom_windows_sign.snap new file mode 100644 index 000000000..835617e22 --- /dev/null +++ b/cargo-dist/tests/snapshots/axolotlsay_ssldotcom_windows_sign.snap @@ -0,0 +1,1741 @@ +--- +source: cargo-dist/tests/gallery/dist.rs +expression: self.payload +--- +================ installer.sh ================ +#!/bin/sh +# shellcheck shell=dash +# +# Licensed under the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +if [ "$KSH_VERSION" = 'Version JM 93t+ 2010-03-05' ]; then + # The version of ksh93 that ships with many illumos systems does not + # support the "local" extension. 
Print a message rather than fail in + # subtle ways later on: + echo 'this installer does not work with this ksh93 version; please try bash!' >&2 + exit 1 +fi + +set -u + +APP_NAME="axolotlsay" +APP_VERSION="0.1.0" +ARTIFACT_DOWNLOAD_URL="${INSTALLER_DOWNLOAD_URL:-https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0}" +PRINT_VERBOSE=${INSTALLER_PRINT_VERBOSE:-0} +PRINT_QUIET=${INSTALLER_PRINT_QUIET:-0} +NO_MODIFY_PATH=${INSTALLER_NO_MODIFY_PATH:-0} + +usage() { + # print help (this cat/EOF stuff is a "heredoc" string) + cat <&2 + say_verbose " from $_url" 1>&2 + say_verbose " to $_file" 1>&2 + + ensure mkdir -p "$_dir" + + if ! downloader "$_url" "$_file"; then + say "failed to download $_url" + say "this may be a standard network error, but it may also indicate" + say "that $APP_NAME's release process is not working. When in doubt" + say "please feel free to open an issue!" + exit 1 + fi + + # unpack the archive + case "$_zip_ext" in + ".zip") + ensure unzip -q "$_file" -d "$_dir" + ;; + + ".tar."*) + ensure tar xf "$_file" --strip-components 1 -C "$_dir" + ;; + *) + err "unknown archive format: $_zip_ext" + ;; + esac + + install "$_dir" "$_bins" "$@" + local _retval=$? + + ignore rm -rf "$_dir" + + return "$_retval" +} + +# See discussion of late-bound vs early-bound for why we use single-quotes with env vars +# shellcheck disable=SC2016 +install() { + # This code needs to both compute certain paths for itself to write to, and + # also write them to shell/rc files so that they can look them up to e.g. + # add them to PATH. This requires an active distinction between paths + # and expressions that can compute them. + # + # The distinction lies in when we want env-vars to be evaluated. For instance + # if we determine that we want to install to $HOME/.myapp, which do we add + # to e.g. $HOME/.profile: + # + # * early-bound: export PATH="/home/myuser/.myapp:$PATH" + # * late-bound: export PATH="$HOME/.myapp:$PATH" + # + # In this case most people would prefer the late-bound version, but in other + # cases the early-bound version might be a better idea. In particular when using + # other env-vars than $HOME, they are more likely to be only set temporarily + # for the duration of this install script, so it's more advisable to erase their + # existence with early-bounding. + # + # This distinction is handled by "double-quotes" (early) vs 'single-quotes' (late). + # + # This script has a few different variants, the most complex one being the + # CARGO_HOME version which attempts to install things to Cargo's bin dir, + # potentially setting up a minimal version if the user hasn't ever installed Cargo. 
+ # + # In this case we need to: + # + # * Install to $HOME/.cargo/bin/ + # * Create a shell script at $HOME/.cargo/env that: + # * Checks if $HOME/.cargo/bin/ is on PATH + # * and if not prepends it to PATH + # * Edits $HOME/.profile to run $HOME/.cargo/env (if the line doesn't exist) + # + # To do this we need these 4 values: + + # The actual path we're going to install to + local _install_dir + # Path to the an shell script that adds install_dir to PATH + local _env_script_path + # Potentially-late-bound version of install_dir to write env_script + local _install_dir_expr + # Potentially-late-bound version of env_script_path to write to rcfiles like $HOME/.profile + local _env_script_path_expr + + + # first try CARGO_HOME, then fallback to HOME + if [ -n "${CARGO_HOME:-}" ]; then + _install_dir="$CARGO_HOME/bin" + _env_script_path="$CARGO_HOME/env" + # If CARGO_HOME was set but it ended up being the default $HOME-based path, + # then keep things late-bound. Otherwise bake the value for safety. + # This is what rustup does, and accurately reproducing it is useful. + if [ -n "${HOME:-}" ]; then + if [ "$HOME/.cargo/bin" = "$_install_dir" ]; then + _install_dir_expr='$HOME/.cargo/bin' + _env_script_path_expr='$HOME/.cargo/env' + else + _install_dir_expr="$_install_dir" + _env_script_path_expr="$_env_script_path" + fi + else + _install_dir_expr="$_install_dir" + _env_script_path_expr="$_env_script_path" + fi + elif [ -n "${HOME:-}" ]; then + _install_dir="$HOME/.cargo/bin" + _env_script_path="$HOME/.cargo/env" + _install_dir_expr='$HOME/.cargo/bin' + _env_script_path_expr='$HOME/.cargo/env' + else + err "could not find your CARGO_HOME or HOME dir to install binaries to" + fi + + say "installing to $_install_dir" + ensure mkdir -p "$_install_dir" + + # copy all the binaries to the install dir + local _src_dir="$1" + local _bins="$2" + for _bin_name in $_bins; do + local _bin="$_src_dir/$_bin_name" + ensure cp "$_bin" "$_install_dir" + # unzip seems to need this chmod + ensure chmod +x "$_install_dir/$_bin_name" + say " $_bin_name" + done + + say "everything's installed!" + + if [ "0" = "$NO_MODIFY_PATH" ]; then + add_install_dir_to_path "$_install_dir_expr" "$_env_script_path" "$_env_script_path_expr" + fi +} + +add_install_dir_to_path() { + # Edit rcfiles ($HOME/.profile) to add install_dir to $PATH + # + # We do this slightly indirectly by creating an "env" shell script which checks if install_dir + # is on $PATH already, and prepends it if not. The actual line we then add to rcfiles + # is to just source that script. This allows us to blast it into lots of different rcfiles and + # have it run multiple times without causing problems. It's also specifically compatible + # with the system rustup uses, so that we don't conflict with it. + local _install_dir_expr="$1" + local _env_script_path="$2" + local _env_script_path_expr="$3" + if [ -n "${HOME:-}" ]; then + local _rcfile="$HOME/.profile" + # `source x` is an alias for `. x`, and the latter is more portable/actually-posix. + # This apparently comes up a lot on freebsd. It's easy enough to always add + # the more robust line to rcfiles, but when telling the user to apply the change + # to their current shell ". x" is pretty easy to misread/miscopy, so we use the + # prettier "source x" line there. Hopefully people with Weird Shells are aware + # this is a thing and know to tweak it (or just restart their shell). + local _robust_line=". 
\"$_env_script_path_expr\"" + local _pretty_line="source \"$_env_script_path_expr\"" + + # Add the env script if it doesn't already exist + if [ ! -f "$_env_script_path" ]; then + say_verbose "creating $_env_script_path" + write_env_script "$_install_dir_expr" "$_env_script_path" + else + say_verbose "$_env_script_path already exists" + fi + + # Check if the line is already in the rcfile + # grep: 0 if matched, 1 if no match, and 2 if an error occurred + # + # Ideally we could use quiet grep (-q), but that makes "match" and "error" + # have the same behaviour, when we want "no match" and "error" to be the same + # (on error we want to create the file, which >> conveniently does) + # + # We search for both kinds of line here just to do the right thing in more cases. + if ! grep -F "$_robust_line" "$_rcfile" > /dev/null 2>/dev/null && \ + ! grep -F "$_pretty_line" "$_rcfile" > /dev/null 2>/dev/null + then + # If the script now exists, add the line to source it to the rcfile + # (This will also create the rcfile if it doesn't exist) + if [ -f "$_env_script_path" ]; then + say_verbose "adding $_robust_line to $_rcfile" + ensure echo "$_robust_line" >> "$_rcfile" + say "" + say "To add $_install_dir_expr to your PATH, either restart your shell or run:" + say "" + say " $_pretty_line" + fi + else + say_verbose "$_install_dir already on PATH" + fi + fi +} + +write_env_script() { + # write this env script to the given path (this cat/EOF stuff is a "heredoc" string) + local _install_dir_expr="$1" + local _env_script_path="$2" + ensure cat < "$_env_script_path" +#!/bin/sh +# add binaries to PATH if they aren't added yet +# affix colons on either side of \$PATH to simplify matching +case ":\${PATH}:" in + *:"$_install_dir_expr":*) + ;; + *) + # Prepending path in case a system-installed binary needs to be overridden + export PATH="$_install_dir_expr:\$PATH" + ;; +esac +EOF +} + +check_proc() { + # Check for /proc by looking for the /proc/self/exe link + # This is only run on Linux + if ! test -L /proc/self/exe ; then + err "fatal: Unable to find /proc/self/exe. Is /proc mounted? Installation cannot proceed without /proc." + fi +} + +get_bitness() { + need_cmd head + # Architecture detection without dependencies beyond coreutils. + # ELF files start out "\x7fELF", and the following byte is + # 0x01 for 32-bit and + # 0x02 for 64-bit. + # The printf builtin on some shells like dash only supports octal + # escape sequences, so we use those. + local _current_exe_head + _current_exe_head=$(head -c 5 /proc/self/exe ) + if [ "$_current_exe_head" = "$(printf '\177ELF\001')" ]; then + echo 32 + elif [ "$_current_exe_head" = "$(printf '\177ELF\002')" ]; then + echo 64 + else + err "unknown platform bitness" + fi +} + +is_host_amd64_elf() { + need_cmd head + need_cmd tail + # ELF e_machine detection without dependencies beyond coreutils. + # Two-byte field at offset 0x12 indicates the CPU, + # but we're interested in it being 0x3E to indicate amd64, or not that. + local _current_exe_machine + _current_exe_machine=$(head -c 19 /proc/self/exe | tail -c 1) + [ "$_current_exe_machine" = "$(printf '\076')" ] +} + +get_endianness() { + local cputype=$1 + local suffix_eb=$2 + local suffix_el=$3 + + # detect endianness without od/hexdump, like get_bitness() does. 
+ need_cmd head + need_cmd tail + + local _current_exe_endianness + _current_exe_endianness="$(head -c 6 /proc/self/exe | tail -c 1)" + if [ "$_current_exe_endianness" = "$(printf '\001')" ]; then + echo "${cputype}${suffix_el}" + elif [ "$_current_exe_endianness" = "$(printf '\002')" ]; then + echo "${cputype}${suffix_eb}" + else + err "unknown platform endianness" + fi +} + +get_architecture() { + local _ostype + local _cputype + _ostype="$(uname -s)" + _cputype="$(uname -m)" + local _clibtype="gnu" + + if [ "$_ostype" = Linux ]; then + if [ "$(uname -o)" = Android ]; then + _ostype=Android + fi + if ldd --version 2>&1 | grep -q 'musl'; then + _clibtype="musl" + fi + fi + + if [ "$_ostype" = Darwin ] && [ "$_cputype" = i386 ]; then + # Darwin `uname -m` lies + if sysctl hw.optional.x86_64 | grep -q ': 1'; then + _cputype=x86_64 + fi + fi + + if [ "$_ostype" = SunOS ]; then + # Both Solaris and illumos presently announce as "SunOS" in "uname -s" + # so use "uname -o" to disambiguate. We use the full path to the + # system uname in case the user has coreutils uname first in PATH, + # which has historically sometimes printed the wrong value here. + if [ "$(/usr/bin/uname -o)" = illumos ]; then + _ostype=illumos + fi + + # illumos systems have multi-arch userlands, and "uname -m" reports the + # machine hardware name; e.g., "i86pc" on both 32- and 64-bit x86 + # systems. Check for the native (widest) instruction set on the + # running kernel: + if [ "$_cputype" = i86pc ]; then + _cputype="$(isainfo -n)" + fi + fi + + case "$_ostype" in + + Android) + _ostype=linux-android + ;; + + Linux) + check_proc + _ostype=unknown-linux-$_clibtype + _bitness=$(get_bitness) + ;; + + FreeBSD) + _ostype=unknown-freebsd + ;; + + NetBSD) + _ostype=unknown-netbsd + ;; + + DragonFly) + _ostype=unknown-dragonfly + ;; + + Darwin) + _ostype=apple-darwin + ;; + + illumos) + _ostype=unknown-illumos + ;; + + MINGW* | MSYS* | CYGWIN* | Windows_NT) + _ostype=pc-windows-gnu + ;; + + *) + err "unrecognized OS type: $_ostype" + ;; + + esac + + case "$_cputype" in + + i386 | i486 | i686 | i786 | x86) + _cputype=i686 + ;; + + xscale | arm) + _cputype=arm + if [ "$_ostype" = "linux-android" ]; then + _ostype=linux-androideabi + fi + ;; + + armv6l) + _cputype=arm + if [ "$_ostype" = "linux-android" ]; then + _ostype=linux-androideabi + else + _ostype="${_ostype}eabihf" + fi + ;; + + armv7l | armv8l) + _cputype=armv7 + if [ "$_ostype" = "linux-android" ]; then + _ostype=linux-androideabi + else + _ostype="${_ostype}eabihf" + fi + ;; + + aarch64 | arm64) + _cputype=aarch64 + ;; + + x86_64 | x86-64 | x64 | amd64) + _cputype=x86_64 + ;; + + mips) + _cputype=$(get_endianness mips '' el) + ;; + + mips64) + if [ "$_bitness" -eq 64 ]; then + # only n64 ABI is supported for now + _ostype="${_ostype}abi64" + _cputype=$(get_endianness mips64 '' el) + fi + ;; + + ppc) + _cputype=powerpc + ;; + + ppc64) + _cputype=powerpc64 + ;; + + ppc64le) + _cputype=powerpc64le + ;; + + s390x) + _cputype=s390x + ;; + riscv64) + _cputype=riscv64gc + ;; + loongarch64) + _cputype=loongarch64 + ;; + *) + err "unknown CPU type: $_cputype" + + esac + + # Detect 64-bit linux with 32-bit userland + if [ "${_ostype}" = unknown-linux-gnu ] && [ "${_bitness}" -eq 32 ]; then + case $_cputype in + x86_64) + # 32-bit executable for amd64 = x32 + if is_host_amd64_elf; then { + err "x32 linux unsupported" + }; else + _cputype=i686 + fi + ;; + mips64) + _cputype=$(get_endianness mips '' el) + ;; + powerpc64) + _cputype=powerpc + ;; + aarch64) + _cputype=armv7 + if [ 
"$_ostype" = "linux-android" ]; then + _ostype=linux-androideabi + else + _ostype="${_ostype}eabihf" + fi + ;; + riscv64gc) + err "riscv64 with 32-bit userland unsupported" + ;; + esac + fi + + # treat armv7 systems without neon as plain arm + if [ "$_ostype" = "unknown-linux-gnueabihf" ] && [ "$_cputype" = armv7 ]; then + if ensure grep '^Features' /proc/cpuinfo | grep -q -v neon; then + # At least one processor does not have NEON. + _cputype=arm + fi + fi + + _arch="${_cputype}-${_ostype}" + + RETVAL="$_arch" +} + +say() { + if [ "0" = "$PRINT_QUIET" ]; then + echo "$1" + fi +} + +say_verbose() { + if [ "1" = "$PRINT_VERBOSE" ]; then + echo "$1" + fi +} + +err() { + if [ "0" = "$PRINT_QUIET" ]; then + local red + local reset + red=$(tput setaf 1 2>/dev/null || echo '') + reset=$(tput sgr0 2>/dev/null || echo '') + say "${red}ERROR${reset}: $1" >&2 + fi + exit 1 +} + +need_cmd() { + if ! check_cmd "$1" + then err "need '$1' (command not found)" + fi +} + +check_cmd() { + command -v "$1" > /dev/null 2>&1 + return $? +} + +assert_nz() { + if [ -z "$1" ]; then err "assert_nz $2"; fi +} + +# Run a command that should never fail. If the command fails execution +# will immediately terminate with an error showing the failing +# command. +ensure() { + if ! "$@"; then err "command failed: $*"; fi +} + +# This is just for indicating that commands' results are being +# intentionally ignored. Usually, because it's being executed +# as part of error handling. +ignore() { + "$@" +} + +# This wraps curl or wget. Try curl first, if not installed, +# use wget instead. +downloader() { + if check_cmd curl + then _dld=curl + elif check_cmd wget + then _dld=wget + else _dld='curl or wget' # to be used in error message of need_cmd + fi + + if [ "$1" = --check ] + then need_cmd "$_dld" + elif [ "$_dld" = curl ] + then curl -sSfL "$1" -o "$2" + elif [ "$_dld" = wget ] + then wget "$1" -O "$2" + else err "Unknown downloader" # should not reach here + fi +} + +download_binary_and_run_installer "$@" || exit 1 + +================ installer.ps1 ================ +# Licensed under the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. 
+ +<# +.SYNOPSIS + +The installer for axolotlsay 0.1.0 + +.DESCRIPTION + +This script detects what platform you're on and fetches an appropriate archive from +https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0 +then unpacks the binaries and installs them to $env:CARGO_HOME\bin ($HOME\.cargo\bin) + +It will then add that dir to PATH by editing your Environment.Path registry key + +.PARAMETER ArtifactDownloadUrl +The URL of the directory where artifacts can be fetched from + +.PARAMETER NoModifyPath +Don't add the install directory to PATH + +.PARAMETER Help +Print help + +#> + +param ( + [Parameter(HelpMessage = "The URL of the directory where artifacts can be fetched from")] + [string]$ArtifactDownloadUrl = 'https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0', + [Parameter(HelpMessage = "Don't add the install directory to PATH")] + [switch]$NoModifyPath, + [Parameter(HelpMessage = "Print Help")] + [switch]$Help +) + +$app_name = 'axolotlsay' +$app_version = '0.1.0' + +function Install-Binary($install_args) { + if ($Help) { + Get-Help $PSCommandPath -Detailed + Exit + } + $old_erroractionpreference = $ErrorActionPreference + $ErrorActionPreference = 'stop' + + Initialize-Environment + + # Platform info injected by cargo-dist + $platforms = @{ + "x86_64-pc-windows-msvc" = @{ + "artifact_name" = "axolotlsay-x86_64-pc-windows-msvc.tar.gz" + "bins" = "axolotlsay.exe" + "zip_ext" = ".tar.gz" + } + } + + $fetched = Download "$ArtifactDownloadUrl" $platforms + # FIXME: add a flag that lets the user not do this step + Invoke-Installer $fetched "$install_args" + + $ErrorActionPreference = $old_erroractionpreference +} + +function Get-TargetTriple() { + try { + # NOTE: this might return X64 on ARM64 Windows, which is OK since emulation is available. + # It works correctly starting in PowerShell Core 7.3 and Windows PowerShell in Win 11 22H2. + # Ideally this would just be + # [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture + # but that gets a type from the wrong assembly on Windows PowerShell (i.e. not Core) + $a = [System.Reflection.Assembly]::LoadWithPartialName("System.Runtime.InteropServices.RuntimeInformation") + $t = $a.GetType("System.Runtime.InteropServices.RuntimeInformation") + $p = $t.GetProperty("OSArchitecture") + # Possible OSArchitecture Values: https://learn.microsoft.com/dotnet/api/system.runtime.interopservices.architecture + # Rust supported platforms: https://doc.rust-lang.org/stable/rustc/platform-support.html + switch ($p.GetValue($null).ToString()) + { + "X86" { return "i686-pc-windows-msvc" } + "X64" { return "x86_64-pc-windows-msvc" } + "Arm" { return "thumbv7a-pc-windows-msvc" } + "Arm64" { return "aarch64-pc-windows-msvc" } + } + } catch { + # The above was added in .NET 4.7.1, so Windows PowerShell in versions of Windows + # prior to Windows 10 v1709 may not have this API. + Write-Verbose "Get-TargetTriple: Exception when trying to determine OS architecture." + Write-Verbose $_ + } + + # This is available in .NET 4.0. We already checked for PS 5, which requires .NET 4.5. 
+ Write-Verbose("Get-TargetTriple: falling back to Is64BitOperatingSystem.") + if ([System.Environment]::Is64BitOperatingSystem) { + return "x86_64-pc-windows-msvc" + } else { + return "i686-pc-windows-msvc" + } +} + +function Download($download_url, $platforms) { + $arch = Get-TargetTriple + + if (-not $platforms.ContainsKey($arch)) { + # X64 is well-supported, including in emulation on ARM64 + Write-Verbose "$arch is not availablem falling back to X64" + $arch = "x86_64-pc-windows-msvc" + } + + if (-not $platforms.ContainsKey($arch)) { + # should not be possible, as currently we always produce X64 binaries. + $platforms_json = ConvertTo-Json $platforms + throw "ERROR: could not find binaries for this platform. Last platform tried: $arch platform info: $platforms_json" + } + + # Lookup what we expect this platform to look like + $info = $platforms[$arch] + $zip_ext = $info["zip_ext"] + $bin_names = $info["bins"] + $artifact_name = $info["artifact_name"] + + # Make a new temp dir to unpack things to + $tmp = New-Temp-Dir + $dir_path = "$tmp\$app_name$zip_ext" + + # Download and unpack! + $url = "$download_url/$artifact_name" + Write-Information "Downloading $app_name $app_version ($arch)" + Write-Verbose " from $url" + Write-Verbose " to $dir_path" + $wc = New-Object Net.Webclient + $wc.downloadFile($url, $dir_path) + + Write-Verbose "Unpacking to $tmp" + + # Select the tool to unpack the files with. + # + # As of windows 10(?), powershell comes with tar preinstalled, but in practice + # it only seems to support .tar.gz, and not xz/zstd. Still, we should try to + # forward all tars to it in case the user has a machine that can handle it! + switch -Wildcard ($zip_ext) { + ".zip" { + Expand-Archive -Path $dir_path -DestinationPath "$tmp"; + Break + } + ".tar.*" { + tar xf $dir_path --strip-components 1 -C "$tmp"; + Break + } + Default { + throw "ERROR: unknown archive format $zip_ext" + } + } + + # Let the next step know what to copy + $bin_paths = @() + foreach ($bin_name in $bin_names) { + Write-Verbose " Unpacked $bin_name" + $bin_paths += "$tmp\$bin_name" + } + return $bin_paths +} + +function Invoke-Installer($bin_paths) { + + # first try CARGO_HOME, then fallback to HOME + # (for whatever reason $HOME is not a normal env var and doesn't need the $env: prefix) + $dest_dir = if (($base_dir = $env:CARGO_HOME)) { + Join-Path $base_dir "bin" + } elseif (($base_dir = $HOME)) { + Join-Path $base_dir ".cargo\bin" + } else { + throw "ERROR: could not find your HOME dir or CARGO_HOME to install binaries to" + } + + $dest_dir = New-Item -Force -ItemType Directory -Path $dest_dir + Write-Information "Installing to $dest_dir" + # Just copy the binaries from the temp location to the install dir + foreach ($bin_path in $bin_paths) { + $installed_file = Split-Path -Path "$bin_path" -Leaf + Copy-Item "$bin_path" -Destination "$dest_dir" + Remove-Item "$bin_path" -Recurse -Force + Write-Information " $installed_file" + } + + Write-Information "Everything's installed!" + if (-not $NoModifyPath) { + if (Add-Path $dest_dir) { + Write-Information "" + Write-Information "$dest_dir was added to your PATH, you may need to restart your shell for that to take effect." 
+ } + } +} + +# Try to add the given path to PATH via the registry +# +# Returns true if the registry was modified, otherwise returns false +# (indicating it was already on PATH) +function Add-Path($OrigPathToAdd) { + $RegistryPath = "HKCU:\Environment" + $PropertyName = "Path" + $PathToAdd = $OrigPathToAdd + + $Item = if (Test-Path $RegistryPath) { + # If the registry key exists, get it + Get-Item -Path $RegistryPath + } else { + # If the registry key doesn't exist, create it + Write-Verbose "Creating $RegistryPath" + New-Item -Path $RegistryPath -Force + } + + $OldPath = "" + try { + # Try to get the old PATH value. If that fails, assume we're making it from scratch. + # Otherwise assume there's already paths in here and use a ; separator + $OldPath = $Item | Get-ItemPropertyValue -Name $PropertyName + $PathToAdd = "$PathToAdd;" + } catch { + # We'll be creating the PATH from scratch + Write-Verbose "Adding $PropertyName Property to $RegistryPath" + } + + # Check if the path is already there + # + # We don't want to incorrectly match "C:\blah\" to "C:\blah\blah\", so we include the semicolon + # delimiters when searching, ensuring exact matches. To avoid corner cases we add semicolons to + # both sides of the input, allowing us to pretend we're always in the middle of a list. + if (";$OldPath;" -like "*;$OrigPathToAdd;*") { + # Already on path, nothing to do + Write-Verbose "install dir already on PATH, all done!" + return $false + } else { + # Actually update PATH + Write-Verbose "Adding $OrigPathToAdd to your PATH" + $NewPath = $PathToAdd + $OldPath + # We use -Force here to make the value already existing not be an error + $Item | New-ItemProperty -Name $PropertyName -Value $NewPath -PropertyType String -Force | Out-Null + return $true + } +} + +function Initialize-Environment() { + If (($PSVersionTable.PSVersion.Major) -lt 5) { + Write-Error "PowerShell 5 or later is required to install $app_name." + Write-Error "Upgrade PowerShell: https://docs.microsoft.com/en-us/powershell/scripting/setup/installing-windows-powershell" + break + } + + # show notification to change execution policy: + $allowedExecutionPolicy = @('Unrestricted', 'RemoteSigned', 'ByPass') + If ((Get-ExecutionPolicy).ToString() -notin $allowedExecutionPolicy) { + Write-Error "PowerShell requires an execution policy in [$($allowedExecutionPolicy -join ", ")] to run $app_name." 
+ Write-Error "For example, to set the execution policy to 'RemoteSigned' please run :" + Write-Error "'Set-ExecutionPolicy RemoteSigned -scope CurrentUser'" + break + } + + # GitHub requires TLS 1.2 + If ([System.Enum]::GetNames([System.Net.SecurityProtocolType]) -notcontains 'Tls12') { + Write-Error "Installing $app_name requires at least .NET Framework 4.5" + Write-Error "Please download and install it first:" + Write-Error "https://www.microsoft.com/net/download" + break + } +} + +function New-Temp-Dir() { + [CmdletBinding(SupportsShouldProcess)] + param() + $parent = [System.IO.Path]::GetTempPath() + [string] $name = [System.Guid]::NewGuid() + New-Item -ItemType Directory -Path (Join-Path $parent $name) +} + +# PSScriptAnalyzer doesn't like how we use our params as globals, this calms it +$Null = $ArtifactDownloadUrl, $NoModifyPath, $Help +# Make Write-Information statements be visible +$InformationPreference = "Continue" +Install-Binary "$Args" + +================ dist-manifest.json ================ +{ + "dist_version": "CENSORED", + "announcement_tag": "v0.1.0", + "announcement_is_prerelease": false, + "announcement_title": "Version 0.1.0", + "announcement_changelog": "```text\n +------------------------+\n | the initial release!!! |\n +------------------------+\n /\n≽(◕ ᴗ ◕)≼\n```", + "announcement_github_body": "## Release Notes\n\n```text\n +------------------------+\n | the initial release!!! |\n +------------------------+\n /\n≽(◕ ᴗ ◕)≼\n```\n\n## Install axolotlsay 0.1.0\n\n### Install prebuilt binaries via shell script\n\n```sh\ncurl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-installer.sh | sh\n```\n\n### Install prebuilt binaries via powershell script\n\n```sh\nirm https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-installer.ps1 | iex\n```\n\n## Download axolotlsay 0.1.0\n\n| File | Platform | Checksum |\n|--------|----------|----------|\n| [axolotlsay-aarch64-apple-darwin.tar.gz](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-aarch64-apple-darwin.tar.gz) | macOS Apple Silicon | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-aarch64-apple-darwin.tar.gz.sha256) |\n| [axolotlsay-x86_64-apple-darwin.tar.gz](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-apple-darwin.tar.gz) | macOS Intel | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-apple-darwin.tar.gz.sha256) |\n| [axolotlsay-x86_64-pc-windows-msvc.tar.gz](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-pc-windows-msvc.tar.gz) | Windows x64 | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256) |\n| [axolotlsay-x86_64-unknown-linux-gnu.tar.gz](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-unknown-linux-gnu.tar.gz) | Linux x64 | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256) |\n| [axolotlsay-x86_64-pc-windows-msvc.msi](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-pc-windows-msvc.msi) | Windows x64 | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-pc-windows-msvc.msi.sha256) |\n\n", + "system_info": { + "cargo_version_line": "CENSORED" + }, + "releases": [ + { + "app_name": 
"axolotlsay", + "app_version": "0.1.0", + "artifacts": [ + "axolotlsay-installer.sh", + "axolotlsay-installer.ps1", + "axolotlsay-aarch64-apple-darwin.tar.gz", + "axolotlsay-aarch64-apple-darwin.tar.gz.sha256", + "axolotlsay-x86_64-apple-darwin.tar.gz", + "axolotlsay-x86_64-apple-darwin.tar.gz.sha256", + "axolotlsay-x86_64-pc-windows-msvc.tar.gz", + "axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256", + "axolotlsay-x86_64-pc-windows-msvc.msi", + "axolotlsay-x86_64-pc-windows-msvc.msi.sha256", + "axolotlsay-x86_64-unknown-linux-gnu.tar.gz", + "axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256" + ] + } + ], + "artifacts": { + "axolotlsay-aarch64-apple-darwin.tar.gz": { + "name": "axolotlsay-aarch64-apple-darwin.tar.gz", + "kind": "executable-zip", + "target_triples": [ + "aarch64-apple-darwin" + ], + "assets": [ + { + "name": "CHANGELOG.md", + "path": "CHANGELOG.md", + "kind": "changelog" + }, + { + "name": "LICENSE-APACHE", + "path": "LICENSE-APACHE", + "kind": "license" + }, + { + "name": "LICENSE-MIT", + "path": "LICENSE-MIT", + "kind": "license" + }, + { + "name": "README.md", + "path": "README.md", + "kind": "readme" + }, + { + "name": "axolotlsay", + "path": "axolotlsay", + "kind": "executable" + } + ], + "checksum": "axolotlsay-aarch64-apple-darwin.tar.gz.sha256" + }, + "axolotlsay-aarch64-apple-darwin.tar.gz.sha256": { + "name": "axolotlsay-aarch64-apple-darwin.tar.gz.sha256", + "kind": "checksum", + "target_triples": [ + "aarch64-apple-darwin" + ] + }, + "axolotlsay-installer.ps1": { + "name": "axolotlsay-installer.ps1", + "kind": "installer", + "target_triples": [ + "x86_64-pc-windows-msvc" + ], + "install_hint": "irm https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-installer.ps1 | iex", + "description": "Install prebuilt binaries via powershell script" + }, + "axolotlsay-installer.sh": { + "name": "axolotlsay-installer.sh", + "kind": "installer", + "target_triples": [ + "aarch64-apple-darwin", + "x86_64-apple-darwin", + "x86_64-unknown-linux-gnu" + ], + "install_hint": "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-installer.sh | sh", + "description": "Install prebuilt binaries via shell script" + }, + "axolotlsay-x86_64-apple-darwin.tar.gz": { + "name": "axolotlsay-x86_64-apple-darwin.tar.gz", + "kind": "executable-zip", + "target_triples": [ + "x86_64-apple-darwin" + ], + "assets": [ + { + "name": "CHANGELOG.md", + "path": "CHANGELOG.md", + "kind": "changelog" + }, + { + "name": "LICENSE-APACHE", + "path": "LICENSE-APACHE", + "kind": "license" + }, + { + "name": "LICENSE-MIT", + "path": "LICENSE-MIT", + "kind": "license" + }, + { + "name": "README.md", + "path": "README.md", + "kind": "readme" + }, + { + "name": "axolotlsay", + "path": "axolotlsay", + "kind": "executable" + } + ], + "checksum": "axolotlsay-x86_64-apple-darwin.tar.gz.sha256" + }, + "axolotlsay-x86_64-apple-darwin.tar.gz.sha256": { + "name": "axolotlsay-x86_64-apple-darwin.tar.gz.sha256", + "kind": "checksum", + "target_triples": [ + "x86_64-apple-darwin" + ] + }, + "axolotlsay-x86_64-pc-windows-msvc.msi": { + "name": "axolotlsay-x86_64-pc-windows-msvc.msi", + "kind": "installer", + "target_triples": [ + "x86_64-pc-windows-msvc" + ], + "assets": [ + { + "name": "axolotlsay", + "path": "axolotlsay.exe", + "kind": "executable" + } + ], + "description": "install via msi", + "checksum": "axolotlsay-x86_64-pc-windows-msvc.msi.sha256" + }, + "axolotlsay-x86_64-pc-windows-msvc.msi.sha256": { + "name": 
"axolotlsay-x86_64-pc-windows-msvc.msi.sha256", + "kind": "checksum", + "target_triples": [ + "x86_64-pc-windows-msvc" + ] + }, + "axolotlsay-x86_64-pc-windows-msvc.tar.gz": { + "name": "axolotlsay-x86_64-pc-windows-msvc.tar.gz", + "kind": "executable-zip", + "target_triples": [ + "x86_64-pc-windows-msvc" + ], + "assets": [ + { + "name": "CHANGELOG.md", + "path": "CHANGELOG.md", + "kind": "changelog" + }, + { + "name": "LICENSE-APACHE", + "path": "LICENSE-APACHE", + "kind": "license" + }, + { + "name": "LICENSE-MIT", + "path": "LICENSE-MIT", + "kind": "license" + }, + { + "name": "README.md", + "path": "README.md", + "kind": "readme" + }, + { + "name": "axolotlsay", + "path": "axolotlsay.exe", + "kind": "executable" + } + ], + "checksum": "axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256" + }, + "axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256": { + "name": "axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256", + "kind": "checksum", + "target_triples": [ + "x86_64-pc-windows-msvc" + ] + }, + "axolotlsay-x86_64-unknown-linux-gnu.tar.gz": { + "name": "axolotlsay-x86_64-unknown-linux-gnu.tar.gz", + "kind": "executable-zip", + "target_triples": [ + "x86_64-unknown-linux-gnu" + ], + "assets": [ + { + "name": "CHANGELOG.md", + "path": "CHANGELOG.md", + "kind": "changelog" + }, + { + "name": "LICENSE-APACHE", + "path": "LICENSE-APACHE", + "kind": "license" + }, + { + "name": "LICENSE-MIT", + "path": "LICENSE-MIT", + "kind": "license" + }, + { + "name": "README.md", + "path": "README.md", + "kind": "readme" + }, + { + "name": "axolotlsay", + "path": "axolotlsay", + "kind": "executable" + } + ], + "checksum": "axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256" + }, + "axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256": { + "name": "axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256", + "kind": "checksum", + "target_triples": [ + "x86_64-unknown-linux-gnu" + ] + } + }, + "publish_prereleases": false, + "ci": { + "github": { + "artifacts_matrix": { + "include": [ + { + "runner": "macos-11", + "install_dist": "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh", + "dist_args": "--artifacts=local --target=aarch64-apple-darwin" + }, + { + "runner": "macos-11", + "install_dist": "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh", + "dist_args": "--artifacts=local --target=x86_64-apple-darwin" + }, + { + "runner": "windows-2019", + "install_dist": "irm https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.ps1 | iex", + "dist_args": "--artifacts=local --target=x86_64-pc-windows-msvc" + }, + { + "runner": "ubuntu-20.04", + "install_dist": "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh", + "dist_args": "--artifacts=local --target=x86_64-unknown-linux-gnu" + } + ] + }, + "pr_run_mode": "plan" + } + } +} + +================ github-ci.yml ================ +# Copyright 2022-2023, axodotdev +# SPDX-License-Identifier: MIT or Apache-2.0 +# +# CI that: +# +# * checks for a Git Tag that looks like a release +# * builds artifacts with cargo-dist (executable-zips, installers, hashes) +# * uploads those artifacts to temporary workflow zip +# * on success, uploads the artifacts to a Github Release™ +# +# Note that the Github Release™ will be created with a generated +# title/body based on your changelogs. 
+name: Release + +permissions: + contents: write + +# This task will run whenever you push a git tag that looks like a version +# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. +# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where +# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION +# must be a Cargo-style SemVer Version (must have at least major.minor.patch). +# +# If PACKAGE_NAME is specified, then the release will be for that +# package (erroring out if it doesn't have the given version or isn't cargo-dist-able). +# +# If PACKAGE_NAME isn't specified, then the release will be for all +# (cargo-dist-able) packages in the workspace with that version (this mode is +# intended for workspaces with only one dist-able package, or with all dist-able +# packages versioned/released in lockstep). +# +# If you push multiple tags at once, separate instances of this workflow will +# spin up, creating an independent Github Release™ for each one. However Github +# will hard limit this to 3 tags per commit, as it will assume more tags is a +# mistake. +# +# If there's a prerelease-style suffix to the version, then the Github Release™ +# will be marked as a prerelease. +on: + push: + tags: + - '**[0-9]+.[0-9]+.[0-9]+*' + pull_request: + +jobs: + # Run 'cargo dist plan' to determine what tasks we need to do + plan: + runs-on: ubuntu-latest + outputs: + val: ${{ steps.plan.outputs.manifest }} + tag: ${{ !github.event.pull_request && github.ref_name || '' }} + tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} + publishing: ${{ !github.event.pull_request }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - name: Install cargo-dist + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh" + - id: plan + run: | + cargo dist plan ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} --output-format=json > dist-manifest.json + echo "cargo dist plan ran successfully" + cat dist-manifest.json + echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: dist-manifest.json + + # Build and packages all the platform-specific things + upload-local-artifacts: + # Let the initial task tell us to not run (currently very blunt) + needs: plan + if: ${{ fromJson(needs.plan.outputs.val).releases != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} + strategy: + fail-fast: false + # Target platforms/runners are computed by cargo-dist in create-release. 
+ # Each member of the matrix has the following arguments: + # + # - runner: the github runner + # - dist-args: cli flags to pass to cargo dist + # - install-dist: expression to run to install cargo-dist on the runner + # + # Typically there will be: + # - 1 "global" task that builds universal installers + # - N "local" tasks that build each platform's binaries and platform-specific installers + matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} + runs-on: ${{ matrix.runner }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - uses: swatinem/rust-cache@v2 + - name: Install cargo-dist + run: ${{ matrix.install_dist }} + - id: cargo-dist + # We force bash here just because github makes it really hard to get values up + # to "real" actions without writing to env-vars, and writing to env-vars has + # inconsistent syntax between shell and powershell. cargo-dist and jq work fine + # in powershell. + shell: bash + run: | + # Actually do builds and make zips and whatnot + cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json ${{ matrix.dist_args }} > dist-manifest.json + echo "cargo dist ran successfully" + + # Parse out what we just built and upload it to the Github Release™ + echo "paths<> "$GITHUB_OUTPUT" + jq --raw-output ".artifacts[]?.path | select( . != null )" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + - name: "Upload artifacts" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: ${{ steps.cargo-dist.outputs.paths }} + + # Build and package all the platform-agnostic(ish) things + upload-global-artifacts: + needs: [plan, upload-local-artifacts] + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - name: Install cargo-dist + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh" + # Get all the local artifacts for the global tasks to use (for e.g. checksums) + - name: Fetch local artifacts + uses: actions/download-artifact@v3 + with: + name: artifacts + path: target/distrib/ + - id: cargo-dist + shell: bash + run: | + cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json + echo "cargo dist ran successfully" + + # Parse out what we just built and upload it to the Github Release™ + echo "paths<> "$GITHUB_OUTPUT" + jq --raw-output ".artifacts[]?.path | select( . 
!= null )" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + - name: "Upload artifacts" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: ${{ steps.cargo-dist.outputs.paths }} + + # Sign Windows artifacts with ssl.com + sign-windows-artifacts: + needs: + - plan + - upload-local-artifacts + - upload-global-artifacts + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SIGN_DIR_IN: target/distrib/sign-input + SIGN_DIR_OUT: target/distrib/sign-output + steps: + # Get all the artifacts for the signing tasks to use + - name: Fetch local artifacts + uses: actions/download-artifact@v3 + with: + name: artifacts + path: target/distrib/ + # Only try to sign files that the tool can handle + - name: Select Signable Artifacts + run: | + mkdir -p "$SIGN_DIR_IN" + mkdir -p "$SIGN_DIR_OUT" + for file in target/distrib/*.{msi,ps1}; do + [[ -e $file ]] && mv "$file" "$SIGN_DIR_IN" && echo "signing $file"; + done + # Sign the files + - name: Sign Artifacts with CodeSignTool + uses: ssldotcom/esigner-codesign@develop + with: + command: batch_sign + username: ${{ secrets.SSLDOTCOM_USERNAME }} + password: ${{ secrets.SSLDOTCOM_PASSWORD }} + credential_id: ${{ secrets.SSLDOTCOM_CREDENTIAL_ID }} + totp_secret: ${{ secrets.SSLDOTCOM_TOTP_SECRET }} + dir_path: ${{ env.SIGN_DIR_IN }} + output_path: ${{ env.SIGN_DIR_OUT }} + # Set this to TEST for testing (sandbox) and PROD for production + environment_name: TEST + # Regenerate checksum files for things that have been signed + - name: Regenerate Checksums + run: | + pushd "$SIGN_DIR_OUT" + for filename in *; do + echo "checksuming $filename" + sha256sum --binary "$filename" > "$filename.sha256" + done + popd + # Upload the result, overwriting old files + - name: "Upload artifacts" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: ${{ env.SIGN_DIR_OUT }} + + should-publish: + needs: + - plan + - upload-local-artifacts + - upload-global-artifacts + - sign-windows-artifacts + if: ${{ needs.plan.outputs.publishing == 'true' }} + runs-on: ubuntu-latest + steps: + - name: print tag + run: echo "ok we're publishing!" 
+ + # Create a Github Release with all the results once everything is done, + publish-release: + needs: [plan, should-publish] + runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - name: "Download artifacts" + uses: actions/download-artifact@v3 + with: + name: artifacts + path: artifacts + - name: Create Release + uses: ncipollo/release-action@v1 + with: + tag: ${{ needs.plan.outputs.tag }} + name: ${{ fromJson(needs.plan.outputs.val).announcement_title }} + body: ${{ fromJson(needs.plan.outputs.val).announcement_github_body }} + prerelease: ${{ fromJson(needs.plan.outputs.val).announcement_is_prerelease }} + artifacts: "artifacts/*" + +================ main.wxs ================ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + 1 + + + + + + + + + + + + + + + + + + + + diff --git a/cargo-dist/tests/snapshots/axolotlsay_ssldotcom_windows_sign_prod.snap b/cargo-dist/tests/snapshots/axolotlsay_ssldotcom_windows_sign_prod.snap new file mode 100644 index 000000000..dd2d13eb5 --- /dev/null +++ b/cargo-dist/tests/snapshots/axolotlsay_ssldotcom_windows_sign_prod.snap @@ -0,0 +1,1741 @@ +--- +source: cargo-dist/tests/gallery/dist.rs +expression: self.payload +--- +================ installer.sh ================ +#!/bin/sh +# shellcheck shell=dash +# +# Licensed under the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +if [ "$KSH_VERSION" = 'Version JM 93t+ 2010-03-05' ]; then + # The version of ksh93 that ships with many illumos systems does not + # support the "local" extension. Print a message rather than fail in + # subtle ways later on: + echo 'this installer does not work with this ksh93 version; please try bash!' >&2 + exit 1 +fi + +set -u + +APP_NAME="axolotlsay" +APP_VERSION="0.1.0" +ARTIFACT_DOWNLOAD_URL="${INSTALLER_DOWNLOAD_URL:-https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0}" +PRINT_VERBOSE=${INSTALLER_PRINT_VERBOSE:-0} +PRINT_QUIET=${INSTALLER_PRINT_QUIET:-0} +NO_MODIFY_PATH=${INSTALLER_NO_MODIFY_PATH:-0} + +usage() { + # print help (this cat/EOF stuff is a "heredoc" string) + cat <&2 + say_verbose " from $_url" 1>&2 + say_verbose " to $_file" 1>&2 + + ensure mkdir -p "$_dir" + + if ! downloader "$_url" "$_file"; then + say "failed to download $_url" + say "this may be a standard network error, but it may also indicate" + say "that $APP_NAME's release process is not working. When in doubt" + say "please feel free to open an issue!" + exit 1 + fi + + # unpack the archive + case "$_zip_ext" in + ".zip") + ensure unzip -q "$_file" -d "$_dir" + ;; + + ".tar."*) + ensure tar xf "$_file" --strip-components 1 -C "$_dir" + ;; + *) + err "unknown archive format: $_zip_ext" + ;; + esac + + install "$_dir" "$_bins" "$@" + local _retval=$? + + ignore rm -rf "$_dir" + + return "$_retval" +} + +# See discussion of late-bound vs early-bound for why we use single-quotes with env vars +# shellcheck disable=SC2016 +install() { + # This code needs to both compute certain paths for itself to write to, and + # also write them to shell/rc files so that they can look them up to e.g. + # add them to PATH. This requires an active distinction between paths + # and expressions that can compute them. + # + # The distinction lies in when we want env-vars to be evaluated. 
For instance + # if we determine that we want to install to $HOME/.myapp, which do we add + # to e.g. $HOME/.profile: + # + # * early-bound: export PATH="/home/myuser/.myapp:$PATH" + # * late-bound: export PATH="$HOME/.myapp:$PATH" + # + # In this case most people would prefer the late-bound version, but in other + # cases the early-bound version might be a better idea. In particular when using + # other env-vars than $HOME, they are more likely to be only set temporarily + # for the duration of this install script, so it's more advisable to erase their + # existence with early-bounding. + # + # This distinction is handled by "double-quotes" (early) vs 'single-quotes' (late). + # + # This script has a few different variants, the most complex one being the + # CARGO_HOME version which attempts to install things to Cargo's bin dir, + # potentially setting up a minimal version if the user hasn't ever installed Cargo. + # + # In this case we need to: + # + # * Install to $HOME/.cargo/bin/ + # * Create a shell script at $HOME/.cargo/env that: + # * Checks if $HOME/.cargo/bin/ is on PATH + # * and if not prepends it to PATH + # * Edits $HOME/.profile to run $HOME/.cargo/env (if the line doesn't exist) + # + # To do this we need these 4 values: + + # The actual path we're going to install to + local _install_dir + # Path to the an shell script that adds install_dir to PATH + local _env_script_path + # Potentially-late-bound version of install_dir to write env_script + local _install_dir_expr + # Potentially-late-bound version of env_script_path to write to rcfiles like $HOME/.profile + local _env_script_path_expr + + + # first try CARGO_HOME, then fallback to HOME + if [ -n "${CARGO_HOME:-}" ]; then + _install_dir="$CARGO_HOME/bin" + _env_script_path="$CARGO_HOME/env" + # If CARGO_HOME was set but it ended up being the default $HOME-based path, + # then keep things late-bound. Otherwise bake the value for safety. + # This is what rustup does, and accurately reproducing it is useful. + if [ -n "${HOME:-}" ]; then + if [ "$HOME/.cargo/bin" = "$_install_dir" ]; then + _install_dir_expr='$HOME/.cargo/bin' + _env_script_path_expr='$HOME/.cargo/env' + else + _install_dir_expr="$_install_dir" + _env_script_path_expr="$_env_script_path" + fi + else + _install_dir_expr="$_install_dir" + _env_script_path_expr="$_env_script_path" + fi + elif [ -n "${HOME:-}" ]; then + _install_dir="$HOME/.cargo/bin" + _env_script_path="$HOME/.cargo/env" + _install_dir_expr='$HOME/.cargo/bin' + _env_script_path_expr='$HOME/.cargo/env' + else + err "could not find your CARGO_HOME or HOME dir to install binaries to" + fi + + say "installing to $_install_dir" + ensure mkdir -p "$_install_dir" + + # copy all the binaries to the install dir + local _src_dir="$1" + local _bins="$2" + for _bin_name in $_bins; do + local _bin="$_src_dir/$_bin_name" + ensure cp "$_bin" "$_install_dir" + # unzip seems to need this chmod + ensure chmod +x "$_install_dir/$_bin_name" + say " $_bin_name" + done + + say "everything's installed!" + + if [ "0" = "$NO_MODIFY_PATH" ]; then + add_install_dir_to_path "$_install_dir_expr" "$_env_script_path" "$_env_script_path_expr" + fi +} + +add_install_dir_to_path() { + # Edit rcfiles ($HOME/.profile) to add install_dir to $PATH + # + # We do this slightly indirectly by creating an "env" shell script which checks if install_dir + # is on $PATH already, and prepends it if not. The actual line we then add to rcfiles + # is to just source that script. 
This allows us to blast it into lots of different rcfiles and + # have it run multiple times without causing problems. It's also specifically compatible + # with the system rustup uses, so that we don't conflict with it. + local _install_dir_expr="$1" + local _env_script_path="$2" + local _env_script_path_expr="$3" + if [ -n "${HOME:-}" ]; then + local _rcfile="$HOME/.profile" + # `source x` is an alias for `. x`, and the latter is more portable/actually-posix. + # This apparently comes up a lot on freebsd. It's easy enough to always add + # the more robust line to rcfiles, but when telling the user to apply the change + # to their current shell ". x" is pretty easy to misread/miscopy, so we use the + # prettier "source x" line there. Hopefully people with Weird Shells are aware + # this is a thing and know to tweak it (or just restart their shell). + local _robust_line=". \"$_env_script_path_expr\"" + local _pretty_line="source \"$_env_script_path_expr\"" + + # Add the env script if it doesn't already exist + if [ ! -f "$_env_script_path" ]; then + say_verbose "creating $_env_script_path" + write_env_script "$_install_dir_expr" "$_env_script_path" + else + say_verbose "$_env_script_path already exists" + fi + + # Check if the line is already in the rcfile + # grep: 0 if matched, 1 if no match, and 2 if an error occurred + # + # Ideally we could use quiet grep (-q), but that makes "match" and "error" + # have the same behaviour, when we want "no match" and "error" to be the same + # (on error we want to create the file, which >> conveniently does) + # + # We search for both kinds of line here just to do the right thing in more cases. + if ! grep -F "$_robust_line" "$_rcfile" > /dev/null 2>/dev/null && \ + ! grep -F "$_pretty_line" "$_rcfile" > /dev/null 2>/dev/null + then + # If the script now exists, add the line to source it to the rcfile + # (This will also create the rcfile if it doesn't exist) + if [ -f "$_env_script_path" ]; then + say_verbose "adding $_robust_line to $_rcfile" + ensure echo "$_robust_line" >> "$_rcfile" + say "" + say "To add $_install_dir_expr to your PATH, either restart your shell or run:" + say "" + say " $_pretty_line" + fi + else + say_verbose "$_install_dir already on PATH" + fi + fi +} + +write_env_script() { + # write this env script to the given path (this cat/EOF stuff is a "heredoc" string) + local _install_dir_expr="$1" + local _env_script_path="$2" + ensure cat < "$_env_script_path" +#!/bin/sh +# add binaries to PATH if they aren't added yet +# affix colons on either side of \$PATH to simplify matching +case ":\${PATH}:" in + *:"$_install_dir_expr":*) + ;; + *) + # Prepending path in case a system-installed binary needs to be overridden + export PATH="$_install_dir_expr:\$PATH" + ;; +esac +EOF +} + +check_proc() { + # Check for /proc by looking for the /proc/self/exe link + # This is only run on Linux + if ! test -L /proc/self/exe ; then + err "fatal: Unable to find /proc/self/exe. Is /proc mounted? Installation cannot proceed without /proc." + fi +} + +get_bitness() { + need_cmd head + # Architecture detection without dependencies beyond coreutils. + # ELF files start out "\x7fELF", and the following byte is + # 0x01 for 32-bit and + # 0x02 for 64-bit. + # The printf builtin on some shells like dash only supports octal + # escape sequences, so we use those. 
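+    # For example, on a 64-bit machine the first five bytes of /proc/self/exe are
+    # 7f 45 4c 46 02 ("\x7fELF" followed by EI_CLASS = 2), which is exactly what the
+    # "$(printf '\177ELF\002')" comparison below reconstructs ('\177' is octal for 0x7f).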
+ local _current_exe_head + _current_exe_head=$(head -c 5 /proc/self/exe ) + if [ "$_current_exe_head" = "$(printf '\177ELF\001')" ]; then + echo 32 + elif [ "$_current_exe_head" = "$(printf '\177ELF\002')" ]; then + echo 64 + else + err "unknown platform bitness" + fi +} + +is_host_amd64_elf() { + need_cmd head + need_cmd tail + # ELF e_machine detection without dependencies beyond coreutils. + # Two-byte field at offset 0x12 indicates the CPU, + # but we're interested in it being 0x3E to indicate amd64, or not that. + local _current_exe_machine + _current_exe_machine=$(head -c 19 /proc/self/exe | tail -c 1) + [ "$_current_exe_machine" = "$(printf '\076')" ] +} + +get_endianness() { + local cputype=$1 + local suffix_eb=$2 + local suffix_el=$3 + + # detect endianness without od/hexdump, like get_bitness() does. + need_cmd head + need_cmd tail + + local _current_exe_endianness + _current_exe_endianness="$(head -c 6 /proc/self/exe | tail -c 1)" + if [ "$_current_exe_endianness" = "$(printf '\001')" ]; then + echo "${cputype}${suffix_el}" + elif [ "$_current_exe_endianness" = "$(printf '\002')" ]; then + echo "${cputype}${suffix_eb}" + else + err "unknown platform endianness" + fi +} + +get_architecture() { + local _ostype + local _cputype + _ostype="$(uname -s)" + _cputype="$(uname -m)" + local _clibtype="gnu" + + if [ "$_ostype" = Linux ]; then + if [ "$(uname -o)" = Android ]; then + _ostype=Android + fi + if ldd --version 2>&1 | grep -q 'musl'; then + _clibtype="musl" + fi + fi + + if [ "$_ostype" = Darwin ] && [ "$_cputype" = i386 ]; then + # Darwin `uname -m` lies + if sysctl hw.optional.x86_64 | grep -q ': 1'; then + _cputype=x86_64 + fi + fi + + if [ "$_ostype" = SunOS ]; then + # Both Solaris and illumos presently announce as "SunOS" in "uname -s" + # so use "uname -o" to disambiguate. We use the full path to the + # system uname in case the user has coreutils uname first in PATH, + # which has historically sometimes printed the wrong value here. + if [ "$(/usr/bin/uname -o)" = illumos ]; then + _ostype=illumos + fi + + # illumos systems have multi-arch userlands, and "uname -m" reports the + # machine hardware name; e.g., "i86pc" on both 32- and 64-bit x86 + # systems. 
Check for the native (widest) instruction set on the + # running kernel: + if [ "$_cputype" = i86pc ]; then + _cputype="$(isainfo -n)" + fi + fi + + case "$_ostype" in + + Android) + _ostype=linux-android + ;; + + Linux) + check_proc + _ostype=unknown-linux-$_clibtype + _bitness=$(get_bitness) + ;; + + FreeBSD) + _ostype=unknown-freebsd + ;; + + NetBSD) + _ostype=unknown-netbsd + ;; + + DragonFly) + _ostype=unknown-dragonfly + ;; + + Darwin) + _ostype=apple-darwin + ;; + + illumos) + _ostype=unknown-illumos + ;; + + MINGW* | MSYS* | CYGWIN* | Windows_NT) + _ostype=pc-windows-gnu + ;; + + *) + err "unrecognized OS type: $_ostype" + ;; + + esac + + case "$_cputype" in + + i386 | i486 | i686 | i786 | x86) + _cputype=i686 + ;; + + xscale | arm) + _cputype=arm + if [ "$_ostype" = "linux-android" ]; then + _ostype=linux-androideabi + fi + ;; + + armv6l) + _cputype=arm + if [ "$_ostype" = "linux-android" ]; then + _ostype=linux-androideabi + else + _ostype="${_ostype}eabihf" + fi + ;; + + armv7l | armv8l) + _cputype=armv7 + if [ "$_ostype" = "linux-android" ]; then + _ostype=linux-androideabi + else + _ostype="${_ostype}eabihf" + fi + ;; + + aarch64 | arm64) + _cputype=aarch64 + ;; + + x86_64 | x86-64 | x64 | amd64) + _cputype=x86_64 + ;; + + mips) + _cputype=$(get_endianness mips '' el) + ;; + + mips64) + if [ "$_bitness" -eq 64 ]; then + # only n64 ABI is supported for now + _ostype="${_ostype}abi64" + _cputype=$(get_endianness mips64 '' el) + fi + ;; + + ppc) + _cputype=powerpc + ;; + + ppc64) + _cputype=powerpc64 + ;; + + ppc64le) + _cputype=powerpc64le + ;; + + s390x) + _cputype=s390x + ;; + riscv64) + _cputype=riscv64gc + ;; + loongarch64) + _cputype=loongarch64 + ;; + *) + err "unknown CPU type: $_cputype" + + esac + + # Detect 64-bit linux with 32-bit userland + if [ "${_ostype}" = unknown-linux-gnu ] && [ "${_bitness}" -eq 32 ]; then + case $_cputype in + x86_64) + # 32-bit executable for amd64 = x32 + if is_host_amd64_elf; then { + err "x32 linux unsupported" + }; else + _cputype=i686 + fi + ;; + mips64) + _cputype=$(get_endianness mips '' el) + ;; + powerpc64) + _cputype=powerpc + ;; + aarch64) + _cputype=armv7 + if [ "$_ostype" = "linux-android" ]; then + _ostype=linux-androideabi + else + _ostype="${_ostype}eabihf" + fi + ;; + riscv64gc) + err "riscv64 with 32-bit userland unsupported" + ;; + esac + fi + + # treat armv7 systems without neon as plain arm + if [ "$_ostype" = "unknown-linux-gnueabihf" ] && [ "$_cputype" = armv7 ]; then + if ensure grep '^Features' /proc/cpuinfo | grep -q -v neon; then + # At least one processor does not have NEON. + _cputype=arm + fi + fi + + _arch="${_cputype}-${_ostype}" + + RETVAL="$_arch" +} + +say() { + if [ "0" = "$PRINT_QUIET" ]; then + echo "$1" + fi +} + +say_verbose() { + if [ "1" = "$PRINT_VERBOSE" ]; then + echo "$1" + fi +} + +err() { + if [ "0" = "$PRINT_QUIET" ]; then + local red + local reset + red=$(tput setaf 1 2>/dev/null || echo '') + reset=$(tput sgr0 2>/dev/null || echo '') + say "${red}ERROR${reset}: $1" >&2 + fi + exit 1 +} + +need_cmd() { + if ! check_cmd "$1" + then err "need '$1' (command not found)" + fi +} + +check_cmd() { + command -v "$1" > /dev/null 2>&1 + return $? +} + +assert_nz() { + if [ -z "$1" ]; then err "assert_nz $2"; fi +} + +# Run a command that should never fail. If the command fails execution +# will immediately terminate with an error showing the failing +# command. +ensure() { + if ! 
"$@"; then err "command failed: $*"; fi +} + +# This is just for indicating that commands' results are being +# intentionally ignored. Usually, because it's being executed +# as part of error handling. +ignore() { + "$@" +} + +# This wraps curl or wget. Try curl first, if not installed, +# use wget instead. +downloader() { + if check_cmd curl + then _dld=curl + elif check_cmd wget + then _dld=wget + else _dld='curl or wget' # to be used in error message of need_cmd + fi + + if [ "$1" = --check ] + then need_cmd "$_dld" + elif [ "$_dld" = curl ] + then curl -sSfL "$1" -o "$2" + elif [ "$_dld" = wget ] + then wget "$1" -O "$2" + else err "Unknown downloader" # should not reach here + fi +} + +download_binary_and_run_installer "$@" || exit 1 + +================ installer.ps1 ================ +# Licensed under the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +<# +.SYNOPSIS + +The installer for axolotlsay 0.1.0 + +.DESCRIPTION + +This script detects what platform you're on and fetches an appropriate archive from +https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0 +then unpacks the binaries and installs them to $env:CARGO_HOME\bin ($HOME\.cargo\bin) + +It will then add that dir to PATH by editing your Environment.Path registry key + +.PARAMETER ArtifactDownloadUrl +The URL of the directory where artifacts can be fetched from + +.PARAMETER NoModifyPath +Don't add the install directory to PATH + +.PARAMETER Help +Print help + +#> + +param ( + [Parameter(HelpMessage = "The URL of the directory where artifacts can be fetched from")] + [string]$ArtifactDownloadUrl = 'https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0', + [Parameter(HelpMessage = "Don't add the install directory to PATH")] + [switch]$NoModifyPath, + [Parameter(HelpMessage = "Print Help")] + [switch]$Help +) + +$app_name = 'axolotlsay' +$app_version = '0.1.0' + +function Install-Binary($install_args) { + if ($Help) { + Get-Help $PSCommandPath -Detailed + Exit + } + $old_erroractionpreference = $ErrorActionPreference + $ErrorActionPreference = 'stop' + + Initialize-Environment + + # Platform info injected by cargo-dist + $platforms = @{ + "x86_64-pc-windows-msvc" = @{ + "artifact_name" = "axolotlsay-x86_64-pc-windows-msvc.tar.gz" + "bins" = "axolotlsay.exe" + "zip_ext" = ".tar.gz" + } + } + + $fetched = Download "$ArtifactDownloadUrl" $platforms + # FIXME: add a flag that lets the user not do this step + Invoke-Installer $fetched "$install_args" + + $ErrorActionPreference = $old_erroractionpreference +} + +function Get-TargetTriple() { + try { + # NOTE: this might return X64 on ARM64 Windows, which is OK since emulation is available. + # It works correctly starting in PowerShell Core 7.3 and Windows PowerShell in Win 11 22H2. + # Ideally this would just be + # [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture + # but that gets a type from the wrong assembly on Windows PowerShell (i.e. 
not Core) + $a = [System.Reflection.Assembly]::LoadWithPartialName("System.Runtime.InteropServices.RuntimeInformation") + $t = $a.GetType("System.Runtime.InteropServices.RuntimeInformation") + $p = $t.GetProperty("OSArchitecture") + # Possible OSArchitecture Values: https://learn.microsoft.com/dotnet/api/system.runtime.interopservices.architecture + # Rust supported platforms: https://doc.rust-lang.org/stable/rustc/platform-support.html + switch ($p.GetValue($null).ToString()) + { + "X86" { return "i686-pc-windows-msvc" } + "X64" { return "x86_64-pc-windows-msvc" } + "Arm" { return "thumbv7a-pc-windows-msvc" } + "Arm64" { return "aarch64-pc-windows-msvc" } + } + } catch { + # The above was added in .NET 4.7.1, so Windows PowerShell in versions of Windows + # prior to Windows 10 v1709 may not have this API. + Write-Verbose "Get-TargetTriple: Exception when trying to determine OS architecture." + Write-Verbose $_ + } + + # This is available in .NET 4.0. We already checked for PS 5, which requires .NET 4.5. + Write-Verbose("Get-TargetTriple: falling back to Is64BitOperatingSystem.") + if ([System.Environment]::Is64BitOperatingSystem) { + return "x86_64-pc-windows-msvc" + } else { + return "i686-pc-windows-msvc" + } +} + +function Download($download_url, $platforms) { + $arch = Get-TargetTriple + + if (-not $platforms.ContainsKey($arch)) { + # X64 is well-supported, including in emulation on ARM64 + Write-Verbose "$arch is not availablem falling back to X64" + $arch = "x86_64-pc-windows-msvc" + } + + if (-not $platforms.ContainsKey($arch)) { + # should not be possible, as currently we always produce X64 binaries. + $platforms_json = ConvertTo-Json $platforms + throw "ERROR: could not find binaries for this platform. Last platform tried: $arch platform info: $platforms_json" + } + + # Lookup what we expect this platform to look like + $info = $platforms[$arch] + $zip_ext = $info["zip_ext"] + $bin_names = $info["bins"] + $artifact_name = $info["artifact_name"] + + # Make a new temp dir to unpack things to + $tmp = New-Temp-Dir + $dir_path = "$tmp\$app_name$zip_ext" + + # Download and unpack! + $url = "$download_url/$artifact_name" + Write-Information "Downloading $app_name $app_version ($arch)" + Write-Verbose " from $url" + Write-Verbose " to $dir_path" + $wc = New-Object Net.Webclient + $wc.downloadFile($url, $dir_path) + + Write-Verbose "Unpacking to $tmp" + + # Select the tool to unpack the files with. + # + # As of windows 10(?), powershell comes with tar preinstalled, but in practice + # it only seems to support .tar.gz, and not xz/zstd. Still, we should try to + # forward all tars to it in case the user has a machine that can handle it! 
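+    # Concretely: a ".tar.gz" artifact takes the tar branch below, a ".zip" takes
+    # Expand-Archive, and any other extension falls through to the Default case and
+    # throws rather than being silently skipped.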
+ switch -Wildcard ($zip_ext) { + ".zip" { + Expand-Archive -Path $dir_path -DestinationPath "$tmp"; + Break + } + ".tar.*" { + tar xf $dir_path --strip-components 1 -C "$tmp"; + Break + } + Default { + throw "ERROR: unknown archive format $zip_ext" + } + } + + # Let the next step know what to copy + $bin_paths = @() + foreach ($bin_name in $bin_names) { + Write-Verbose " Unpacked $bin_name" + $bin_paths += "$tmp\$bin_name" + } + return $bin_paths +} + +function Invoke-Installer($bin_paths) { + + # first try CARGO_HOME, then fallback to HOME + # (for whatever reason $HOME is not a normal env var and doesn't need the $env: prefix) + $dest_dir = if (($base_dir = $env:CARGO_HOME)) { + Join-Path $base_dir "bin" + } elseif (($base_dir = $HOME)) { + Join-Path $base_dir ".cargo\bin" + } else { + throw "ERROR: could not find your HOME dir or CARGO_HOME to install binaries to" + } + + $dest_dir = New-Item -Force -ItemType Directory -Path $dest_dir + Write-Information "Installing to $dest_dir" + # Just copy the binaries from the temp location to the install dir + foreach ($bin_path in $bin_paths) { + $installed_file = Split-Path -Path "$bin_path" -Leaf + Copy-Item "$bin_path" -Destination "$dest_dir" + Remove-Item "$bin_path" -Recurse -Force + Write-Information " $installed_file" + } + + Write-Information "Everything's installed!" + if (-not $NoModifyPath) { + if (Add-Path $dest_dir) { + Write-Information "" + Write-Information "$dest_dir was added to your PATH, you may need to restart your shell for that to take effect." + } + } +} + +# Try to add the given path to PATH via the registry +# +# Returns true if the registry was modified, otherwise returns false +# (indicating it was already on PATH) +function Add-Path($OrigPathToAdd) { + $RegistryPath = "HKCU:\Environment" + $PropertyName = "Path" + $PathToAdd = $OrigPathToAdd + + $Item = if (Test-Path $RegistryPath) { + # If the registry key exists, get it + Get-Item -Path $RegistryPath + } else { + # If the registry key doesn't exist, create it + Write-Verbose "Creating $RegistryPath" + New-Item -Path $RegistryPath -Force + } + + $OldPath = "" + try { + # Try to get the old PATH value. If that fails, assume we're making it from scratch. + # Otherwise assume there's already paths in here and use a ; separator + $OldPath = $Item | Get-ItemPropertyValue -Name $PropertyName + $PathToAdd = "$PathToAdd;" + } catch { + # We'll be creating the PATH from scratch + Write-Verbose "Adding $PropertyName Property to $RegistryPath" + } + + # Check if the path is already there + # + # We don't want to incorrectly match "C:\blah\" to "C:\blah\blah\", so we include the semicolon + # delimiters when searching, ensuring exact matches. To avoid corner cases we add semicolons to + # both sides of the input, allowing us to pretend we're always in the middle of a list. + if (";$OldPath;" -like "*;$OrigPathToAdd;*") { + # Already on path, nothing to do + Write-Verbose "install dir already on PATH, all done!" + return $false + } else { + # Actually update PATH + Write-Verbose "Adding $OrigPathToAdd to your PATH" + $NewPath = $PathToAdd + $OldPath + # We use -Force here to make the value already existing not be an error + $Item | New-ItemProperty -Name $PropertyName -Value $NewPath -PropertyType String -Force | Out-Null + return $true + } +} + +function Initialize-Environment() { + If (($PSVersionTable.PSVersion.Major) -lt 5) { + Write-Error "PowerShell 5 or later is required to install $app_name." 
+ Write-Error "Upgrade PowerShell: https://docs.microsoft.com/en-us/powershell/scripting/setup/installing-windows-powershell" + break + } + + # show notification to change execution policy: + $allowedExecutionPolicy = @('Unrestricted', 'RemoteSigned', 'ByPass') + If ((Get-ExecutionPolicy).ToString() -notin $allowedExecutionPolicy) { + Write-Error "PowerShell requires an execution policy in [$($allowedExecutionPolicy -join ", ")] to run $app_name." + Write-Error "For example, to set the execution policy to 'RemoteSigned' please run :" + Write-Error "'Set-ExecutionPolicy RemoteSigned -scope CurrentUser'" + break + } + + # GitHub requires TLS 1.2 + If ([System.Enum]::GetNames([System.Net.SecurityProtocolType]) -notcontains 'Tls12') { + Write-Error "Installing $app_name requires at least .NET Framework 4.5" + Write-Error "Please download and install it first:" + Write-Error "https://www.microsoft.com/net/download" + break + } +} + +function New-Temp-Dir() { + [CmdletBinding(SupportsShouldProcess)] + param() + $parent = [System.IO.Path]::GetTempPath() + [string] $name = [System.Guid]::NewGuid() + New-Item -ItemType Directory -Path (Join-Path $parent $name) +} + +# PSScriptAnalyzer doesn't like how we use our params as globals, this calms it +$Null = $ArtifactDownloadUrl, $NoModifyPath, $Help +# Make Write-Information statements be visible +$InformationPreference = "Continue" +Install-Binary "$Args" + +================ dist-manifest.json ================ +{ + "dist_version": "CENSORED", + "announcement_tag": "v0.1.0", + "announcement_is_prerelease": false, + "announcement_title": "Version 0.1.0", + "announcement_changelog": "```text\n +------------------------+\n | the initial release!!! |\n +------------------------+\n /\n≽(◕ ᴗ ◕)≼\n```", + "announcement_github_body": "## Release Notes\n\n```text\n +------------------------+\n | the initial release!!! 
|\n +------------------------+\n /\n≽(◕ ᴗ ◕)≼\n```\n\n## Install axolotlsay 0.1.0\n\n### Install prebuilt binaries via shell script\n\n```sh\ncurl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-installer.sh | sh\n```\n\n### Install prebuilt binaries via powershell script\n\n```sh\nirm https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-installer.ps1 | iex\n```\n\n## Download axolotlsay 0.1.0\n\n| File | Platform | Checksum |\n|--------|----------|----------|\n| [axolotlsay-aarch64-apple-darwin.tar.gz](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-aarch64-apple-darwin.tar.gz) | macOS Apple Silicon | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-aarch64-apple-darwin.tar.gz.sha256) |\n| [axolotlsay-x86_64-apple-darwin.tar.gz](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-apple-darwin.tar.gz) | macOS Intel | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-apple-darwin.tar.gz.sha256) |\n| [axolotlsay-x86_64-pc-windows-msvc.tar.gz](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-pc-windows-msvc.tar.gz) | Windows x64 | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256) |\n| [axolotlsay-x86_64-unknown-linux-gnu.tar.gz](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-unknown-linux-gnu.tar.gz) | Linux x64 | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256) |\n| [axolotlsay-x86_64-pc-windows-msvc.msi](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-pc-windows-msvc.msi) | Windows x64 | [checksum](https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-x86_64-pc-windows-msvc.msi.sha256) |\n\n", + "system_info": { + "cargo_version_line": "CENSORED" + }, + "releases": [ + { + "app_name": "axolotlsay", + "app_version": "0.1.0", + "artifacts": [ + "axolotlsay-installer.sh", + "axolotlsay-installer.ps1", + "axolotlsay-aarch64-apple-darwin.tar.gz", + "axolotlsay-aarch64-apple-darwin.tar.gz.sha256", + "axolotlsay-x86_64-apple-darwin.tar.gz", + "axolotlsay-x86_64-apple-darwin.tar.gz.sha256", + "axolotlsay-x86_64-pc-windows-msvc.tar.gz", + "axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256", + "axolotlsay-x86_64-pc-windows-msvc.msi", + "axolotlsay-x86_64-pc-windows-msvc.msi.sha256", + "axolotlsay-x86_64-unknown-linux-gnu.tar.gz", + "axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256" + ] + } + ], + "artifacts": { + "axolotlsay-aarch64-apple-darwin.tar.gz": { + "name": "axolotlsay-aarch64-apple-darwin.tar.gz", + "kind": "executable-zip", + "target_triples": [ + "aarch64-apple-darwin" + ], + "assets": [ + { + "name": "CHANGELOG.md", + "path": "CHANGELOG.md", + "kind": "changelog" + }, + { + "name": "LICENSE-APACHE", + "path": "LICENSE-APACHE", + "kind": "license" + }, + { + "name": "LICENSE-MIT", + "path": "LICENSE-MIT", + "kind": "license" + }, + { + "name": "README.md", + "path": "README.md", + "kind": "readme" + }, + { + "name": "axolotlsay", + "path": "axolotlsay", + "kind": "executable" + } + ], + "checksum": "axolotlsay-aarch64-apple-darwin.tar.gz.sha256" + }, + "axolotlsay-aarch64-apple-darwin.tar.gz.sha256": { + "name": "axolotlsay-aarch64-apple-darwin.tar.gz.sha256", + "kind": "checksum", + 
"target_triples": [ + "aarch64-apple-darwin" + ] + }, + "axolotlsay-installer.ps1": { + "name": "axolotlsay-installer.ps1", + "kind": "installer", + "target_triples": [ + "x86_64-pc-windows-msvc" + ], + "install_hint": "irm https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-installer.ps1 | iex", + "description": "Install prebuilt binaries via powershell script" + }, + "axolotlsay-installer.sh": { + "name": "axolotlsay-installer.sh", + "kind": "installer", + "target_triples": [ + "aarch64-apple-darwin", + "x86_64-apple-darwin", + "x86_64-unknown-linux-gnu" + ], + "install_hint": "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/axolotlsay/releases/download/v0.1.0/axolotlsay-installer.sh | sh", + "description": "Install prebuilt binaries via shell script" + }, + "axolotlsay-x86_64-apple-darwin.tar.gz": { + "name": "axolotlsay-x86_64-apple-darwin.tar.gz", + "kind": "executable-zip", + "target_triples": [ + "x86_64-apple-darwin" + ], + "assets": [ + { + "name": "CHANGELOG.md", + "path": "CHANGELOG.md", + "kind": "changelog" + }, + { + "name": "LICENSE-APACHE", + "path": "LICENSE-APACHE", + "kind": "license" + }, + { + "name": "LICENSE-MIT", + "path": "LICENSE-MIT", + "kind": "license" + }, + { + "name": "README.md", + "path": "README.md", + "kind": "readme" + }, + { + "name": "axolotlsay", + "path": "axolotlsay", + "kind": "executable" + } + ], + "checksum": "axolotlsay-x86_64-apple-darwin.tar.gz.sha256" + }, + "axolotlsay-x86_64-apple-darwin.tar.gz.sha256": { + "name": "axolotlsay-x86_64-apple-darwin.tar.gz.sha256", + "kind": "checksum", + "target_triples": [ + "x86_64-apple-darwin" + ] + }, + "axolotlsay-x86_64-pc-windows-msvc.msi": { + "name": "axolotlsay-x86_64-pc-windows-msvc.msi", + "kind": "installer", + "target_triples": [ + "x86_64-pc-windows-msvc" + ], + "assets": [ + { + "name": "axolotlsay", + "path": "axolotlsay.exe", + "kind": "executable" + } + ], + "description": "install via msi", + "checksum": "axolotlsay-x86_64-pc-windows-msvc.msi.sha256" + }, + "axolotlsay-x86_64-pc-windows-msvc.msi.sha256": { + "name": "axolotlsay-x86_64-pc-windows-msvc.msi.sha256", + "kind": "checksum", + "target_triples": [ + "x86_64-pc-windows-msvc" + ] + }, + "axolotlsay-x86_64-pc-windows-msvc.tar.gz": { + "name": "axolotlsay-x86_64-pc-windows-msvc.tar.gz", + "kind": "executable-zip", + "target_triples": [ + "x86_64-pc-windows-msvc" + ], + "assets": [ + { + "name": "CHANGELOG.md", + "path": "CHANGELOG.md", + "kind": "changelog" + }, + { + "name": "LICENSE-APACHE", + "path": "LICENSE-APACHE", + "kind": "license" + }, + { + "name": "LICENSE-MIT", + "path": "LICENSE-MIT", + "kind": "license" + }, + { + "name": "README.md", + "path": "README.md", + "kind": "readme" + }, + { + "name": "axolotlsay", + "path": "axolotlsay.exe", + "kind": "executable" + } + ], + "checksum": "axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256" + }, + "axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256": { + "name": "axolotlsay-x86_64-pc-windows-msvc.tar.gz.sha256", + "kind": "checksum", + "target_triples": [ + "x86_64-pc-windows-msvc" + ] + }, + "axolotlsay-x86_64-unknown-linux-gnu.tar.gz": { + "name": "axolotlsay-x86_64-unknown-linux-gnu.tar.gz", + "kind": "executable-zip", + "target_triples": [ + "x86_64-unknown-linux-gnu" + ], + "assets": [ + { + "name": "CHANGELOG.md", + "path": "CHANGELOG.md", + "kind": "changelog" + }, + { + "name": "LICENSE-APACHE", + "path": "LICENSE-APACHE", + "kind": "license" + }, + { + "name": "LICENSE-MIT", + "path": "LICENSE-MIT", + "kind": "license" + }, + { + 
"name": "README.md", + "path": "README.md", + "kind": "readme" + }, + { + "name": "axolotlsay", + "path": "axolotlsay", + "kind": "executable" + } + ], + "checksum": "axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256" + }, + "axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256": { + "name": "axolotlsay-x86_64-unknown-linux-gnu.tar.gz.sha256", + "kind": "checksum", + "target_triples": [ + "x86_64-unknown-linux-gnu" + ] + } + }, + "publish_prereleases": false, + "ci": { + "github": { + "artifacts_matrix": { + "include": [ + { + "runner": "macos-11", + "install_dist": "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh", + "dist_args": "--artifacts=local --target=aarch64-apple-darwin" + }, + { + "runner": "macos-11", + "install_dist": "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh", + "dist_args": "--artifacts=local --target=x86_64-apple-darwin" + }, + { + "runner": "windows-2019", + "install_dist": "irm https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.ps1 | iex", + "dist_args": "--artifacts=local --target=x86_64-pc-windows-msvc" + }, + { + "runner": "ubuntu-20.04", + "install_dist": "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh", + "dist_args": "--artifacts=local --target=x86_64-unknown-linux-gnu" + } + ] + }, + "pr_run_mode": "plan" + } + } +} + +================ github-ci.yml ================ +# Copyright 2022-2023, axodotdev +# SPDX-License-Identifier: MIT or Apache-2.0 +# +# CI that: +# +# * checks for a Git Tag that looks like a release +# * builds artifacts with cargo-dist (executable-zips, installers, hashes) +# * uploads those artifacts to temporary workflow zip +# * on success, uploads the artifacts to a Github Release™ +# +# Note that the Github Release™ will be created with a generated +# title/body based on your changelogs. +name: Release + +permissions: + contents: write + +# This task will run whenever you push a git tag that looks like a version +# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. +# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where +# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION +# must be a Cargo-style SemVer Version (must have at least major.minor.patch). +# +# If PACKAGE_NAME is specified, then the release will be for that +# package (erroring out if it doesn't have the given version or isn't cargo-dist-able). +# +# If PACKAGE_NAME isn't specified, then the release will be for all +# (cargo-dist-able) packages in the workspace with that version (this mode is +# intended for workspaces with only one dist-able package, or with all dist-able +# packages versioned/released in lockstep). +# +# If you push multiple tags at once, separate instances of this workflow will +# spin up, creating an independent Github Release™ for each one. However Github +# will hard limit this to 3 tags per commit, as it will assume more tags is a +# mistake. +# +# If there's a prerelease-style suffix to the version, then the Github Release™ +# will be marked as a prerelease. 
+on: + push: + tags: + - '**[0-9]+.[0-9]+.[0-9]+*' + pull_request: + +jobs: + # Run 'cargo dist plan' to determine what tasks we need to do + plan: + runs-on: ubuntu-latest + outputs: + val: ${{ steps.plan.outputs.manifest }} + tag: ${{ !github.event.pull_request && github.ref_name || '' }} + tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} + publishing: ${{ !github.event.pull_request }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - name: Install cargo-dist + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh" + - id: plan + run: | + cargo dist plan ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} --output-format=json > dist-manifest.json + echo "cargo dist plan ran successfully" + cat dist-manifest.json + echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: dist-manifest.json + + # Build and packages all the platform-specific things + upload-local-artifacts: + # Let the initial task tell us to not run (currently very blunt) + needs: plan + if: ${{ fromJson(needs.plan.outputs.val).releases != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} + strategy: + fail-fast: false + # Target platforms/runners are computed by cargo-dist in create-release. + # Each member of the matrix has the following arguments: + # + # - runner: the github runner + # - dist-args: cli flags to pass to cargo dist + # - install-dist: expression to run to install cargo-dist on the runner + # + # Typically there will be: + # - 1 "global" task that builds universal installers + # - N "local" tasks that build each platform's binaries and platform-specific installers + matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} + runs-on: ${{ matrix.runner }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - uses: swatinem/rust-cache@v2 + - name: Install cargo-dist + run: ${{ matrix.install_dist }} + - id: cargo-dist + # We force bash here just because github makes it really hard to get values up + # to "real" actions without writing to env-vars, and writing to env-vars has + # inconsistent syntax between shell and powershell. cargo-dist and jq work fine + # in powershell. + shell: bash + run: | + # Actually do builds and make zips and whatnot + cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json ${{ matrix.dist_args }} > dist-manifest.json + echo "cargo dist ran successfully" + + # Parse out what we just built and upload it to the Github Release™ + echo "paths<> "$GITHUB_OUTPUT" + jq --raw-output ".artifacts[]?.path | select( . 
!= null )" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + - name: "Upload artifacts" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: ${{ steps.cargo-dist.outputs.paths }} + + # Build and package all the platform-agnostic(ish) things + upload-global-artifacts: + needs: [plan, upload-local-artifacts] + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - name: Install cargo-dist + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/vSOME_VERSION/cargo-dist-installer.sh | sh" + # Get all the local artifacts for the global tasks to use (for e.g. checksums) + - name: Fetch local artifacts + uses: actions/download-artifact@v3 + with: + name: artifacts + path: target/distrib/ + - id: cargo-dist + shell: bash + run: | + cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json + echo "cargo dist ran successfully" + + # Parse out what we just built and upload it to the Github Release™ + echo "paths<> "$GITHUB_OUTPUT" + jq --raw-output ".artifacts[]?.path | select( . != null )" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + - name: "Upload artifacts" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: ${{ steps.cargo-dist.outputs.paths }} + + # Sign Windows artifacts with ssl.com + sign-windows-artifacts: + needs: + - plan + - upload-local-artifacts + - upload-global-artifacts + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SIGN_DIR_IN: target/distrib/sign-input + SIGN_DIR_OUT: target/distrib/sign-output + steps: + # Get all the artifacts for the signing tasks to use + - name: Fetch local artifacts + uses: actions/download-artifact@v3 + with: + name: artifacts + path: target/distrib/ + # Only try to sign files that the tool can handle + - name: Select Signable Artifacts + run: | + mkdir -p "$SIGN_DIR_IN" + mkdir -p "$SIGN_DIR_OUT" + for file in target/distrib/*.{msi,ps1}; do + [[ -e $file ]] && mv "$file" "$SIGN_DIR_IN" && echo "signing $file"; + done + # Sign the files + - name: Sign Artifacts with CodeSignTool + uses: ssldotcom/esigner-codesign@develop + with: + command: batch_sign + username: ${{ secrets.SSLDOTCOM_USERNAME }} + password: ${{ secrets.SSLDOTCOM_PASSWORD }} + credential_id: ${{ secrets.SSLDOTCOM_CREDENTIAL_ID }} + totp_secret: ${{ secrets.SSLDOTCOM_TOTP_SECRET }} + dir_path: ${{ env.SIGN_DIR_IN }} + output_path: ${{ env.SIGN_DIR_OUT }} + # Set this to TEST for testing (sandbox) and PROD for production + environment_name: PROD + # Regenerate checksum files for things that have been signed + - name: Regenerate Checksums + run: | + pushd "$SIGN_DIR_OUT" + for filename in *; do + echo "checksuming $filename" + sha256sum --binary "$filename" > "$filename.sha256" + done + popd + # Upload the result, overwriting old files + - name: "Upload artifacts" + uses: actions/upload-artifact@v3 + with: + name: artifacts + path: ${{ env.SIGN_DIR_OUT }} + + should-publish: + needs: + - plan + - upload-local-artifacts + - upload-global-artifacts + - sign-windows-artifacts + if: ${{ needs.plan.outputs.publishing == 'true' }} + runs-on: ubuntu-latest + steps: + - name: print tag + run: echo "ok we're publishing!" 
+ + # Create a Github Release with all the results once everything is done, + publish-release: + needs: [plan, should-publish] + runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - name: "Download artifacts" + uses: actions/download-artifact@v3 + with: + name: artifacts + path: artifacts + - name: Create Release + uses: ncipollo/release-action@v1 + with: + tag: ${{ needs.plan.outputs.tag }} + name: ${{ fromJson(needs.plan.outputs.val).announcement_title }} + body: ${{ fromJson(needs.plan.outputs.val).announcement_github_body }} + prerelease: ${{ fromJson(needs.plan.outputs.val).announcement_is_prerelease }} + artifacts: "artifacts/*" + +================ main.wxs ================ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + 1 + + + + + + + + + + + + + + + + + + + +