Compare commits
180 Commits
v0.0.1
..
2615b23d6e
| Author | SHA1 | Date | |
|---|---|---|---|
| 2615b23d6e | |||
| 628a13011e | |||
| cff4420ee0 | |||
| 9944e29ef0 | |||
| c95bae1761 | |||
| 21da7b782e | |||
| d038930ce5 | |||
|
|
f0fc829a73 | ||
|
|
ba0f108aa8 | ||
| 6daa6fd2f2 | |||
| 5fa4dbfe89 | |||
| bdcd496046 | |||
|
|
e37b80a262 | ||
| 3ce62c272e | |||
| 21b771507c | |||
| 508c8b7feb | |||
|
|
33a889fa67 | ||
|
|
7ddb7812fc | ||
| 9e11648a7c | |||
|
|
ed79af2a8a | ||
|
|
443fbcf305 | ||
| 78d7e90e68 | |||
| 01d4819160 | |||
|
|
e200a32f5a | ||
| 008b33b044 | |||
|
|
f35afac20f | ||
| 262a3d6435 | |||
| eb9e671818 | |||
| efc8af2c93 | |||
| 3d38ac9b51 | |||
|
|
8d40c3773f | ||
|
|
16ce245218 | ||
| a64f4dbf79 | |||
| 1b83d9b199 | |||
| f006503736 | |||
| 9abd2f88cf | |||
| 29acad5eed | |||
|
|
aba958ff8e | ||
|
|
b273c75018 | ||
| 1f7f5dbcae | |||
| 00802795ee | |||
| 2a01189a07 | |||
|
|
d3ca5d52bc | ||
|
|
7124e067a2 | ||
| 5f282fef87 | |||
|
|
e0121c88e9 | ||
| 9e3065794a | |||
|
|
3fecd6a00a | ||
| be8e3263c9 | |||
|
|
d563cc286f | ||
|
|
fc75487da9 | ||
| a015c9b382 | |||
|
|
e84f3da8d8 | ||
| f4a54a412a | |||
|
|
d1bfe9fb62 | ||
|
|
af01a4855a | ||
| 9ceaa1078d | |||
|
|
7f3edcaa9d | ||
| 3d93b5b479 | |||
|
|
32ea7ea35d | ||
| cce8a23b63 | |||
|
|
5ad5c14acb | ||
| 10e2beba3c | |||
|
|
07b14935df | ||
|
|
10f826b23b | ||
| 0871f9de7b | |||
|
|
5c5107ed5f | ||
| 13a074c3be | |||
|
|
cf7d010bc4 | ||
| c8696177c5 | |||
|
|
5fd25df6ac | ||
| be8a5535fa | |||
|
|
48d07e697b | ||
|
|
6c5b59f619 | ||
| 8252191317 | |||
| 261ec0bb6d | |||
| e8de47dc52 | |||
| dbb4d265c4 | |||
| 924976ee1b | |||
| ae6fe8be44 | |||
| 76df717fea | |||
|
|
2885decede | ||
|
|
353ce16782 | ||
| 2098aa65c1 | |||
| 1385aacc62 | |||
| a2106a06a1 | |||
|
|
f0d763c269 | ||
| 4c30bc7e7d | |||
|
|
21127f3ec3 | ||
| 6adc4ce228 | |||
|
|
9e740ec550 | ||
| 81f7e86adc | |||
|
|
8a79de2fc8 | ||
| 9116d1ed53 | |||
|
|
966b69b43b | ||
| 71f4781780 | |||
| fbd2503136 | |||
| 00080543bd | |||
| 1c02106bdc | |||
| 595917bb2b | |||
|
|
c0c0ae0b99 | ||
| 477c87aea9 | |||
| 4b9a84cf70 | |||
| a1e0a97b84 | |||
|
|
77676efffa | ||
| cfc296dd75 | |||
|
|
47d5159fd3 | ||
| ec115d470a | |||
|
|
c48301dead | ||
| d6a2606b7d | |||
| 7dc4995c9b | |||
|
|
419077b1a5 | ||
| befd414bfe | |||
| 2b971602c3 | |||
| ad1be71f41 | |||
| 18ee7e4a9f | |||
|
|
8e2d2a9b61 | ||
| 68bc150d30 | |||
|
|
ef0a687031 | ||
| d0278bfa65 | |||
|
|
f8b78a1325 | ||
| 35183f0e34 | |||
| 54bc914554 | |||
| 2a74aa3588 | |||
| cc5d4cd45d | |||
| c6bf2e10db | |||
| b326e7ed4c | |||
| 396cb4c3e4 | |||
| 9963d9cd0b | |||
| ba64e8141d | |||
| 53ce30e0b1 | |||
| 9aea77661b | |||
| e412f01c0a | |||
| 1e34429f52 | |||
| 25c428ddbd | |||
| f19153b196 | |||
| 8b3c9c822e | |||
| b5ad622798 | |||
| 1063038c52 | |||
|
|
9a3c7d360c | ||
| b956e95619 | |||
|
|
6c7f1c7ecd | ||
| a8b3c185c6 | |||
|
|
9b5b2a603f | ||
| 84ffc8b71c | |||
|
|
06885e3d86 | ||
|
|
77fe30b267 | ||
| eb7c78327b | |||
| 6fe33bffa0 | |||
| cd2c3d6c3d | |||
|
|
77f5c7824c | ||
| 5d1cfffc74 | |||
|
|
ee8bbad82c | ||
| fcab216926 | |||
|
|
ce42f51606 | ||
| c0755d8751 | |||
|
|
4a3058ce55 | ||
|
|
79ee93cc06 | ||
| 08dea6a190 | |||
|
|
671429dfcf | ||
|
|
2cb9e0cd14 | ||
| 8c5dd6fd84 | |||
| 63eef4a70a | |||
|
|
1be44cc855 | ||
|
|
8362359093 | ||
|
|
c20bbf07d9 | ||
|
|
d94e611b12 | ||
| e334b375da | |||
|
|
b95ac1a686 | ||
| 3a1752b148 | |||
|
|
b29f33413a | ||
| 3c119595fa | |||
|
|
7f8632b41d | ||
| d8dec0db59 | |||
|
|
6125e09e54 | ||
| ebea1519c7 | |||
|
|
ff4ec99ab0 | ||
| 378b5cbe65 | |||
|
|
595206d0d9 | ||
| 0968311055 |
+153
-243
@@ -8,9 +8,9 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
bump_type:
|
||||
description: 'Specify the type of version bump'
|
||||
description: "Specify the type of version bump"
|
||||
required: true
|
||||
default: 'patch'
|
||||
default: "patch"
|
||||
type: choice
|
||||
options:
|
||||
- patch
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install Commitizen
|
||||
run: |
|
||||
@@ -126,9 +126,7 @@ jobs:
|
||||
- name: Generate changelog for the version bump
|
||||
id: changelog
|
||||
run: |
|
||||
changelog=$(conventional-changelog -p angular -i CHANGELOG.md -s --from ${{ env.prev_version }} --to ${{ env.version }})
|
||||
echo "$changelog" > artifacts/changelog.md
|
||||
echo "changelog_body=$(cat artifacts/changelog.md)" >> $GITHUB_ENV
|
||||
conventional-changelog -p conventionalcommits -i CHANGELOG.md --from ${{ env.prev_version }} --to v${{ env.version }} > artifacts/changelog.md
|
||||
|
||||
- name: Push changes
|
||||
if: env.ACT != 'true'
|
||||
@@ -151,30 +149,42 @@ jobs:
|
||||
Cargo.toml
|
||||
Cargo.lock
|
||||
|
||||
build-release-artifacts:
|
||||
publish-github-release:
|
||||
name: build-release
|
||||
needs: [bump-version]
|
||||
runs-on: ${{ matrix.job.os }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
BUILD_CMD: cargo
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
# prettier-ignore
|
||||
job:
|
||||
- { name: "macOS-arm64", os: "macOS-latest", target: "aarch64-apple-darwin", artifact_suffix: "macos-arm64", use-cross: true }
|
||||
- { name: "macOS-amd64", os: "macOS-latest", target: "x86_64-apple-darwin", artifact_suffix: "macos" }
|
||||
- { name: "windows-amd64", os: "windows-latest", target: "x86_64-pc-windows-msvc", artifact_suffix: "windows" }
|
||||
- { name: "windows-aarch64", os: "windows-latest", target: "aarch64-pc-windows-msvc", artifact_suffix: "windows-aarch64", use-cross: true }
|
||||
- { name: "linux-gnu", os: "ubuntu-latest", target: "x86_64-unknown-linux-gnu", artifact_suffix: "linux" }
|
||||
- { name: "linux-musl", os: "ubuntu-latest", target: "x86_64-unknown-linux-musl", artifact_suffix: "linux-musl", use-cross: true, }
|
||||
- { name: "linux-aarch64-gnu", os: "ubuntu-latest", target: "aarch64-unknown-linux-gnu", artifact_suffix: "aarch64-gnu", use-cross: true, test-bin: "--bin gman" }
|
||||
- { name: "linux-aarch64-musl", os: "ubuntu-latest", target: "aarch64-unknown-linux-musl", artifact_suffix: "aarch64-musl", use-cross: true, test-bin: "--bin gman" }
|
||||
- { name: "linux-arm-gnu", os: "ubuntu-latest", target: "arm-unknown-linux-gnueabi", artifact_suffix: "armv6-gnu", use-cross: true, test-bin: "--bin gman" }
|
||||
- { name: "linux-arm-musl", os: "ubuntu-latest", target: "arm-unknown-linux-musleabihf", artifact_suffix: "armv6-musl", use-cross: true, test-bin: "--bin gman" }
|
||||
- { name: "linux-armv7-gnu", os: "ubuntu-latest", target: "armv7-unknown-linux-gnueabihf", artifact_suffix: "armv7-gnu", use-cross: true, test-bin: "--bin gman" }
|
||||
- { name: "linux-armv7-musl", os: "ubuntu-latest", target: "armv7-unknown-linux-musleabihf", artifact_suffix: "armv7-musl", use-cross: true, test-bin: "--bin gman" }
|
||||
rust: [stable]
|
||||
include:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
os: ubuntu-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: aarch64-apple-darwin
|
||||
os: macos-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: aarch64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: x86_64-apple-darwin
|
||||
os: macos-latest
|
||||
cargo-flags: ""
|
||||
- target: x86_64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
cargo-flags: ""
|
||||
- target: x86_64-unknown-linux-musl
|
||||
os: ubuntu-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: x86_64-unknown-linux-gnu
|
||||
os: ubuntu-latest
|
||||
cargo-flags: ""
|
||||
|
||||
steps:
|
||||
- name: Check if actor is repository owner
|
||||
@@ -194,126 +204,6 @@ jobs:
|
||||
git fetch --all
|
||||
git pull
|
||||
|
||||
- name: Get bumped Cargo files (Act)
|
||||
if: env.ACT == 'true'
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bumped-cargo-files
|
||||
path: ${{ github.workspace }}
|
||||
|
||||
- uses: actions/cache@v3
|
||||
name: Cache Cargo registry
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('Cargo.lock') }}
|
||||
|
||||
- uses: actions/cache@v3
|
||||
if: startsWith(matrix.job.name, 'linux-')
|
||||
with:
|
||||
path: ~/.cargo/bin
|
||||
key: ${{ runner.os }}-cargo-bin-${{ hashFiles('.github/workflows/release.yml') }}
|
||||
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
name: Set Rust toolchain
|
||||
with:
|
||||
targets: ${{ matrix.job.target }}
|
||||
|
||||
- uses: taiki-e/setup-cross-toolchain-action@v1
|
||||
with:
|
||||
# NB: sets CARGO_BUILD_TARGET evar - do not need --target flag in build
|
||||
target: ${{ matrix.job.target }}
|
||||
|
||||
- uses: taiki-e/install-action@cross
|
||||
if: ${{ matrix.job.use-cross }}
|
||||
|
||||
- name: Installing needed Ubuntu dependencies
|
||||
if: matrix.job.os == 'ubuntu-latest'
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get -y update
|
||||
case ${{ matrix.job.target }} in
|
||||
arm*-linux-*) sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
|
||||
aarch64-*-linux-*) sudo apt-get -y install gcc-aarch64-linux-gnu ;;
|
||||
esac
|
||||
|
||||
- name: Build
|
||||
run: cargo build --release --verbose --target=${{ matrix.job.target }} --locked
|
||||
|
||||
- name: Verify file
|
||||
shell: bash
|
||||
run: |
|
||||
file target/${{ matrix.job.target }}/release/gman
|
||||
|
||||
- name: Test
|
||||
if: matrix.job.target != 'aarch64-apple-darwin' && matrix.job.target != 'aarch64-pc-windows-msvc'
|
||||
run: cargo test --release --verbose --target=${{ matrix.job.target }} ${{ matrix.job.test-bin }}
|
||||
|
||||
- name: Packaging final binary (Windows)
|
||||
if: matrix.job.os == 'windows-latest'
|
||||
shell: bash
|
||||
run: |
|
||||
cd target/${{ matrix.job.target }}/release
|
||||
BINARY_NAME=gman.exe
|
||||
if [ "${{ matrix.job.target }}" != "aarch64-pc-windows-msvc" ]; then
|
||||
# strip the binary
|
||||
strip $BINARY_NAME
|
||||
fi
|
||||
RELEASE_NAME=gman-${{ matrix.job.artifact_suffix }}
|
||||
mkdir -p artifacts
|
||||
tar czvf $RELEASE_NAME.tar.gz $BINARY_NAME
|
||||
# create sha checksum files
|
||||
certutil -hashfile $RELEASE_NAME.tar.gz sha256 | grep -E [A-Fa-f0-9]{64} > $RELEASE_NAME.sha256
|
||||
echo "RELEASE_NAME=$RELEASE_NAME" >> $GITHUB_ENV
|
||||
|
||||
- name: Packaging final binary (macOS and Linux)
|
||||
if: matrix.job.os != 'windows-latest'
|
||||
shell: bash
|
||||
run: |
|
||||
# set the right strip executable
|
||||
STRIP="strip";
|
||||
case ${{ matrix.job.target }} in
|
||||
arm*-linux-*) STRIP="arm-linux-gnueabihf-strip" ;;
|
||||
aarch64-*-linux-*) STRIP="aarch64-linux-gnu-strip" ;;
|
||||
esac;
|
||||
cd target/${{ matrix.job.target }}/release
|
||||
BINARY_NAME=gman
|
||||
# strip the binary
|
||||
"$STRIP" "$BINARY_NAME"
|
||||
RELEASE_NAME=gman-${{ matrix.job.artifact_suffix }}
|
||||
tar czvf $RELEASE_NAME.tar.gz $BINARY_NAME
|
||||
# create sha checksum files
|
||||
shasum -a 256 $RELEASE_NAME.tar.gz > $RELEASE_NAME.sha256
|
||||
echo "RELEASE_NAME=$RELEASE_NAME" >> $GITHUB_ENV
|
||||
|
||||
- name: Add artifacts
|
||||
run: |
|
||||
mkdir -p artifacts
|
||||
cp target/${{ matrix.job.target }}/release/${{ env.RELEASE_NAME }}.tar.gz artifacts/
|
||||
cp target/${{ matrix.job.target }}/release/${{ env.RELEASE_NAME }}.sha256 artifacts/
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-${{ env.RELEASE_NAME }}
|
||||
path: artifacts
|
||||
overwrite: true
|
||||
|
||||
publish-github-release:
|
||||
name: publish-github-release
|
||||
needs: [build-release-artifacts]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check if actor is repository owner
|
||||
if: ${{ github.actor != github.repository_owner && env.ACT != 'true' }}
|
||||
run: |
|
||||
echo "You are not authorized to run this workflow."
|
||||
exit 1
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
@@ -327,114 +217,145 @@ jobs:
|
||||
git pull
|
||||
|
||||
- name: Set environment variables
|
||||
shell: bash
|
||||
run: |
|
||||
release_version="$(cat ./artifacts/release-version)"
|
||||
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
|
||||
changelog_body="$(cat ./artifacts/changelog.md)"
|
||||
echo "changelog_body=$(cat artifacts/changelog.md)" >> $GITHUB_ENV
|
||||
|
||||
- name: Validate release environment variables
|
||||
run: |
|
||||
echo "Release version: ${{ env.RELEASE_VERSION }}"
|
||||
echo "Changelog body: ${{ env.changelog_body }}"
|
||||
echo "Changelog body: $(cat artifacts/changelog.md)"
|
||||
|
||||
- name: Create a GitHub Release
|
||||
- name: Get bumped Cargo files (Act)
|
||||
if: env.ACT == 'true'
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bumped-cargo-files
|
||||
path: ${{ github.workspace }}
|
||||
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
name: Set Rust toolchain
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Install cross
|
||||
if: matrix.use-cross
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cross
|
||||
|
||||
- name: Overwrite build command env variable
|
||||
if: matrix.use-cross
|
||||
shell: bash
|
||||
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
|
||||
|
||||
- name: Install latest LLVM/Clang
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
run: |
|
||||
wget https://apt.llvm.org/llvm.sh
|
||||
chmod +x llvm.sh
|
||||
# omit the version to get the latest stable for your Ubuntu (24.04 "noble" on ubuntu-latest)
|
||||
sudo ./llvm.sh all
|
||||
# ensure libclang dev package is present (adjust the "22" if a newer major exists)
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libclang-20-dev libclang-dev
|
||||
|
||||
- name: Show Version Information (Rust, cargo, GCC)
|
||||
shell: bash
|
||||
run: |
|
||||
gcc --version || true
|
||||
rustup -V
|
||||
rustup toolchain list
|
||||
rustup default
|
||||
cargo -V
|
||||
rustc -V
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: $BUILD_CMD build --locked --release --target=${{ matrix.target }} ${{ matrix.cargo-flags }}
|
||||
|
||||
- name: Verify file
|
||||
shell: bash
|
||||
run: |
|
||||
file target/${{ matrix.target }}/release/gman
|
||||
|
||||
- name: Test
|
||||
if: matrix.target != 'aarch64-apple-darwin' && matrix.target != 'aarch64-pc-windows-msvc'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
if [[ "${{ matrix.use-cross || 'false' }}" == 'true' ]]; then
|
||||
cross test --release --locked --target=${{ matrix.target }} --verbose
|
||||
else
|
||||
cargo test --release --locked --target=${{ matrix.target }} --verbose
|
||||
fi
|
||||
|
||||
- name: Build Archive
|
||||
shell: bash
|
||||
id: package
|
||||
env:
|
||||
target: ${{ matrix.target }}
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
|
||||
bin=${GITHUB_REPOSITORY##*/}
|
||||
dist_dir=`pwd`/dist
|
||||
name=$bin-$target
|
||||
executable=target/$target/release/$bin
|
||||
|
||||
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||
executable=$executable.exe
|
||||
fi
|
||||
|
||||
mkdir $dist_dir
|
||||
cp $executable $dist_dir
|
||||
cd $dist_dir
|
||||
|
||||
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||
archive=$dist_dir/$name.zip
|
||||
sha=$dist_dir/$name.sha256
|
||||
7z a $archive *
|
||||
certutil -hashfile $archive sha256 | grep -E [A-Fa-f0-9]{64} > $sha
|
||||
echo "archive=dist/$name.zip" >> $GITHUB_OUTPUT
|
||||
echo "sha=dist/$name.sha256" >> $GITHUB_OUTPUT
|
||||
else
|
||||
archive=$dist_dir/$name.tar.gz
|
||||
sha=$dist_dir/$name.sha256
|
||||
tar -czf $archive *
|
||||
shasum -a 256 $archive > $sha
|
||||
echo "archive=dist/$name.tar.gz" >> $GITHUB_OUTPUT
|
||||
echo "sha=dist/$name.sha256" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Publish Archive and SHA
|
||||
if: env.ACT != 'true'
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
files: |
|
||||
artifacts/gman-macos-arm64.tar.gz
|
||||
artifacts/gman-macos-arm64.sha256
|
||||
artifacts/gman-macos.tar.gz
|
||||
artifacts/gman-macos.sha256
|
||||
artifacts/gman-windows.tar.gz
|
||||
artifacts/gman-windows.sha256
|
||||
artifacts/gman-windows-aarch64.tar.gz
|
||||
artifacts/gman-windows-aarch64.sha256
|
||||
artifacts/gman-linux.tar.gz
|
||||
artifacts/gman-linux.sha256
|
||||
artifacts/gman-linux-musl.tar.gz
|
||||
artifacts/gman-linux-musl.sha256
|
||||
artifacts/gman-aarch64-gnu.tar.gz
|
||||
artifacts/gman-aarch64-gnu.sha256
|
||||
artifacts/gman-aarch64-musl.tar.gz
|
||||
artifacts/gman-aarch64-musl.sha256
|
||||
artifacts/gman-armv6-gnu.tar.gz
|
||||
artifacts/gman-armv6-gnu.sha256
|
||||
artifacts/gman-armv6-musl.tar.gz
|
||||
artifacts/gman-armv6-musl.sha256
|
||||
artifacts/gman-armv7-gnu.tar.gz
|
||||
artifacts/gman-armv7-gnu.sha256
|
||||
artifacts/gman-armv7-musl.tar.gz
|
||||
artifacts/gman-armv7-musl.sha256
|
||||
${{ steps.package.outputs.archive }}
|
||||
${{ steps.package.outputs.sha }}
|
||||
tag_name: v${{ env.RELEASE_VERSION }}
|
||||
name: 'v${{ env.RELEASE_VERSION }}'
|
||||
body: ${{ env.changelog_body }}
|
||||
draft: false
|
||||
name: "v${{ env.RELEASE_VERSION }}"
|
||||
body_path: artifacts/changelog.md
|
||||
prerelease: false
|
||||
|
||||
- name: Add artifacts
|
||||
shell: bash
|
||||
run: |
|
||||
[[ -d artifacts ]] || mkdir -p artifacts
|
||||
cp ${{ steps.package.outputs.archive }} artifacts/
|
||||
cp ${{ steps.package.outputs.sha }} artifacts/
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-v${{ env.RELEASE_VERSION }}-${{ matrix.target }}
|
||||
path: artifacts
|
||||
overwrite: true
|
||||
|
||||
publish-chocolatey-package:
|
||||
needs: [publish-github-release]
|
||||
name: Publish Chocolatey Package
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- name: Check if actor is repository owner
|
||||
if: ${{ github.actor != github.repository_owner && env.ACT != 'true' }}
|
||||
run: |
|
||||
echo "You are not authorized to run this workflow."
|
||||
exit 1
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Get release artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: artifacts
|
||||
merge-multiple: true
|
||||
|
||||
- name: Set release assets and version
|
||||
shell: pwsh
|
||||
run: |
|
||||
# Read the first column from the SHA256 file
|
||||
$windows_sha = Get-Content ./artifacts/gman-windows.sha256 | ForEach-Object { $_.Split(' ')[0] }
|
||||
Add-Content -Path $env:GITHUB_ENV -Value "WINDOWS_SHA=$windows_sha"
|
||||
|
||||
# Read the release version from the release-version file
|
||||
$release_version = Get-Content ./artifacts/release-version
|
||||
Add-Content -Path $env:GITHUB_ENV -Value "RELEASE_VERSION=$release_version"
|
||||
|
||||
- name: Validate release environment variables
|
||||
run: |
|
||||
echo "Release SHA windows: ${{ env.WINDOWS_SHA }}"
|
||||
echo "Release version: ${{ env.RELEASE_VERSION }}"
|
||||
|
||||
- name: Package and Publish package to Chocolatey
|
||||
if: env.ACT != 'true'
|
||||
run: |
|
||||
mkdir ./deployment/chocolatey/tools
|
||||
# Run packaging script
|
||||
python "./deployment/chocolatey/packager.py" ${{ env.RELEASE_VERSION }} "./deployment/chocolatey/gman.nuspec.template" "./deployment/chocolatey/gman.nuspec" ${{ env.WINDOWS_SHA }}
|
||||
python "./deployment/chocolatey/packager.py" ${{ env.RELEASE_VERSION }} "./deployment/chocolatey/chocolateyinstall.ps1.template" "./deployment/chocolatey/tools/chocolateyinstall.ps1" ${{ env.WINDOWS_SHA }}
|
||||
|
||||
# Publish to Chocolatey
|
||||
cd ./deployment/chocolatey
|
||||
choco pack
|
||||
echo y | choco install gman -dv -s .
|
||||
$version = gman --version
|
||||
$version = $version -replace " ", "."
|
||||
choco push $version.nupkg -s https://push.chocolatey.org/ --api-key ${{ secrets.CHOCOLATEY_API_KEY }};
|
||||
|
||||
publish-homebrew-formula:
|
||||
needs: [publish-github-release]
|
||||
name: Update Homebrew formulas
|
||||
@@ -461,11 +382,11 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
# Set environment variables
|
||||
macos_sha="$(cat ./artifacts/gman-macos.sha256 | awk '{print $1}')"
|
||||
macos_sha="$(cat ./artifacts/gman-x86_64-apple-darwin.sha256 | awk '{print $1}')"
|
||||
echo "MACOS_SHA=$macos_sha" >> $GITHUB_ENV
|
||||
macos_sha_arm="$(cat ./artifacts/gman-macos-arm64.sha256 | awk '{print $1}')"
|
||||
macos_sha_arm="$(cat ./artifacts/gman-aarch64-apple-darwin.sha256 | awk '{print $1}')"
|
||||
echo "MACOS_SHA_ARM=$macos_sha_arm" >> $GITHUB_ENV
|
||||
linux_sha="$(cat ./artifacts/gman-linux-musl.sha256 | awk '{print $1}')"
|
||||
linux_sha="$(cat ./artifacts/gman-x86_64-unknown-linux-musl.sha256 | awk '{print $1}')"
|
||||
echo "LINUX_SHA=$linux_sha" >> $GITHUB_ENV
|
||||
release_version="$(cat ./artifacts/release-version)"
|
||||
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
|
||||
@@ -528,17 +449,6 @@ jobs:
|
||||
git fetch --all
|
||||
git pull
|
||||
|
||||
- uses: actions/cache@v3
|
||||
name: Cache Cargo registry
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('Cargo.lock') }}
|
||||
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/.cargo/bin
|
||||
key: ${{ runner.os }}-cargo-bin-${{ hashFiles('.github/workflows/release.yml') }}
|
||||
|
||||
- name: Install Rust stable
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
|
||||
+68
-3
@@ -5,7 +5,72 @@ All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [0.0.1] - 2025-09-10
|
||||
## v0.2.3 (2025-10-14)
|
||||
|
||||
### Other
|
||||
- Initial test release of the `gman` project.
|
||||
### Refactor
|
||||
|
||||
- Refactored the library for gman so that it dynamically names config and password files to be used across any application
|
||||
|
||||
## v0.2.2 (2025-09-30)
|
||||
|
||||
### Refactor
|
||||
|
||||
- Environment variable interpolation in config file works globally, not based on type
|
||||
|
||||
## v0.2.1 (2025-09-30)
|
||||
|
||||
### Feat
|
||||
|
||||
- Environment variable interpolation in the Gman configuration file
|
||||
|
||||
### Fix
|
||||
|
||||
- Corrected tab completions for the provider flag
|
||||
|
||||
## v0.2.0 (2025-09-30)
|
||||
|
||||
### Feat
|
||||
|
||||
- gopass support
|
||||
- Added command aliases to make the CLI more universal
|
||||
- Added dynamic tab completions for the profile, providers, and the secrets in any given secret manager
|
||||
- Users can now specify a default provider to use with each run config, so they don't need to explicitly specify which to use when wanting to run different applications.
|
||||
|
||||
## v0.1.0 (2025-09-17)
|
||||
|
||||
### Feat
|
||||
|
||||
- Subcommand to edit the config directly instead of having to find the file
|
||||
|
||||
### Fix
|
||||
|
||||
- improved user messages for local provider sync set up
|
||||
- Pass the changelog to the GHA properly using a file
|
||||
- Potential bug in changelog variable generation
|
||||
- Revert back hacky stuff so I can test with act now
|
||||
- Attempting to use pre-generated bindgens for the aws-lc-sys library
|
||||
- Install openSSL differently to make this work
|
||||
- Address edge case for unknown_musl targets
|
||||
- Install LLVM prereqs for release flow
|
||||
- Updated the release flow to install the external bindgen-cli
|
||||
|
||||
## v0.0.1 (2025-09-12)
|
||||
|
||||
### Feat
|
||||
|
||||
- Azure Key Vault support
|
||||
- GCP Secret Manager support
|
||||
- Full AWS SecretsManager support
|
||||
- AWS Secrets Manager support
|
||||
- Added two new flags to output where gman writes logs to and where it expects the config file to live
|
||||
|
||||
### Fix
|
||||
|
||||
- Made the vault file location more fault tolerant
|
||||
- Attempting to maybe be a bit more explicit about config file handling to fix MacOS tests
|
||||
|
||||
### Refactor
|
||||
|
||||
- Refactor configuration structs directly into the provider definition to simplify validation, structs, and future extensions
|
||||
- Made the creation of the log directories a bit more fault tolerant
|
||||
- Renamed the provider field in a config file to type to make things a little easier to understand; also removed husky
|
||||
|
||||
+1
-1
@@ -60,7 +60,7 @@ representative at an online or offline event.
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
d4udts@gmail.com.
|
||||
alex.j.tusa@gmail.com.
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
|
||||
Generated
+917
-700
File diff suppressed because it is too large
Load Diff
+26
-10
@@ -1,10 +1,16 @@
|
||||
[package]
|
||||
name = "gman"
|
||||
version = "0.0.1"
|
||||
version = "0.2.3"
|
||||
edition = "2024"
|
||||
authors = ["Alex Clarke <alex.j.tusa@gmail.com>"]
|
||||
description = "Universal secret management and injection tool"
|
||||
keywords = ["cli", "secrets", "credentials", "command-line", "encryption"]
|
||||
description = "Universal command line secret management and injection tool"
|
||||
keywords = [
|
||||
"cli",
|
||||
"secrets-manager",
|
||||
"secret-injection",
|
||||
"command-runner",
|
||||
"vault",
|
||||
]
|
||||
documentation = "https://github.com/Dark-Alex-17/gman"
|
||||
repository = "https://github.com/Dark-Alex-17/gman"
|
||||
homepage = "https://github.com/Dark-Alex-17/gman"
|
||||
@@ -25,8 +31,8 @@ clap = { version = "4.5.47", features = [
|
||||
"env",
|
||||
"wrap_help",
|
||||
] }
|
||||
clap_complete = "4.5.57"
|
||||
confy = { version = "1.0.0", default-features = false, features = [
|
||||
clap_complete = { version = "4.5.57", features = ["unstable-dynamic"] }
|
||||
confy = { version = "2.0.0", default-features = false, features = [
|
||||
"yaml_conf",
|
||||
] }
|
||||
crossterm = "0.29.0"
|
||||
@@ -47,24 +53,34 @@ indoc = "2.0.6"
|
||||
regex = "1.11.2"
|
||||
serde_yaml = "0.9.34"
|
||||
tempfile = "3.22.0"
|
||||
aws-sdk-secretsmanager = "1.88.0"
|
||||
aws-sdk-secretsmanager = "1.98.0"
|
||||
tokio = { version = "1.47.1", features = ["full"] }
|
||||
aws-config = { version = "1.8.6", features = ["behavior-version-latest"] }
|
||||
aws-config = { version = "1.8.12", features = ["behavior-version-latest"] }
|
||||
async-trait = "0.1.89"
|
||||
futures = "0.3.31"
|
||||
gcloud-sdk = { version = "0.28.1", features = [
|
||||
"google-cloud-secretmanager-v1",
|
||||
] }
|
||||
crc32c = "0.6.8"
|
||||
azure_identity = "0.27.0"
|
||||
azure_security_keyvault_secrets = "0.6.0"
|
||||
azure_core = "0.31.0"
|
||||
azure_identity = "0.31.0"
|
||||
azure_security_keyvault_secrets = "0.10.0"
|
||||
aws-lc-sys = { version = "0.37.0", features = ["bindgen"] }
|
||||
which = "8.0.0"
|
||||
once_cell = "1.21.3"
|
||||
|
||||
[target.'cfg(all(target_os="linux", target_env="musl"))'.dependencies]
|
||||
openssl = { version = "0.10", features = ["vendored"] }
|
||||
|
||||
[target.'cfg(target_os="macos")'.dependencies]
|
||||
openssl = { version = "0.10", features = ["vendored"] }
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "1.4.1"
|
||||
proptest = "1.5.0"
|
||||
assert_cmd = "2.0.16"
|
||||
predicates = "3.1.2"
|
||||
|
||||
serial_test = "3.2.0"
|
||||
|
||||
[[bin]]
|
||||
bench = false
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# G-Man - Universal Credential Manager
|
||||
# G-Man - Universal Command Line Secret Manager and Injection Tool
|
||||
|
||||

|
||||

|
||||
@@ -89,12 +89,15 @@ gman aws sts get-caller-identity
|
||||
- [Features](#features)
|
||||
- [Installation](#installation)
|
||||
- [Configuration](#configuration)
|
||||
- [Environment Variable Interpolation](#environment-variable-interpolation)
|
||||
- [Providers](#providers)
|
||||
- [Local](#provider-local)
|
||||
- [AWS Secrets Manager](#provider-aws_secrets_manager)
|
||||
- [GCP Secret Manager](#provider-gcp_secret_manager)
|
||||
- [Azure Key Vault](#provider-azure_key_vault)
|
||||
- [Run Configurations](#run-configurations)
|
||||
- [Gopass](#provider-gopass)
|
||||
- [Run Configurations](#run-configurations)
|
||||
- [Specifying a Default Provider per Run Config](#specifying-a-default-provider-per-run-config)
|
||||
- [Environment Variable Secret Injection](#environment-variable-secret-injection)
|
||||
- [Inject Secrets via Command-Line Flags](#inject-secrets-via-command-line-flags)
|
||||
- [Inject Secrets into Files](#inject-secrets-into-files)
|
||||
@@ -133,33 +136,31 @@ To upgrade `gman` using Homebrew:
|
||||
brew upgrade gman
|
||||
```
|
||||
|
||||
### Chocolatey (Windows)
|
||||
The G-Man Chocolatey package is located [here](https://community.chocolatey.org/packages/gman). Please note that validation
|
||||
of Chocolatey packages take quite some time, and thus the package may not be available immediately after a new release.
|
||||
### Scripts
|
||||
#### Linux/MacOS (`bash`)
|
||||
You can use the following command to run a bash script that downloads and installs the latest version of `gman` for your
|
||||
OS (Linux/MacOS) and architecture (x86_64/arm64):
|
||||
|
||||
```powershell
|
||||
choco install gman
|
||||
|
||||
# Some newer releases may require a version number, so you can specify it like so:
|
||||
choco install gman --version=0.1.0
|
||||
```shell
|
||||
curl -fsSL https://raw.githubusercontent.com/Dark-Alex-17/gman/main/install_gman.sh | bash
|
||||
```
|
||||
|
||||
To upgrade to the latest and greatest version of G-Man:
|
||||
```powershell
|
||||
choco upgrade gman
|
||||
#### Windows/Linux/MacOS (`PowerShell`)
|
||||
You can use the following command to run a PowerShell script that downloads and installs the latest version of `gman`
|
||||
for your OS (Windows/Linux/MacOS) and architecture (x86_64/arm64):
|
||||
|
||||
# To upgrade to a specific version:
|
||||
choco upgrade gman --version=0.1.0
|
||||
```powershell
|
||||
powershell -NoProfile -ExecutionPolicy Bypass -Command "iwr -useb https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.ps1 | iex"
|
||||
```
|
||||
|
||||
### Manual
|
||||
Binaries are available on the [releases](https://github.com/Dark-Alex-17/gman/releases) page for the following platforms:
|
||||
|
||||
| Platform | Architecture(s) |
|
||||
|----------------|----------------------------|
|
||||
| macOS | x86_64, arm64 |
|
||||
| Linux GNU/MUSL | x86_64,armv6,armv7,aarch64 |
|
||||
| Windows | x86_64,aarch64 |
|
||||
| Platform | Architecture(s) |
|
||||
|----------------|-----------------|
|
||||
| macOS | x86_64, arm64 |
|
||||
| Linux GNU/MUSL | x86_64, aarch64 |
|
||||
| Windows | x86_64, aarch64 |
|
||||
|
||||
#### Windows Instructions
|
||||
To use a binary from the releases page on Windows, do the following:
|
||||
@@ -176,6 +177,22 @@ To use a binary from the releases page on Linux/MacOS, do the following:
|
||||
3. Extract the binary with `tar -C /usr/local/bin -xzf gman-<arch>.tar.gz` (Note: This may require `sudo`)
|
||||
4. Now you can run `gman`!
|
||||
|
||||
### Enable Tab Completion
|
||||
`gman` supports shell tab completion for `bash`, `zsh`, and `fish`. To enable it, run the following command for your
|
||||
shell:
|
||||
|
||||
```shell
|
||||
# Bash
|
||||
echo 'source <(COMPLETE=bash gman)' >> ~/.bashrc
|
||||
# Zsh
|
||||
echo 'source <(COMPLETE=zsh gman)' >> ~/.zshrc
|
||||
# Fish
|
||||
echo 'COMPLETE=fish gman | source' >> ~/.config/fish/config.fish
|
||||
```
|
||||
|
||||
Then restart your shell or `source` the appropriate config file.
|
||||
|
||||
|
||||
## Configuration
|
||||
|
||||
`gman` reads a YAML configuration file located at an OS-specific path:
|
||||
@@ -226,6 +243,28 @@ providers:
|
||||
run_configs: []
|
||||
```
|
||||
|
||||
### Environment Variable Interpolation
|
||||
The config file supports environment variable interpolation using `${VAR_NAME}` syntax. For example, to use an
|
||||
AWS profile from your environment:
|
||||
|
||||
```yaml
|
||||
providers:
|
||||
- name: aws
|
||||
type: aws_secrets_manager
|
||||
aws_profile: ${AWS_PROFILE} # Uses the AWS_PROFILE env var
|
||||
aws_region: us-east-1
|
||||
```
|
||||
|
||||
Or to set a default profile to use when `AWS_PROFILE` is unset:
|
||||
|
||||
```yaml
|
||||
providers:
|
||||
- name: aws
|
||||
type: aws_secrets_manager
|
||||
aws_profile: ${AWS_PROFILE:-default} # Uses 'default' if AWS_PROFILE is unset
|
||||
aws_region: us-east-1
|
||||
```
|
||||
|
||||
## Providers
|
||||
`gman` supports multiple providers for secret storage. The default provider is `local`, which stores secrets in an
|
||||
encrypted file on your filesystem. The CLI and config format are designed to be extensible so new providers can be
|
||||
@@ -247,6 +286,7 @@ documented and added without breaking existing setups. The following table shows
|
||||
| [`hashicorp_vault`](https://www.hashicorp.com/en/products/vault) | 🕒 | | |
|
||||
| [`azure_key_vault`](https://azure.microsoft.com/en-us/products/key-vault/) | ✅ | [Azure Key Vault](#provider-azure_key_vault) | |
|
||||
| [`gcp_secret_manager`](https://cloud.google.com/security/products/secret-manager?hl=en) | ✅ | [GCP Secret Manager](#provider-gcp_secret_manager) | |
|
||||
| [`gopass`](https://www.gopass.pw/) | ✅ | | |
|
||||
| [`1password`](https://1password.com/) | 🕒 | | |
|
||||
| [`bitwarden`](https://bitwarden.com/) | 🕒 | | |
|
||||
| [`dashlane`](https://www.dashlane.com/) | 🕒 | | Waiting for CLI support for adding secrets |
|
||||
@@ -390,6 +430,26 @@ Important notes:
|
||||
- Ensure your identity has the necessary Key Vault permissions (RBAC such as `Key Vault Secrets User`/`Administrator`,
|
||||
or appropriate access policies) for get/set/list/delete.
|
||||
|
||||
### Provider: `gopass`
|
||||
The `gopass` provider uses [gopass](https://www.gopass.pw/) as the backing storage location for secrets.
|
||||
|
||||
- Optional: `store` (string) to specify a particular gopass store if you have multiple.
|
||||
|
||||
Configuration example:
|
||||
|
||||
```yaml
|
||||
default_provider: gopass
|
||||
providers:
|
||||
- name: gopass
|
||||
type: gopass
|
||||
store: my-store # Optional; if omitted, uses the default configured gopass store
|
||||
```
|
||||
|
||||
Important notes:
|
||||
- Ensure `gopass` is installed and initialized on your system.
|
||||
- Secrets are managed using gopass's native commands; `gman` acts as a wrapper to interface with gopass.
|
||||
- Updates overwrite existing secrets
|
||||
- If no store is specified, the default gopass store is used and `gman sync` will sync with all configured stores.
|
||||
## Run Configurations
|
||||
|
||||
Run configurations (or "profiles") tell `gman` how to inject secrets into a command. Three modes of secret injection are
|
||||
@@ -406,6 +466,45 @@ will error out and report that it could not find the run config with that name.
|
||||
You can manually specify which run configuration to use with the `--profile` flag. Again, if no profile is found with
|
||||
that name, `gman` will error out.
|
||||
|
||||
|
||||
### Specifying a Default Provider per Run Config
|
||||
All run configs also support the `provider` field, which lets you override the default provider for that specific
|
||||
profile. This is useful if you have multiple providers configured and want to use a different one for a specific command,
|
||||
but that provider may not be the `default_provider`, and you don't want to have to specify `--provider` on the command
|
||||
line every time.
|
||||
|
||||
For Example:
|
||||
```yaml
|
||||
default_provider: local
|
||||
run_configs:
|
||||
# `gman aws ...` uses the `aws` provider instead of `local` if no
|
||||
# `--provider` is given.
|
||||
- name: aws
|
||||
# Can be overridden by explicitly specifying a `--provider`
|
||||
provider: aws
|
||||
secrets:
|
||||
- DB_USERNAME
|
||||
- DB_PASSWORD
|
||||
# `gman docker ...` uses the default_provider `local` because no
|
||||
# `provider` is specified.
|
||||
- name: docker
|
||||
secrets:
|
||||
- MY_APP_API_KEY
|
||||
- MY_APP_DB_PASSWORD
|
||||
# `gman managarr ...` uses the `local` provider; This is useful
|
||||
# if you change the default provider to something else.
|
||||
- name: managarr
|
||||
provider: local
|
||||
secrets:
|
||||
- RADARR_API_KEY
|
||||
- SONARR_API_KEY
|
||||
files:
|
||||
- /home/user/.config/managarr/config.yml
|
||||
```
|
||||
|
||||
**Important Note:** Any run config with a `provider` field can be overridden by specifying `--provider` on the command
|
||||
line.
|
||||
|
||||
### Environment Variable Secret Injection
|
||||
|
||||
By default, secrets are injected as environment variables. The two required fields are `name` and `secrets`.
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
# Chocolatey install script template for gman. '$version' and '$hash_64' are
# placeholders substituted by the release pipeline before packaging, which is
# why they appear inside single-quoted (non-expanding) strings.
$ErrorActionPreference = 'Stop';

$PackageName = 'gman'
# Directory this script lives in; Chocolatey extracts the package here.
$toolsDir = "$(Split-Path -parent $MyInvocation.MyCommand.Definition)"
$url64 = 'https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-windows.tar.gz'
$checksum64 = '$hash_64'

$packageArgs = @{
packageName = $packageName
softwareName = $packageName
unzipLocation = $toolsDir
fileType = 'exe'
url = $url64
checksum = $checksum64
checksumType = 'sha256'

}
Install-ChocolateyZipPackage @packageArgs
# The release asset is a .tar.gz: the first unzip only strips the gzip layer,
# leaving a .tar file in tools\. Extract that inner tar to expose the binary.
$File = Get-ChildItem -File -Path $env:ChocolateyInstall\lib\$packageName\tools\ -Filter *.tar
Get-ChocolateyUnzip -fileFullPath $File.FullName -destination $env:ChocolateyInstall\lib\$packageName\tools\
|
||||
@@ -1,53 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Read this before creating packages: https://chocolatey.org/docs/create-packages -->
|
||||
<!-- It is especially important to read the above link to understand additional requirements when publishing packages to the community feed aka dot org (https://chocolatey.org/packages). -->
|
||||
|
||||
<!-- Test your packages in a test environment: https://github.com/chocolatey/chocolatey-test-environment -->
|
||||
|
||||
<!--
|
||||
This is a nuspec. It mostly adheres to https://docs.nuget.org/create/Nuspec-Reference. Chocolatey uses a special version of NuGet.Core that allows us to do more than was initially possible. As such there are certain things to be aware of:
|
||||
|
||||
* the package xmlns schema url may cause issues with nuget.exe
|
||||
* Any of the following elements can ONLY be used by choco tools - projectSourceUrl, docsUrl, mailingListUrl, bugTrackerUrl, packageSourceUrl, provides, conflicts, replaces
|
||||
* nuget.exe can still install packages with those elements but they are ignored. Any authoring tools or commands will error on those elements
|
||||
-->
|
||||
|
||||
<!-- You can embed software files directly into packages, as long as you are not bound by distribution rights. -->
|
||||
<!-- * If you are an organization making private packages, you probably have no issues here -->
|
||||
<!-- * If you are releasing to the community feed, you need to consider distribution rights. -->
|
||||
<!-- Do not remove this test for UTF-8: if “Ω” doesn’t appear as greek uppercase omega letter enclosed in quotation marks, you should use an editor that supports UTF-8, not this one. -->
|
||||
<package xmlns="http://schemas.microsoft.com/packaging/2015/06/nuspec.xsd">
|
||||
<metadata>
|
||||
<!-- == PACKAGE SPECIFIC SECTION == -->
|
||||
<id>gman</id>
|
||||
<version>$version</version>
|
||||
|
||||
<!-- == SOFTWARE SPECIFIC SECTION == -->
|
||||
<!-- This section is about the software itself -->
|
||||
<title>G-Man</title>
|
||||
<authors>Alex Clarke</authors>
|
||||
<projectUrl>https://github.com/Dark-Alex-17/gman</projectUrl>
|
||||
<licenseUrl>https://github.com/Dark-Alex-17/gman/blob/main/LICENSE</licenseUrl>
|
||||
<requireLicenseAcceptance>true</requireLicenseAcceptance>
|
||||
<projectSourceUrl>https://github.com/Dark-Alex-17/gman</projectSourceUrl>
|
||||
<docsUrl>https://github.com/Dark-Alex-17/gman/blob/main/README.md</docsUrl>
|
||||
<bugTrackerUrl>https://github.com/Dark-Alex-17/gman/issues</bugTrackerUrl>
|
||||
<tags>cli cross-platform terminal credential-management secret-management rust</tags>
|
||||
<summary>Universal command line credential management and injection tool</summary>
|
||||
<description>
|
||||
Universal command line credential management and injection tool.
|
||||
|
||||
**Usage**
|
||||
To get started, run `gman --help` in a terminal.
|
||||
|
||||
For more [documentation and usage](https://github.com/Dark-Alex-17/gman/blob/main/README.md), see the [official repo](https://github.com/Dark-Alex-17/gman).
|
||||
|
||||
</description>
|
||||
<releaseNotes>https://github.com/Dark-Alex-17/gman/releases/tag/v$version/</releaseNotes>
|
||||
</metadata>
|
||||
<files>
|
||||
<!-- this section controls what actually gets packaged into the Chocolatey package -->
|
||||
<file src="tools\**" target="tools" />
|
||||
<!--Building from Linux? You may need this instead: <file src="tools/**" target="tools" />-->
|
||||
</files>
|
||||
</package>
|
||||
@@ -1,28 +0,0 @@
|
||||
import hashlib
import sys
from string import Template

# Force UTF-8 output so the rendered package file prints correctly regardless
# of the host console's default encoding.
sys.stdout.reconfigure(encoding='utf-8')

# Positional arguments: <release tag> <template path> <output path> <sha256>
args = sys.argv
version = args[1].replace("v", "")
template_file_path = args[2]
generated_file_path = args[3]

# Deployment files
hash_64 = args[4].strip()

print("Generating formula")
print(f" VERSION: {version}")
print(f" TEMPLATE PATH: {template_file_path}")
print(f" SAVING AT: {generated_file_path}")
print(f" HASH: {hash_64}")

# Render the template, substituting only the placeholders we know about
# (safe_substitute leaves any other '$name' tokens untouched).
with open(template_file_path, "r", encoding="utf-8") as template_file:
    template = Template(template_file.read())
    substitute = template.safe_substitute(version=version, hash_64=hash_64)
    print("\n================== Generated package file ==================\n")
    print(substitute)
    print("\n============================================================\n")

# Persist the rendered package file for the release pipeline to pick up.
with open(generated_file_path, "w", encoding="utf-8") as generated_file:
    generated_file.write(substitute)
|
||||
@@ -1,16 +1,16 @@
|
||||
# Documentation: https://docs.brew.sh/Formula-Cookbook
|
||||
# https://rubydoc.brew.sh/Formula
|
||||
class GMan < Formula
|
||||
class Gman < Formula
|
||||
desc "Universal command line credential management and injection tool"
|
||||
homepage "https://github.com/Dark-Alex-17/gman"
|
||||
if OS.mac? and Hardware::CPU.arm?
|
||||
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-macos-arm64.tar.gz"
|
||||
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-aarch64-apple-darwin.tar.gz"
|
||||
sha256 "$hash_mac_arm"
|
||||
elsif OS.mac? and Hardware::CPU.intel?
|
||||
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-macos.tar.gz"
|
||||
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-x86_64-apple-darwin.tar.gz"
|
||||
sha256 "$hash_mac"
|
||||
else
|
||||
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-linux-musl.tar.gz"
|
||||
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-x86_64-unknown-linux-musl.tar.gz"
|
||||
sha256 "$hash_linux"
|
||||
end
|
||||
version "$version"
|
||||
|
||||
@@ -0,0 +1,139 @@
|
||||
<#
|
||||
gman installer (Windows/PowerShell 5+ and PowerShell 7)
|
||||
|
||||
Examples:
|
||||
powershell -NoProfile -ExecutionPolicy Bypass -Command "iwr -useb https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.ps1 | iex"
|
||||
pwsh -c "& ([scriptblock]::Create((irm https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.ps1))) -Version vX.Y.Z"
|
||||
|
||||
Parameters:
|
||||
-Version <tag> (default: latest)
|
||||
-BinDir <path> (default: %LOCALAPPDATA%\gman\bin on Windows; ~/.local/bin on *nix PowerShell)
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[string]$Version = $env:GMAN_VERSION,
|
||||
[string]$BinDir = $env:BIN_DIR
|
||||
)
|
||||
|
||||
$Repo = 'Dark-Alex-17/gman'
|
||||
|
||||
function Write-Info($msg) { Write-Host "[gman-install] $msg" }
|
||||
function Fail($msg) { Write-Error $msg; exit 1 }
|
||||
|
||||
Add-Type -AssemblyName System.Runtime
|
||||
$isWin = [System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform([System.Runtime.InteropServices.OSPlatform]::Windows)
|
||||
$isMac = [System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform([System.Runtime.InteropServices.OSPlatform]::OSX)
|
||||
$isLin = [System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform([System.Runtime.InteropServices.OSPlatform]::Linux)
|
||||
|
||||
if ($isWin) { $os = 'windows' }
|
||||
elseif ($isMac) { $os = 'darwin' }
|
||||
elseif ($isLin) { $os = 'linux' }
|
||||
else { Fail "Unsupported OS" }
|
||||
|
||||
switch ([System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture) {
|
||||
'X64' { $arch = 'x86_64' }
|
||||
'Arm64'{ $arch = 'aarch64' }
|
||||
default { Fail "Unsupported arch: $([System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture)" }
|
||||
}
|
||||
|
||||
# Resolve the install directory when the caller didn't specify one:
# Windows -> %LOCALAPPDATA%\gman\bin, *nix PowerShell -> ~/.local/bin.
if (-not $BinDir) {
    if ($isWin) {
        $BinDir = Join-Path $env:LOCALAPPDATA 'gman\bin'
    } else {
        # $HOME is a read-only automatic variable in PowerShell; assigning to
        # $home throws. Use a differently named local variable instead.
        $homeDir = $env:HOME
        if (-not $homeDir) { $homeDir = (Get-Item -Path ~).FullName }
        $BinDir = Join-Path $homeDir '.local/bin'
    }
}
New-Item -ItemType Directory -Force -Path $BinDir | Out-Null
|
||||
|
||||
Write-Info "Target: $os-$arch"
|
||||
|
||||
$apiBase = "https://api.github.com/repos/$Repo/releases"
|
||||
$relUrl = if ($Version) { "$apiBase/tags/$Version" } else { "$apiBase/latest" }
|
||||
Write-Info "Fetching release: $relUrl"
|
||||
try {
|
||||
$release = Invoke-RestMethod -UseBasicParsing -Headers @{ 'User-Agent' = 'gman-installer' } -Uri $relUrl -Method GET
|
||||
} catch { Fail "Failed to fetch release metadata. $_" }
|
||||
if (-not $release.assets) { Fail "No assets found in the release." }
|
||||
|
||||
$candidates = @()
|
||||
if ($os -eq 'windows') {
|
||||
if ($arch -eq 'x86_64') { $candidates += 'gman-x86_64-pc-windows-msvc.zip' }
|
||||
else { $candidates += 'gman-aarch64-pc-windows-msvc.zip' }
|
||||
} elseif ($os -eq 'darwin') {
|
||||
if ($arch -eq 'x86_64') { $candidates += 'gman-x86_64-apple-darwin.tar.gz' }
|
||||
else { $candidates += 'gman-aarch64-apple-darwin.tar.gz' }
|
||||
} elseif ($os -eq 'linux') {
|
||||
if ($arch -eq 'x86_64') {
|
||||
$candidates += 'gman-x86_64-unknown-linux-gnu.tar.gz'
|
||||
$candidates += 'gman-x86_64-unknown-linux-musl.tar.gz'
|
||||
} else {
|
||||
$candidates += 'gman-aarch64-unknown-linux-musl.tar.gz'
|
||||
}
|
||||
} else {
|
||||
Fail "Unsupported OS for this installer: $os"
|
||||
}
|
||||
|
||||
$asset = $null
|
||||
foreach ($c in $candidates) {
|
||||
$asset = $release.assets | Where-Object { $_.name -eq $c } | Select-Object -First 1
|
||||
if ($asset) { break }
|
||||
}
|
||||
if (-not $asset) {
|
||||
Write-Error "No matching asset found for $os-$arch. Tried:"; $candidates | ForEach-Object { Write-Error " - $_" }
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Info "Selected asset: $($asset.name)"
|
||||
Write-Info "Download URL: $($asset.browser_download_url)"
|
||||
|
||||
$tmp = New-Item -ItemType Directory -Force -Path ([IO.Path]::Combine([IO.Path]::GetTempPath(), "gman-$(Get-Random)"))
|
||||
$archive = Join-Path $tmp.FullName 'asset'
|
||||
try { Invoke-WebRequest -UseBasicParsing -Headers @{ 'User-Agent' = 'gman-installer' } -Uri $asset.browser_download_url -OutFile $archive } catch { Fail "Failed to download asset. $_" }
|
||||
|
||||
$extractDir = Join-Path $tmp.FullName 'extract'; New-Item -ItemType Directory -Force -Path $extractDir | Out-Null
|
||||
|
||||
if ($asset.name -match '\.zip$') {
|
||||
Add-Type -AssemblyName System.IO.Compression.FileSystem
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory($archive, $extractDir)
|
||||
} elseif ($asset.name -match '\.tar\.gz$' -or $asset.name -match '\.tgz$') {
|
||||
$tar = Get-Command tar -ErrorAction SilentlyContinue
|
||||
if ($tar) { & $tar.FullName -xzf $archive -C $extractDir }
|
||||
else { Fail "Asset is tar archive but 'tar' is not available." }
|
||||
} else {
|
||||
try { Add-Type -AssemblyName System.IO.Compression.FileSystem; [System.IO.Compression.ZipFile]::ExtractToDirectory($archive, $extractDir) }
|
||||
catch {
|
||||
$tar = Get-Command tar -ErrorAction SilentlyContinue
|
||||
if ($tar) { & $tar.FullName -xf $archive -C $extractDir } else { Fail "Unknown archive format; neither zip nor tar workable." }
|
||||
}
|
||||
}
|
||||
|
||||
$bin = $null
|
||||
Get-ChildItem -Recurse -File $extractDir | ForEach-Object {
|
||||
if ($isWin) { if ($_.Name -ieq 'gman.exe') { $bin = $_.FullName } }
|
||||
else { if ($_.Name -ieq 'gman') { $bin = $_.FullName } }
|
||||
}
|
||||
if (-not $bin) { Fail "Could not find gman binary inside the archive." }
|
||||
|
||||
if (-not $isWin) { try { & chmod +x -- $bin } catch {} }
|
||||
|
||||
$exec = if ($isWin) { 'gman.exe'} else { 'gman' }
|
||||
$dest = Join-Path $BinDir $exec
|
||||
Copy-Item -Force $bin $dest
|
||||
Write-Info "Installed: $dest"
|
||||
|
||||
if ($isWin) {
|
||||
$pathParts = ($env:Path -split ';') | Where-Object { $_ -ne '' }
|
||||
if ($pathParts -notcontains $BinDir) {
|
||||
$userPath = [Environment]::GetEnvironmentVariable('Path', 'User'); if (-not $userPath) { $userPath = '' }
|
||||
if (-not ($userPath -split ';' | Where-Object { $_ -eq $BinDir })) {
|
||||
$newUserPath = if ($userPath.Trim().Length -gt 0) { "$userPath;$BinDir" } else { $BinDir }
|
||||
[Environment]::SetEnvironmentVariable('Path', $newUserPath, 'User')
|
||||
Write-Info "Added to User PATH: $BinDir (restart shell to take effect)"
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (-not ($env:PATH -split ':' | Where-Object { $_ -eq $BinDir })) {
|
||||
Write-Info "Note: $BinDir is not in PATH. Add it to your shell profile."
|
||||
}
|
||||
}
|
||||
|
||||
Write-Info "Done. Try: gman --help"
|
||||
|
||||
@@ -0,0 +1,220 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# gman installer (Linux/macOS)
|
||||
#
|
||||
# Usage examples:
|
||||
# curl -fsSL https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.sh | bash
|
||||
# curl -fsSL https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.sh | bash -s -- --version vX.Y.Z
|
||||
# BIN_DIR="$HOME/.local/bin" bash scripts/install_gman.sh
|
||||
#
|
||||
# Flags / Env:
|
||||
# --version <tag> Release tag (default: latest). Or set GMAN_VERSION.
|
||||
# --bin-dir <dir> Install directory (default: /usr/local/bin or ~/.local/bin). Or set BIN_DIR.
|
||||
|
||||
REPO="Dark-Alex-17/gman"
|
||||
VERSION="${GMAN_VERSION:-}"
|
||||
BIN_DIR="${BIN_DIR:-}"
|
||||
|
||||
usage() {
|
||||
echo "gman installer (Linux/macOS)"
|
||||
echo
|
||||
echo "Options:"
|
||||
echo " --version <tag> Release tag (default: latest)"
|
||||
echo " --bin-dir <dir> Install directory (default: /usr/local/bin or ~/.local/bin)"
|
||||
echo " -h, --help Show help"
|
||||
}
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--version) VERSION="$2"; shift 2;;
|
||||
--bin-dir) BIN_DIR="$2"; shift 2;;
|
||||
-h|--help) usage; exit 0;;
|
||||
*) echo "Unknown argument: $1" >&2; usage; exit 2;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ -z "${BIN_DIR}" ]]; then
|
||||
if [[ -w "/usr/local/bin" ]]; then
|
||||
BIN_DIR="/usr/local/bin"
|
||||
else
|
||||
BIN_DIR="${HOME}/.local/bin"
|
||||
fi
|
||||
fi
|
||||
mkdir -p "${BIN_DIR}"
|
||||
|
||||
log() {
|
||||
echo "[gman-install] $*"
|
||||
}
|
||||
|
||||
need_cmd() {
|
||||
if ! command -v "$1" >/dev/null 2>&1; then
|
||||
echo "Error: required command '$1' not found" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
need_cmd uname
|
||||
need_cmd mktemp
|
||||
need_cmd tar
|
||||
|
||||
if command -v curl >/dev/null 2>&1; then
|
||||
DL=curl
|
||||
elif command -v wget >/dev/null 2>&1; then
|
||||
DL=wget
|
||||
else
|
||||
echo "Error: need curl or wget" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
UNAME_OS=$(uname -s | tr '[:upper:]' '[:lower:]')
|
||||
case "$UNAME_OS" in
|
||||
linux) OS=linux ;;
|
||||
darwin) OS=darwin ;;
|
||||
*) echo "Error: unsupported OS '$UNAME_OS'" >&2; exit 1;;
|
||||
esac
|
||||
|
||||
UNAME_ARCH=$(uname -m)
|
||||
case "$UNAME_ARCH" in
|
||||
x86_64|amd64) ARCH=x86_64 ;;
|
||||
aarch64|arm64) ARCH=aarch64 ;;
|
||||
*) echo "Error: unsupported arch '$UNAME_ARCH'" >&2; exit 1;;
|
||||
esac
|
||||
|
||||
log "Target: ${OS}-${ARCH}"
|
||||
|
||||
API_BASE="https://api.github.com/repos/${REPO}/releases"
|
||||
if [[ -z "${VERSION}" ]]; then
|
||||
RELEASE_URL="${API_BASE}/latest"
|
||||
else
|
||||
RELEASE_URL="${API_BASE}/tags/${VERSION}"
|
||||
fi
|
||||
|
||||
http_get() {
|
||||
if [[ "$DL" == "curl" ]]; then
|
||||
curl -fsSL -H 'User-Agent: gman-installer' "$1"
|
||||
else
|
||||
wget -qO- --header='User-Agent: gman-installer' "$1"
|
||||
fi
|
||||
}
|
||||
|
||||
TMPDIR="$(mktemp -d)"
|
||||
trap 'rm -rf "$TMPDIR"' EXIT
|
||||
|
||||
log "Fetching release metadata from $RELEASE_URL"
|
||||
JSON="$TMPDIR/release.json"
|
||||
if ! http_get "$RELEASE_URL" > "$JSON"; then
|
||||
echo "Error: failed to fetch release metadata. Check version tag." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ASSET_CANDIDATES=()
|
||||
if [[ "$OS" == "darwin" ]]; then
|
||||
if [[ "$ARCH" == "x86_64" ]]; then
|
||||
ASSET_CANDIDATES+=("gman-x86_64-apple-darwin.tar.gz")
|
||||
else
|
||||
ASSET_CANDIDATES+=("gman-aarch64-apple-darwin.tar.gz")
|
||||
fi
|
||||
elif [[ "$OS" == "linux" ]]; then
|
||||
if [[ "$ARCH" == "x86_64" ]]; then
|
||||
LIBC="musl"
|
||||
if command -v getconf >/dev/null 2>&1 && getconf GNU_LIBC_VERSION >/dev/null 2>&1; then LIBC="gnu"; fi
|
||||
if ldd --version 2>&1 | grep -qi glibc; then LIBC="gnu"; fi
|
||||
|
||||
if [[ "$LIBC" == "gnu" ]]; then
|
||||
ASSET_CANDIDATES+=("gman-x86_64-unknown-linux-gnu.tar.gz")
|
||||
fi
|
||||
|
||||
ASSET_CANDIDATES+=("gman-x86_64-unknown-linux-musl.tar.gz")
|
||||
else
|
||||
ASSET_CANDIDATES+=("gman-aarch64-unknown-linux-musl.tar.gz")
|
||||
fi
|
||||
else
|
||||
echo "Error: unsupported OS for this installer: $OS" >&2; exit 1
|
||||
fi
|
||||
|
||||
# Pick the first candidate asset present in the release and resolve its
# download URL. The JSON is parsed with grep/sed/awk only (no jq dependency).
# POSIX character classes ([[:space:]]) are used instead of \s: \s is a GNU
# extension and does not work with the BSD grep/sed/awk shipped on macOS,
# which this installer explicitly supports.
ASSET_NAME=""; ASSET_URL=""
for candidate in "${ASSET_CANDIDATES[@]}"; do
  # Extract all asset names from the release JSON and look for an exact match.
  NAME=$(grep -oE '"name":[[:space:]]*"[^"]+"' "$JSON" | sed 's/"name":[[:space:]]*"//; s/"$//' | grep -Fx "$candidate" || true)
  if [[ -n "$NAME" ]]; then
    ASSET_NAME="$NAME"
    # Walk the JSON line by line: remember whether the most recent "name"
    # matched, and print the first browser_download_url that follows it.
    ASSET_URL=$(awk -v pat="$NAME" '
      BEGIN{ want=0 }
      /"name"/ {
        line=$0;
        gsub(/^[[:space:]]+|[[:space:]]+$/,"",line);
        gsub(/"name"[[:space:]]*:[[:space:]]*"|"/ ,"", line);
        want = (line==pat) ? 1 : 0;
        next
      }
      want==1 && /"browser_download_url"/ {
        u=$0;
        gsub(/^[[:space:]]+|[[:space:]]+$/,"",u);
        gsub(/.*"browser_download_url"[[:space:]]*:[[:space:]]*"|".*/ ,"", u);
        print u;
        exit
      }
    ' "$JSON")
    if [[ -n "$ASSET_URL" ]]; then break; fi
  fi
done

if [[ -z "$ASSET_URL" ]]; then
  echo "Error: no matching asset found for ${OS}-${ARCH}. Tried:" >&2
  for c in "${ASSET_CANDIDATES[@]}"; do echo "  - $c" >&2; done
  exit 1
fi
|
||||
|
||||
log "Selected asset: $ASSET_NAME"
|
||||
log "Download URL: $ASSET_URL"
|
||||
|
||||
ARCHIVE="$TMPDIR/asset"
|
||||
if [[ "$DL" == "curl" ]]; then
|
||||
curl -fL -H 'User-Agent: gman-installer' "$ASSET_URL" -o "$ARCHIVE"
|
||||
else
|
||||
wget -q --header='User-Agent: gman-installer' "$ASSET_URL" -O "$ARCHIVE"
|
||||
fi
|
||||
|
||||
WORK="$TMPDIR/work"; mkdir -p "$WORK"
|
||||
EXTRACTED_DIR="$WORK/extracted"; mkdir -p "$EXTRACTED_DIR"
|
||||
|
||||
if tar -tf "$ARCHIVE" >/dev/null 2>&1; then
|
||||
tar -xzf "$ARCHIVE" -C "$EXTRACTED_DIR"
|
||||
else
|
||||
if command -v unzip >/dev/null 2>&1; then
|
||||
unzip -q "$ARCHIVE" -d "$EXTRACTED_DIR"
|
||||
else
|
||||
echo "Error: unknown archive format; install 'unzip'" >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
BIN_PATH=""
|
||||
while IFS= read -r -d '' f; do
|
||||
base=$(basename "$f")
|
||||
if [[ "$base" == "gman" ]]; then
|
||||
BIN_PATH="$f"
|
||||
break
|
||||
fi
|
||||
done < <(find "$EXTRACTED_DIR" -type f -print0)
|
||||
|
||||
if [[ -z "$BIN_PATH" ]]; then
|
||||
echo "Error: could not find 'gman' binary in the archive" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
chmod +x "$BIN_PATH"
|
||||
install -m 0755 "$BIN_PATH" "${BIN_DIR}/gman"
|
||||
|
||||
log "Installed: ${BIN_DIR}/gman"
|
||||
|
||||
case ":$PATH:" in
|
||||
*":${BIN_DIR}:"*) ;;
|
||||
*)
|
||||
log "Note: ${BIN_DIR} is not in PATH. Add it, e.g.:"
|
||||
log " export PATH=\"${BIN_DIR}:\$PATH\""
|
||||
;;
|
||||
esac
|
||||
|
||||
log "Done. Try: gman --help"
|
||||
|
||||
+205
-35
@@ -1,21 +1,22 @@
|
||||
use crate::command::preview_command;
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use clap_complete::CompletionCandidate;
|
||||
use futures::future::join_all;
|
||||
use gman::config::{Config, RunConfig};
|
||||
use gman::providers::SecretProvider;
|
||||
use gman::config::{Config, RunConfig, load_config};
|
||||
use log::{debug, error};
|
||||
use regex::Regex;
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::OsString;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use tokio::runtime::Handle;
|
||||
|
||||
const ARG_FORMAT_PLACEHOLDER_KEY: &str = "{{key}}";
|
||||
const ARG_FORMAT_PLACEHOLDER_VALUE: &str = "{{value}}";
|
||||
|
||||
pub async fn wrap_and_run_command(
|
||||
secrets_provider: &mut dyn SecretProvider,
|
||||
provider: Option<String>,
|
||||
config: &Config,
|
||||
tokens: Vec<OsString>,
|
||||
profile_name: Option<String>,
|
||||
@@ -36,6 +37,9 @@ pub async fn wrap_and_run_command(
|
||||
.find(|c| c.name.as_deref() == Some(run_config_profile_name))
|
||||
});
|
||||
if let Some(run_cfg) = run_config_opt {
|
||||
let mut provider_config =
|
||||
config.extract_provider_config(provider.or(run_cfg.provider.clone()))?;
|
||||
let secrets_provider = provider_config.extract_provider();
|
||||
let secrets_result_futures = run_cfg
|
||||
.secrets
|
||||
.as_ref()
|
||||
@@ -163,7 +167,7 @@ fn generate_files_secret_injections(
|
||||
secrets: HashMap<&str, String>,
|
||||
run_config: &RunConfig,
|
||||
) -> Result<Vec<(PathBuf, String, String)>> {
|
||||
let re = Regex::new(r"\{\{(.+)\}\}")?;
|
||||
let re = Regex::new(r"\{\{(.+)}}")?;
|
||||
let mut results = Vec::new();
|
||||
for file in run_config
|
||||
.files
|
||||
@@ -251,34 +255,82 @@ pub fn parse_args(
|
||||
Ok(args)
|
||||
}
|
||||
|
||||
pub fn run_config_completer(current: &OsStr) -> Vec<CompletionCandidate> {
|
||||
let cur = current.to_string_lossy();
|
||||
match load_config(true) {
|
||||
Ok(config) => {
|
||||
if let Some(run_configs) = config.run_configs {
|
||||
run_configs
|
||||
.iter()
|
||||
.filter(|rc| {
|
||||
rc.name
|
||||
.as_ref()
|
||||
.expect("run config has no name")
|
||||
.starts_with(&*cur)
|
||||
})
|
||||
.map(|rc| {
|
||||
CompletionCandidate::new(rc.name.as_ref().expect("run config has no name"))
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
Err(_) => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn provider_completer(current: &OsStr) -> Vec<CompletionCandidate> {
|
||||
let cur = current.to_string_lossy();
|
||||
match load_config(true) {
|
||||
Ok(config) => config
|
||||
.providers
|
||||
.iter()
|
||||
.filter(|pc| {
|
||||
pc.name
|
||||
.as_ref()
|
||||
.expect("run config has no name")
|
||||
.starts_with(&*cur)
|
||||
})
|
||||
.map(|pc| CompletionCandidate::new(pc.name.as_ref().expect("provider has no name")))
|
||||
.collect(),
|
||||
Err(_) => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
/// Shell-completion helper for secret names: lists the secrets stored in the
/// default provider and offers those that start with the text typed so far.
pub fn secrets_completer(current: &OsStr) -> Vec<CompletionCandidate> {
    let cur = current.to_string_lossy();
    match load_config(true) {
        Ok(config) => {
            // `None` = no explicit provider, so completion always queries the
            // configured default provider.
            let mut provider_config = match config.extract_provider_config(None) {
                Ok(pc) => pc,
                // Completers must stay silent on failure; offer no candidates.
                Err(_) => return vec![],
            };
            let secrets_provider = provider_config.extract_provider();
            // `list_secrets` is async but shell completion runs synchronously,
            // so block on the current Tokio runtime. `block_in_place` tells the
            // runtime this worker thread will block, avoiding a starvation of
            // other tasks scheduled on it.
            // NOTE(review): `Handle::current()` panics when called outside a
            // Tokio runtime — assumes completion is always invoked from within
            // one; confirm against the binary's entry point.
            let h = Handle::current();
            tokio::task::block_in_place(|| h.block_on(secrets_provider.list_secrets()))
                .unwrap_or_default()
                .into_iter()
                .filter(|s| s.starts_with(&*cur))
                .map(CompletionCandidate::new)
                .collect()
        }
        Err(_) => vec![],
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::cli::generate_files_secret_injections;
|
||||
use gman::config::get_config_file_path;
|
||||
use gman::config::{Config, RunConfig};
|
||||
use pretty_assertions::{assert_eq, assert_str_eq};
|
||||
use serial_test::serial;
|
||||
use std::collections::HashMap;
|
||||
use std::env as std_env;
|
||||
use std::ffi::OsString;
|
||||
|
||||
struct DummyProvider;
|
||||
#[async_trait::async_trait]
|
||||
impl SecretProvider for DummyProvider {
|
||||
fn name(&self) -> &'static str {
|
||||
"Dummy"
|
||||
}
|
||||
async fn get_secret(&self, key: &str) -> Result<String> {
|
||||
Ok(format!("{}_VAL", key))
|
||||
}
|
||||
async fn set_secret(&self, _key: &str, _value: &str) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
async fn delete_secret(&self, _key: &str) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
async fn sync(&mut self) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[test]
|
||||
fn test_generate_files_secret_injections() {
|
||||
@@ -290,6 +342,7 @@ mod tests {
|
||||
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["testing/SOME-secret".to_string()]),
|
||||
files: Some(vec![file_path.clone()]),
|
||||
flag: None,
|
||||
@@ -309,6 +362,7 @@ mod tests {
|
||||
fn test_parse_args_insert_and_append() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("docker".into()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["api_key".into()]),
|
||||
files: None,
|
||||
flag: Some("-e".into()),
|
||||
@@ -347,10 +401,8 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn test_wrap_and_run_command_no_profile() {
|
||||
let cfg = Config::default();
|
||||
let mut dummy = DummyProvider;
|
||||
let prov: &mut dyn SecretProvider = &mut dummy;
|
||||
let tokens = vec![OsString::from("echo"), OsString::from("hi")];
|
||||
let err = wrap_and_run_command(prov, &cfg, tokens, None, true)
|
||||
let err = wrap_and_run_command(None, &cfg, tokens, None, true)
|
||||
.await
|
||||
.unwrap_err();
|
||||
assert!(err.to_string().contains("No run profile found"));
|
||||
@@ -361,6 +413,7 @@ mod tests {
|
||||
// Create a config with a matching run profile for command "echo"
|
||||
let run_cfg = RunConfig {
|
||||
name: Some("echo".into()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["api_key".into()]),
|
||||
files: None,
|
||||
flag: None,
|
||||
@@ -371,14 +424,131 @@ mod tests {
|
||||
run_configs: Some(vec![run_cfg]),
|
||||
..Config::default()
|
||||
};
|
||||
let mut dummy = DummyProvider;
|
||||
let prov: &mut dyn SecretProvider = &mut dummy;
|
||||
|
||||
// Capture stderr for dry_run preview
|
||||
let tokens = vec![OsString::from("echo"), OsString::from("hello")];
|
||||
// Best-effort: ensure function does not error under dry_run
|
||||
let res = wrap_and_run_command(prov, &cfg, tokens, None, true).await;
|
||||
assert!(res.is_ok());
|
||||
// Not asserting output text to keep test platform-agnostic
|
||||
let err = wrap_and_run_command(None, &cfg, tokens, None, true)
|
||||
.await
|
||||
.expect_err("expected failed secret resolution in dry_run");
|
||||
assert!(err.to_string().contains("Failed to fetch"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial]
|
||||
fn test_run_config_completer_filters_by_prefix() {
|
||||
let td = tempdir().unwrap();
|
||||
let xdg = td.path().join("xdg");
|
||||
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
|
||||
let cfg_path = get_config_file_path().unwrap();
|
||||
let app_dir = cfg_path.parent().unwrap().to_path_buf();
|
||||
fs::create_dir_all(&app_dir).unwrap();
|
||||
|
||||
let yaml = indoc::indoc! {
|
||||
"---
|
||||
default_provider: local
|
||||
providers:
|
||||
- name: local
|
||||
type: local
|
||||
run_configs:
|
||||
- name: echo
|
||||
secrets: [API_KEY]
|
||||
- name: docker
|
||||
secrets: [DB_PASSWORD]
|
||||
- name: aws
|
||||
secrets: [AWS_ACCESS_KEY_ID]
|
||||
"
|
||||
};
|
||||
fs::write(app_dir.join("config.yml"), yaml).unwrap();
|
||||
|
||||
let out = run_config_completer(OsStr::new("do"));
|
||||
assert_eq!(out.len(), 1);
|
||||
// Compare via debug string to avoid depending on crate internals
|
||||
let rendered = format!("{:?}", &out[0]);
|
||||
assert!(rendered.contains("docker"), "got: {}", rendered);
|
||||
|
||||
unsafe { std_env::remove_var("XDG_CONFIG_HOME") };
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial]
|
||||
fn test_provider_completer_lists_matching_providers() {
|
||||
let td = tempdir().unwrap();
|
||||
let xdg = td.path().join("xdg");
|
||||
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
|
||||
let cfg_path = get_config_file_path().unwrap();
|
||||
let app_dir = cfg_path.parent().unwrap().to_path_buf();
|
||||
fs::create_dir_all(&app_dir).unwrap();
|
||||
|
||||
let yaml = indoc::indoc! {
|
||||
"---
|
||||
default_provider: local
|
||||
providers:
|
||||
- name: local
|
||||
type: local
|
||||
- name: prod
|
||||
type: local
|
||||
run_configs:
|
||||
- name: echo
|
||||
secrets: [API_KEY]
|
||||
"
|
||||
};
|
||||
fs::write(app_dir.join("config.yml"), yaml).unwrap();
|
||||
|
||||
// Prefix 'p' should match only 'prod'
|
||||
let out = provider_completer(OsStr::new("p"));
|
||||
assert_eq!(out.len(), 1);
|
||||
let rendered = format!("{:?}", &out[0]);
|
||||
assert!(rendered.contains("prod"), "got: {}", rendered);
|
||||
|
||||
// Empty prefix returns at least both providers
|
||||
let out_all = provider_completer(OsStr::new(""));
|
||||
assert!(out_all.len() >= 2);
|
||||
|
||||
unsafe { std_env::remove_var("XDG_CONFIG_HOME") };
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
#[serial]
|
||||
async fn test_secrets_completer_filters_keys_by_prefix() {
|
||||
let td = tempdir().unwrap();
|
||||
let xdg = td.path().join("xdg");
|
||||
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
|
||||
let cfg_path = get_config_file_path().unwrap();
|
||||
let app_dir = cfg_path.parent().unwrap().to_path_buf();
|
||||
fs::create_dir_all(&app_dir).unwrap();
|
||||
|
||||
let yaml = indoc::indoc! {
|
||||
"---
|
||||
default_provider: local
|
||||
providers:
|
||||
- name: local
|
||||
type: local
|
||||
run_configs:
|
||||
- name: echo
|
||||
secrets: [API_KEY]
|
||||
"
|
||||
};
|
||||
fs::write(app_dir.join("config.yml"), yaml).unwrap();
|
||||
|
||||
// Seed a minimal vault with keys (values are irrelevant for listing)
|
||||
let vault_yaml = indoc::indoc! {
|
||||
"---
|
||||
API_KEY: dummy
|
||||
DB_PASSWORD: dummy
|
||||
AWS_ACCESS_KEY_ID: dummy
|
||||
"
|
||||
};
|
||||
fs::write(app_dir.join("vault.yml"), vault_yaml).unwrap();
|
||||
|
||||
let out = secrets_completer(OsStr::new("AWS"));
|
||||
assert_eq!(out.len(), 1);
|
||||
let rendered = format!("{:?}", &out[0]);
|
||||
assert!(rendered.contains("AWS_ACCESS_KEY_ID"), "got: {}", rendered);
|
||||
|
||||
let out2 = secrets_completer(OsStr::new("DB_"));
|
||||
assert_eq!(out2.len(), 1);
|
||||
let rendered2 = format!("{:?}", &out2[0]);
|
||||
assert!(rendered2.contains("DB_PASSWORD"), "got: {}", rendered2);
|
||||
|
||||
unsafe { std_env::remove_var("XDG_CONFIG_HOME") };
|
||||
}
|
||||
}
|
||||
|
||||
+98
-24
@@ -1,19 +1,25 @@
|
||||
use crate::cli::provider_completer;
|
||||
use crate::cli::run_config_completer;
|
||||
use crate::cli::secrets_completer;
|
||||
use anyhow::{Context, Result};
|
||||
use clap::Subcommand;
|
||||
use clap::{
|
||||
CommandFactory, Parser, ValueEnum, crate_authors, crate_description, crate_name, crate_version,
|
||||
};
|
||||
use std::ffi::OsString;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use clap::Subcommand;
|
||||
use clap_complete::{ArgValueCompleter, CompleteEnv};
|
||||
use crossterm::execute;
|
||||
use crossterm::terminal::{LeaveAlternateScreen, disable_raw_mode};
|
||||
use gman::config::{get_config_file_path, load_config};
|
||||
use gman::config::{Config, get_config_file_path, load_config};
|
||||
use std::ffi::OsString;
|
||||
use std::io::{self, IsTerminal, Read, Write};
|
||||
use std::panic::PanicHookInfo;
|
||||
|
||||
use crate::cli::wrap_and_run_command;
|
||||
use crate::utils::persist_config_file;
|
||||
use dialoguer::Editor;
|
||||
use std::panic;
|
||||
use std::process::exit;
|
||||
use validator::Validate;
|
||||
|
||||
mod cli;
|
||||
mod command;
|
||||
@@ -46,11 +52,11 @@ struct Cli {
|
||||
output: Option<OutputFormat>,
|
||||
|
||||
/// Specify the secret provider to use (defaults to 'default_provider' in config (usually 'local'))
|
||||
#[arg(long, value_enum, global = true, env = "GMAN_PROVIDER")]
|
||||
#[arg(long, global = true, env = "GMAN_PROVIDER", add = ArgValueCompleter::new(provider_completer))]
|
||||
provider: Option<String>,
|
||||
|
||||
/// Specify a run profile to use when wrapping a command
|
||||
#[arg(long, short)]
|
||||
#[arg(long, short, add = ArgValueCompleter::new(run_config_completer))]
|
||||
profile: Option<String>,
|
||||
|
||||
/// Output the command that will be run instead of executing it
|
||||
@@ -72,14 +78,17 @@ struct Cli {
|
||||
#[derive(Subcommand, Clone, Debug)]
|
||||
enum Commands {
|
||||
/// Add a secret to the configured secret provider
|
||||
#[clap(aliases = &["set", "create"])]
|
||||
Add {
|
||||
/// Name of the secret to store
|
||||
name: String,
|
||||
},
|
||||
|
||||
/// Decrypt a secret and print the plaintext
|
||||
#[clap(alias = "show")]
|
||||
Get {
|
||||
/// Name of the secret to retrieve
|
||||
#[arg(add = ArgValueCompleter::new(secrets_completer))]
|
||||
name: String,
|
||||
},
|
||||
|
||||
@@ -87,33 +96,39 @@ enum Commands {
|
||||
/// If a provider does not support updating secrets, this command will return an error.
|
||||
Update {
|
||||
/// Name of the secret to update
|
||||
#[arg(add = ArgValueCompleter::new(secrets_completer))]
|
||||
name: String,
|
||||
},
|
||||
|
||||
/// Delete a secret from the configured secret provider
|
||||
#[clap(aliases = &["remove", "rm"])]
|
||||
Delete {
|
||||
/// Name of the secret to delete
|
||||
#[arg(add = ArgValueCompleter::new(secrets_completer))]
|
||||
name: String,
|
||||
},
|
||||
|
||||
/// List all secrets stored in the configured secret provider (if supported by the provider)
|
||||
/// If a provider does not support listing secrets, this command will return an error.
|
||||
#[clap(alias = "ls")]
|
||||
List {},
|
||||
|
||||
/// Sync secrets with remote storage (if supported by the provider)
|
||||
Sync {},
|
||||
|
||||
// TODO: Remove once all users have migrated their local vaults
|
||||
/// Migrate local vault secrets to the current secure encryption format.
|
||||
/// This is only needed if you have secrets encrypted with older versions of gman.
|
||||
/// Only works with the local provider.
|
||||
Migrate {},
|
||||
|
||||
/// Open and edit the config file in the default text editor
|
||||
Config {},
|
||||
|
||||
/// Wrap the provided command and supply it with secrets as environment variables or as
|
||||
/// configured in a corresponding run profile
|
||||
#[command(external_subcommand)]
|
||||
External(Vec<OsString>),
|
||||
|
||||
/// Generate shell completion scripts
|
||||
Completions {
|
||||
/// The shell to generate the script for
|
||||
#[arg(value_enum)]
|
||||
shell: clap_complete::Shell,
|
||||
},
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
@@ -124,6 +139,7 @@ async fn main() -> Result<()> {
|
||||
panic::set_hook(Box::new(|info| {
|
||||
panic_hook(info);
|
||||
}));
|
||||
CompleteEnv::with_factory(Cli::command).complete();
|
||||
let cli = Cli::parse();
|
||||
|
||||
if cli.show_log_path {
|
||||
@@ -140,7 +156,7 @@ async fn main() -> Result<()> {
|
||||
exit(1);
|
||||
}
|
||||
|
||||
let config = load_config()?;
|
||||
let config = load_config(true)?;
|
||||
let mut provider_config = config.extract_provider_config(cli.provider.clone())?;
|
||||
let secrets_provider = provider_config.extract_provider();
|
||||
|
||||
@@ -149,7 +165,7 @@ async fn main() -> Result<()> {
|
||||
let plaintext =
|
||||
read_all_stdin().with_context(|| "unable to read plaintext from stdin")?;
|
||||
secrets_provider
|
||||
.set_secret(&name, plaintext.trim_end())
|
||||
.set_secret(&name, &plaintext)
|
||||
.await
|
||||
.map(|_| match cli.output {
|
||||
Some(_) => (),
|
||||
@@ -180,7 +196,7 @@ async fn main() -> Result<()> {
|
||||
let plaintext =
|
||||
read_all_stdin().with_context(|| "unable to read plaintext from stdin")?;
|
||||
secrets_provider
|
||||
.update_secret(&name, plaintext.trim_end())
|
||||
.update_secret(&name, &plaintext)
|
||||
.await
|
||||
.map(|_| match cli.output {
|
||||
Some(_) => (),
|
||||
@@ -220,6 +236,27 @@ async fn main() -> Result<()> {
|
||||
}
|
||||
}
|
||||
}
|
||||
Commands::Config {} => {
|
||||
let uninterpolated_config = load_config(false)?;
|
||||
let config_yaml = serde_yaml::to_string(&uninterpolated_config)
|
||||
.with_context(|| "failed to serialize existing configuration")?;
|
||||
let new_config = Editor::new()
|
||||
.edit(&config_yaml)
|
||||
.with_context(|| "unable to process user changes")?;
|
||||
if new_config.is_none() {
|
||||
println!("✗ No changes made to configuration");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let new_config = new_config.unwrap();
|
||||
let new_config: Config = serde_yaml::from_str(&new_config)
|
||||
.with_context(|| "failed to parse updated configuration")?;
|
||||
new_config
|
||||
.validate()
|
||||
.with_context(|| "updated configuration is invalid")?;
|
||||
persist_config_file(&new_config)?;
|
||||
println!("✓ Configuration updated successfully");
|
||||
}
|
||||
Commands::Sync {} => {
|
||||
secrets_provider.sync().await.map(|_| {
|
||||
if cli.output.is_none() {
|
||||
@@ -227,14 +264,51 @@ async fn main() -> Result<()> {
|
||||
}
|
||||
})?;
|
||||
}
|
||||
Commands::External(tokens) => {
|
||||
wrap_and_run_command(secrets_provider, &config, tokens, cli.profile, cli.dry_run)
|
||||
.await?;
|
||||
// TODO: Remove once all users have migrated their local vaults
|
||||
Commands::Migrate {} => {
|
||||
use gman::providers::SupportedProvider;
|
||||
use gman::providers::local::LocalProvider;
|
||||
|
||||
let provider_config_for_migrate =
|
||||
config.extract_provider_config(cli.provider.clone())?;
|
||||
|
||||
let local_provider: LocalProvider = match provider_config_for_migrate.provider_type {
|
||||
SupportedProvider::Local { provider_def } => provider_def,
|
||||
_ => {
|
||||
anyhow::bail!("The migrate command only works with the local provider.");
|
||||
}
|
||||
};
|
||||
|
||||
println!("Migrating vault secrets to current secure format...");
|
||||
let result = local_provider.migrate_vault().await?;
|
||||
|
||||
if result.total == 0 {
|
||||
println!("Vault is empty, nothing to migrate.");
|
||||
} else {
|
||||
println!(
|
||||
"Migration complete: {} total, {} migrated, {} already current",
|
||||
result.total, result.migrated, result.already_current
|
||||
);
|
||||
|
||||
if !result.failed.is_empty() {
|
||||
eprintln!("\n⚠ Failed to migrate {} secret(s):", result.failed.len());
|
||||
for (key, error) in &result.failed {
|
||||
eprintln!(" - {}: {}", key, error);
|
||||
}
|
||||
}
|
||||
|
||||
if result.migrated > 0 {
|
||||
println!(
|
||||
"\n✓ Successfully migrated {} secret(s) to the secure format.",
|
||||
result.migrated
|
||||
);
|
||||
} else if result.failed.is_empty() {
|
||||
println!("\n✓ All secrets are already using the current secure format.");
|
||||
}
|
||||
}
|
||||
}
|
||||
Commands::Completions { shell } => {
|
||||
let mut cmd = Cli::command();
|
||||
let bin_name = cmd.get_name().to_string();
|
||||
clap_complete::generate(shell, &mut cmd, bin_name, &mut io::stdout());
|
||||
Commands::External(tokens) => {
|
||||
wrap_and_run_command(cli.provider, &config, tokens, cli.profile, cli.dry_run).await?;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
+25
-1
@@ -1,3 +1,5 @@
|
||||
use anyhow::{Context, Result};
|
||||
use gman::config::{Config, get_config_file_path};
|
||||
use log::LevelFilter;
|
||||
use log4rs::append::console::ConsoleAppender;
|
||||
use log4rs::append::file::FileAppender;
|
||||
@@ -44,7 +46,7 @@ pub fn init_logging_config() -> log4rs::Config {
|
||||
|
||||
pub fn get_log_path() -> PathBuf {
|
||||
let base_dir = dirs::cache_dir().unwrap_or_else(env::temp_dir);
|
||||
let log_dir = base_dir.join("gman");
|
||||
let log_dir = base_dir.join(env!("CARGO_CRATE_NAME"));
|
||||
|
||||
let dir = if let Err(e) = fs::create_dir_all(&log_dir) {
|
||||
eprintln!(
|
||||
@@ -60,6 +62,28 @@ pub fn get_log_path() -> PathBuf {
|
||||
dir.join("gman.log")
|
||||
}
|
||||
|
||||
pub fn persist_config_file(config: &Config) -> Result<()> {
|
||||
let config_path =
|
||||
get_config_file_path().with_context(|| "unable to determine config file path")?;
|
||||
let ext = config_path
|
||||
.extension()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("");
|
||||
if ext.eq_ignore_ascii_case("yml") || ext.eq_ignore_ascii_case("yaml") {
|
||||
if let Some(parent) = config_path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
let s = serde_yaml::to_string(config)?;
|
||||
fs::write(&config_path, s)
|
||||
.with_context(|| format!("failed to write {}", config_path.display()))?;
|
||||
} else {
|
||||
confy::store(env!("CARGO_CRATE_NAME"), "config", config)
|
||||
.with_context(|| "failed to save updated config via confy")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::utils::get_log_path;
|
||||
|
||||
+153
-22
@@ -11,6 +11,7 @@
|
||||
//!
|
||||
//! let rc = RunConfig{
|
||||
//! name: Some("echo".into()),
|
||||
//! provider: None,
|
||||
//! secrets: Some(vec!["api_key".into()]),
|
||||
//! files: None,
|
||||
//! flag: None,
|
||||
@@ -19,16 +20,20 @@
|
||||
//! };
|
||||
//! rc.validate().unwrap();
|
||||
//! ```
|
||||
|
||||
use crate::calling_app_name;
|
||||
use crate::providers::local::LocalProvider;
|
||||
use crate::providers::{SecretProvider, SupportedProvider};
|
||||
use anyhow::{Context, Result};
|
||||
use collections::HashSet;
|
||||
use log::debug;
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_with::serde_as;
|
||||
use serde_with::skip_serializing_none;
|
||||
use std::borrow::Cow;
|
||||
use std::path::PathBuf;
|
||||
use std::{env, fs};
|
||||
use std::{collections, env, fs};
|
||||
use validator::{Validate, ValidationError};
|
||||
|
||||
#[skip_serializing_none]
|
||||
@@ -43,6 +48,7 @@ use validator::{Validate, ValidationError};
|
||||
pub struct RunConfig {
|
||||
#[validate(required)]
|
||||
pub name: Option<String>,
|
||||
pub provider: Option<String>,
|
||||
#[validate(required)]
|
||||
pub secrets: Option<Vec<String>>,
|
||||
pub files: Option<Vec<PathBuf>>,
|
||||
@@ -144,6 +150,7 @@ impl ProviderConfig {
|
||||
match &mut self.provider_type {
|
||||
SupportedProvider::Local { provider_def } => {
|
||||
debug!("Using local secret provider");
|
||||
provider_def.runtime_provider_name = self.name.clone();
|
||||
provider_def
|
||||
}
|
||||
SupportedProvider::AwsSecretsManager { provider_def } => {
|
||||
@@ -158,6 +165,10 @@ impl ProviderConfig {
|
||||
debug!("Using Azure Key Vault provider");
|
||||
provider_def
|
||||
}
|
||||
SupportedProvider::Gopass { provider_def } => {
|
||||
debug!("Using Gopass provider");
|
||||
provider_def
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -181,6 +192,7 @@ impl ProviderConfig {
|
||||
/// ```
|
||||
#[derive(Debug, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)]
|
||||
#[validate(schema(function = "default_provider_exists"))]
|
||||
#[validate(schema(function = "providers_names_are_unique"))]
|
||||
pub struct Config {
|
||||
pub default_provider: Option<String>,
|
||||
#[validate(length(min = 1))]
|
||||
@@ -210,6 +222,22 @@ fn default_provider_exists(config: &Config) -> Result<(), ValidationError> {
|
||||
}
|
||||
}
|
||||
|
||||
fn providers_names_are_unique(config: &Config) -> Result<(), ValidationError> {
|
||||
let mut names = HashSet::new();
|
||||
for provider in &config.providers {
|
||||
if let Some(name) = &provider.name
|
||||
&& !names.insert(name)
|
||||
{
|
||||
let mut err = ValidationError::new("duplicate_provider_name");
|
||||
err.message = Some(Cow::Borrowed(
|
||||
"Provider names must be unique; duplicate found",
|
||||
));
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
@@ -241,48 +269,49 @@ impl Config {
|
||||
|
||||
/// Discover the default password file for the local provider.
|
||||
///
|
||||
/// On most systems this resolves to `~/.gman_password` when the file
|
||||
/// exists, otherwise `None`.
|
||||
pub fn local_provider_password_file() -> Option<PathBuf> {
|
||||
let candidate = dirs::home_dir().map(|p| p.join(".gman_password"));
|
||||
match candidate {
|
||||
Some(p) if p.exists() => Some(p),
|
||||
_ => None,
|
||||
}
|
||||
/// On most systems this resolves to `~/.<executable_name>_password`
|
||||
pub fn local_provider_password_file() -> PathBuf {
|
||||
dirs::home_dir()
|
||||
.map(|p| p.join(format!(".{}_password", calling_app_name())))
|
||||
.expect("unable to determine home directory for local provider password file")
|
||||
}
|
||||
}
|
||||
|
||||
/// Load and validate the application configuration.
|
||||
///
|
||||
/// This uses the `confy` crate to load the configuration from a file
|
||||
/// (e.g. `~/.config/gman/config.yaml`). If the file does
|
||||
/// (e.g. `~/.config/<executable_name>/config.yaml`). If the file does
|
||||
/// not exist, a default configuration is created and saved.
|
||||
///
|
||||
/// ```no_run
|
||||
/// # use gman::config::load_config;
|
||||
/// let config = load_config().unwrap();
|
||||
/// // Load config with environment variable interpolation enabled
|
||||
/// let config = load_config(true).unwrap();
|
||||
/// println!("loaded config: {:?}", config);
|
||||
/// ```
|
||||
pub fn load_config() -> Result<Config> {
|
||||
pub fn load_config(interpolate: bool) -> Result<Config> {
|
||||
let xdg_path = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from);
|
||||
|
||||
let mut config: Config = if let Some(base) = xdg_path.as_ref() {
|
||||
let app_dir = base.join("gman");
|
||||
let app_dir = base.join(calling_app_name());
|
||||
let yml = app_dir.join("config.yml");
|
||||
let yaml = app_dir.join("config.yaml");
|
||||
if yml.exists() || yaml.exists() {
|
||||
let load_path = if yml.exists() { &yml } else { &yaml };
|
||||
let content = fs::read_to_string(load_path)
|
||||
let mut content = fs::read_to_string(load_path)
|
||||
.with_context(|| format!("failed to read config file '{}'", load_path.display()))?;
|
||||
if interpolate {
|
||||
content = interpolate_env_vars(&content);
|
||||
}
|
||||
let cfg: Config = serde_yaml::from_str(&content).with_context(|| {
|
||||
format!("failed to parse YAML config at '{}'", load_path.display())
|
||||
})?;
|
||||
cfg
|
||||
} else {
|
||||
confy::load("gman", "config")?
|
||||
load_confy_config(interpolate)?
|
||||
}
|
||||
} else {
|
||||
confy::load("gman", "config")?
|
||||
load_confy_config(interpolate)?
|
||||
};
|
||||
|
||||
config.validate()?;
|
||||
@@ -296,26 +325,128 @@ pub fn load_config() -> Result<Config> {
|
||||
ref mut provider_def,
|
||||
} = p.provider_type
|
||||
&& provider_def.password_file.is_none()
|
||||
&& let Some(local_password_file) = Config::local_provider_password_file()
|
||||
&& Config::local_provider_password_file().exists()
|
||||
{
|
||||
provider_def.password_file = Some(local_password_file);
|
||||
provider_def.password_file = Some(Config::local_provider_password_file());
|
||||
}
|
||||
});
|
||||
|
||||
Ok(config)
|
||||
}
|
||||
|
||||
/// Returns the configuration file path that `confy` will use for this app.
|
||||
fn load_confy_config(interpolate: bool) -> Result<Config> {
|
||||
let load_path = confy::get_configuration_file_path(&calling_app_name(), "config")?;
|
||||
let mut content = fs::read_to_string(&load_path)
|
||||
.with_context(|| format!("failed to read config file '{}'", load_path.display()))?;
|
||||
if interpolate {
|
||||
content = interpolate_env_vars(&content);
|
||||
}
|
||||
let cfg: Config = serde_yaml::from_str(&content)
|
||||
.with_context(|| format!("failed to parse YAML config at '{}'", load_path.display()))?;
|
||||
|
||||
Ok(cfg)
|
||||
}
|
||||
|
||||
/// Returns the configuration file path that `confy` will use
|
||||
pub fn get_config_file_path() -> Result<PathBuf> {
|
||||
if let Some(base) = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from) {
|
||||
let dir = base.join("gman");
|
||||
let dir = base.join(calling_app_name());
|
||||
let yml = dir.join("config.yml");
|
||||
let yaml = dir.join("config.yaml");
|
||||
if yml.exists() || yaml.exists() {
|
||||
return Ok(if yml.exists() { yml } else { yaml });
|
||||
}
|
||||
// Prefer .yml if creating anew
|
||||
return Ok(dir.join("config.yml"));
|
||||
}
|
||||
Ok(confy::get_configuration_file_path("gman", "config")?)
|
||||
Ok(confy::get_configuration_file_path(
|
||||
&calling_app_name(),
|
||||
"config",
|
||||
)?)
|
||||
}
|
||||
|
||||
pub fn interpolate_env_vars(s: &str) -> String {
|
||||
let result = s.to_string();
|
||||
let scrubbing_regex = Regex::new(r#"[\s{}^()\[\]\\|`'"]+"#).unwrap();
|
||||
let var_regex = Regex::new(r"\$\{(.*?)(:-.+)?}").unwrap();
|
||||
|
||||
var_regex
|
||||
.replace_all(s, |caps: ®ex::Captures<'_>| {
|
||||
if let Some(mat) = caps.get(1) {
|
||||
if let Ok(value) = env::var(mat.as_str()) {
|
||||
return scrubbing_regex.replace_all(&value, "").to_string();
|
||||
} else if let Some(default_value) = caps.get(2) {
|
||||
return scrubbing_regex
|
||||
.replace_all(
|
||||
default_value
|
||||
.as_str()
|
||||
.strip_prefix(":-")
|
||||
.expect("unable to strip ':-' prefix from default value"),
|
||||
"",
|
||||
)
|
||||
.to_string();
|
||||
}
|
||||
}
|
||||
|
||||
scrubbing_regex.replace_all(&result, "").to_string()
|
||||
})
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_str_eq;
|
||||
use serial_test::serial;
|
||||
|
||||
#[test]
|
||||
fn test_interpolate_env_vars_defaults_to_original_string_if_not_in_yaml_interpolation_format() {
|
||||
let var = interpolate_env_vars("TEST_VAR_INTERPOLATION_NON_YAML");
|
||||
|
||||
assert_str_eq!(var, "TEST_VAR_INTERPOLATION_NON_YAML");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial]
|
||||
fn test_interpolate_env_vars_scrubs_all_unnecessary_characters() {
|
||||
unsafe {
|
||||
env::set_var(
|
||||
"TEST_VAR_INTERPOLATION_UNNECESSARY_CHARACTERS",
|
||||
r#"""
|
||||
`"'https://dontdo:this@testing.com/query?test=%20query#results'"` {([\|])}
|
||||
"""#,
|
||||
)
|
||||
};
|
||||
|
||||
let var = interpolate_env_vars("${TEST_VAR_INTERPOLATION_UNNECESSARY_CHARACTERS}");
|
||||
|
||||
assert_str_eq!(
|
||||
var,
|
||||
"https://dontdo:this@testing.com/query?test=%20query#results"
|
||||
);
|
||||
unsafe { env::remove_var("TEST_VAR_INTERPOLATION_UNNECESSARY_CHARACTERS") };
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial]
|
||||
fn test_interpolate_env_vars_scrubs_all_unnecessary_characters_for_default_values() {
|
||||
let var = interpolate_env_vars(
|
||||
r#"${UNSET:-`"'https://dontdo:this@testing.com/query?test=%20query#results'"` {([\|])}}"#,
|
||||
);
|
||||
|
||||
assert_str_eq!(
|
||||
var,
|
||||
"https://dontdo:this@testing.com/query?test=%20query#results"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_interpolate_env_vars_scrubs_all_unnecessary_characters_from_non_environment_variable() {
|
||||
let var =
|
||||
interpolate_env_vars("https://dontdo:this@testing.com/query?test=%20query#results");
|
||||
|
||||
assert_str_eq!(
|
||||
var,
|
||||
"https://dontdo:this@testing.com/query?test=%20query#results"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
+77
-35
@@ -20,17 +20,16 @@
|
||||
//! The `config` and `providers` modules power the CLI. They can be embedded
|
||||
//! in other programs, but many functions interact with the user or the
|
||||
//! filesystem. Prefer `no_run` doctests for those.
|
||||
|
||||
use anyhow::{Context, Result, anyhow, bail};
|
||||
use argon2::{
|
||||
Algorithm, Argon2, Params, Version,
|
||||
password_hash::{SaltString, rand_core::RngCore},
|
||||
};
|
||||
use argon2::{Algorithm, Argon2, Params, Version, password_hash::rand_core::RngCore};
|
||||
use base64::{Engine as _, engine::general_purpose::STANDARD as B64};
|
||||
use chacha20poly1305::{
|
||||
Key, XChaCha20Poly1305, XNonce,
|
||||
aead::{Aead, KeyInit, OsRng},
|
||||
};
|
||||
use secrecy::{ExposeSecret, SecretString};
|
||||
use std::path::PathBuf;
|
||||
use zeroize::Zeroize;
|
||||
/// Configuration structures and helpers used by the CLI and library.
|
||||
pub mod config;
|
||||
@@ -41,8 +40,8 @@ pub(crate) const HEADER: &str = "$VAULT";
|
||||
pub(crate) const VERSION: &str = "v1";
|
||||
pub(crate) const KDF: &str = "argon2id";
|
||||
|
||||
pub(crate) const ARGON_M_COST_KIB: u32 = 19_456;
|
||||
pub(crate) const ARGON_T_COST: u32 = 2;
|
||||
pub(crate) const ARGON_M_COST_KIB: u32 = 65_536;
|
||||
pub(crate) const ARGON_T_COST: u32 = 3;
|
||||
pub(crate) const ARGON_P: u32 = 1;
|
||||
|
||||
pub(crate) const SALT_LEN: usize = 16;
|
||||
@@ -59,7 +58,7 @@ fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
|
||||
.hash_password_into(password.expose_secret().as_bytes(), salt, &mut key_bytes)
|
||||
.map_err(|e| anyhow!("argon2 into error: {:?}", e))?;
|
||||
|
||||
let key = *Key::from_slice(&key_bytes);
|
||||
let key: Key = key_bytes.into();
|
||||
key_bytes.zeroize();
|
||||
Ok(key)
|
||||
}
|
||||
@@ -82,20 +81,28 @@ fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
|
||||
pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Result<String> {
|
||||
let password = password.into();
|
||||
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
if password.expose_secret().is_empty() {
|
||||
bail!("password cannot be empty");
|
||||
}
|
||||
|
||||
let mut salt = [0u8; SALT_LEN];
|
||||
OsRng.fill_bytes(&mut salt);
|
||||
let mut nonce_bytes = [0u8; NONCE_LEN];
|
||||
OsRng.fill_bytes(&mut nonce_bytes);
|
||||
|
||||
let key = derive_key(&password, salt.as_str().as_bytes())?;
|
||||
let mut key = derive_key(&password, &salt)?;
|
||||
let cipher = XChaCha20Poly1305::new(&key);
|
||||
|
||||
let aad = format!("{};{}", HEADER, VERSION);
|
||||
let aad = format!(
|
||||
"{};{};{};m={},t={},p={}",
|
||||
HEADER, VERSION, KDF, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P
|
||||
);
|
||||
|
||||
let nonce = XNonce::from_slice(&nonce_bytes);
|
||||
let nonce: XNonce = nonce_bytes.into();
|
||||
let mut pt = plaintext.as_bytes().to_vec();
|
||||
let ct = cipher
|
||||
.encrypt(
|
||||
nonce,
|
||||
&nonce,
|
||||
chacha20poly1305::aead::Payload {
|
||||
msg: &pt,
|
||||
aad: aad.as_bytes(),
|
||||
@@ -113,13 +120,14 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
|
||||
m = ARGON_M_COST_KIB,
|
||||
t = ARGON_T_COST,
|
||||
p = ARGON_P,
|
||||
salt = B64.encode(salt.as_str().as_bytes()),
|
||||
salt = B64.encode(salt),
|
||||
nonce = B64.encode(nonce_bytes),
|
||||
ct = B64.encode(&ct),
|
||||
);
|
||||
|
||||
drop(cipher);
|
||||
let _ = key;
|
||||
key.zeroize();
|
||||
salt.zeroize();
|
||||
nonce_bytes.zeroize();
|
||||
|
||||
Ok(env)
|
||||
@@ -130,6 +138,9 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
|
||||
/// Returns the original plaintext on success or an error if the password is
|
||||
/// wrong, the envelope was tampered with, or the input is malformed.
|
||||
///
|
||||
/// This function supports both the current format (with KDF params in AAD) and
|
||||
/// the legacy format (without KDF params in AAD) for backwards compatibility.
|
||||
///
|
||||
/// Example
|
||||
/// ```
|
||||
/// use gman::{encrypt_string, decrypt_string};
|
||||
@@ -143,6 +154,10 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
|
||||
pub fn decrypt_string(password: impl Into<SecretString>, envelope: &str) -> Result<String> {
|
||||
let password = password.into();
|
||||
|
||||
if password.expose_secret().is_empty() {
|
||||
bail!("password cannot be empty");
|
||||
}
|
||||
|
||||
let parts: Vec<&str> = envelope.split(';').collect();
|
||||
if parts.len() < 7 {
|
||||
bail!("invalid envelope format");
|
||||
@@ -176,37 +191,66 @@ pub fn decrypt_string(password: impl Into<SecretString>, envelope: &str) -> Resu
|
||||
let nonce_b64 = parts[5].strip_prefix("nonce=").context("missing nonce")?;
|
||||
let ct_b64 = parts[6].strip_prefix("ct=").context("missing ct")?;
|
||||
|
||||
let salt_bytes = B64.decode(salt_b64).context("bad salt b64")?;
|
||||
let mut nonce_bytes = B64.decode(nonce_b64).context("bad nonce b64")?;
|
||||
let mut salt_bytes = B64.decode(salt_b64).context("bad salt b64")?;
|
||||
let nonce_bytes = B64.decode(nonce_b64).context("bad nonce b64")?;
|
||||
let mut ct = B64.decode(ct_b64).context("bad ct b64")?;
|
||||
|
||||
if nonce_bytes.len() != NONCE_LEN {
|
||||
bail!("nonce length mismatch");
|
||||
}
|
||||
|
||||
let key = derive_key(&password, &salt_bytes)?;
|
||||
let mut key = derive_key(&password, &salt_bytes)?;
|
||||
|
||||
let cipher = XChaCha20Poly1305::new(&key);
|
||||
|
||||
let aad = format!("{};{}", HEADER, VERSION);
|
||||
let nonce = XNonce::from_slice(&nonce_bytes);
|
||||
let pt = cipher
|
||||
.decrypt(
|
||||
nonce,
|
||||
chacha20poly1305::aead::Payload {
|
||||
msg: &ct,
|
||||
aad: aad.as_bytes(),
|
||||
},
|
||||
)
|
||||
.map_err(|_| anyhow!("decryption failed (wrong password or corrupted data)"))?;
|
||||
let aad_new = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
|
||||
let aad_legacy = format!("{};{}", HEADER, VERSION);
|
||||
|
||||
nonce_bytes.zeroize();
|
||||
let mut nonce_arr: [u8; NONCE_LEN] = nonce_bytes
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("invalid nonce length"))?;
|
||||
let nonce: XNonce = nonce_arr.into();
|
||||
|
||||
let decrypt_result = cipher.decrypt(
|
||||
&nonce,
|
||||
chacha20poly1305::aead::Payload {
|
||||
msg: &ct,
|
||||
aad: aad_new.as_bytes(),
|
||||
},
|
||||
);
|
||||
|
||||
let mut pt = match decrypt_result {
|
||||
Ok(pt) => pt,
|
||||
Err(_) => cipher
|
||||
.decrypt(
|
||||
&nonce,
|
||||
chacha20poly1305::aead::Payload {
|
||||
msg: &ct,
|
||||
aad: aad_legacy.as_bytes(),
|
||||
},
|
||||
)
|
||||
.map_err(|_| anyhow!("decryption failed (wrong password or corrupted data)"))?,
|
||||
};
|
||||
|
||||
let s = String::from_utf8(pt.clone()).context("plaintext not valid UTF-8")?;
|
||||
|
||||
key.zeroize();
|
||||
salt_bytes.zeroize();
|
||||
nonce_arr.zeroize();
|
||||
ct.zeroize();
|
||||
pt.zeroize();
|
||||
|
||||
let s = String::from_utf8(pt).context("plaintext not valid UTF-8")?;
|
||||
Ok(s)
|
||||
}
|
||||
|
||||
pub(crate) fn calling_app_name() -> String {
|
||||
let exe: PathBuf = std::env::current_exe().expect("unable to get current exe path");
|
||||
exe.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.map(|s| s.to_owned())
|
||||
.expect("executable name not valid UTF-8")
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -237,12 +281,10 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_password() {
|
||||
fn empty_password_rejected() {
|
||||
let pw = SecretString::new("".into());
|
||||
let msg = "hello";
|
||||
let env = encrypt_string(pw.clone(), msg).unwrap();
|
||||
let out = decrypt_string(pw, &env).unwrap();
|
||||
assert_eq!(msg, out);
|
||||
assert!(encrypt_string(pw.clone(), msg).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -264,7 +306,7 @@ mod tests {
|
||||
let mut ct = base64::engine::general_purpose::STANDARD
|
||||
.decode(ct_b64)
|
||||
.unwrap();
|
||||
ct[0] ^= 0x01; // Flip a bit
|
||||
ct[0] ^= 0x01;
|
||||
let new_ct_b64 = base64::engine::general_purpose::STANDARD.encode(&ct);
|
||||
let new_ct_part = format!("ct={}", new_ct_b64);
|
||||
parts[6] = &new_ct_part;
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
use crate::providers::SecretProvider;
|
||||
use anyhow::{Context, Result};
|
||||
use azure_identity::DefaultAzureCredential;
|
||||
use azure_core::credentials::TokenCredential;
|
||||
use azure_identity::DeveloperToolsCredential;
|
||||
use azure_security_keyvault_secrets::models::SetSecretParameters;
|
||||
use azure_security_keyvault_secrets::{ResourceExt, SecretClient};
|
||||
use futures::TryStreamExt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_with::skip_serializing_none;
|
||||
use std::sync::Arc;
|
||||
use validator::Validate;
|
||||
|
||||
#[skip_serializing_none]
|
||||
@@ -40,12 +42,8 @@ impl SecretProvider for AzureKeyVaultProvider {
|
||||
}
|
||||
|
||||
async fn get_secret(&self, key: &str) -> Result<String> {
|
||||
let body = self
|
||||
.get_client()?
|
||||
.get_secret(key, "", None)
|
||||
.await?
|
||||
.into_body()
|
||||
.await?;
|
||||
let response = self.get_client()?.get_secret(key, None).await?;
|
||||
let body = response.into_model()?;
|
||||
|
||||
body.value
|
||||
.with_context(|| format!("Secret '{}' not found", key))
|
||||
@@ -60,8 +58,7 @@ impl SecretProvider for AzureKeyVaultProvider {
|
||||
self.get_client()?
|
||||
.set_secret(key, params.try_into()?, None)
|
||||
.await?
|
||||
.into_body()
|
||||
.await?;
|
||||
.into_model()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -77,10 +74,7 @@ impl SecretProvider for AzureKeyVaultProvider {
|
||||
}
|
||||
|
||||
async fn list_secrets(&self) -> Result<Vec<String>> {
|
||||
let mut pager = self
|
||||
.get_client()?
|
||||
.list_secret_properties(None)?
|
||||
.into_stream();
|
||||
let mut pager = self.get_client()?.list_secret_properties(None)?;
|
||||
let mut secrets = Vec::new();
|
||||
while let Some(props) = pager.try_next().await? {
|
||||
let name = props.resource_id()?.name;
|
||||
@@ -93,7 +87,7 @@ impl SecretProvider for AzureKeyVaultProvider {
|
||||
|
||||
impl AzureKeyVaultProvider {
|
||||
fn get_client(&self) -> Result<SecretClient> {
|
||||
let credential = DefaultAzureCredential::new()?;
|
||||
let credential: Arc<dyn TokenCredential> = DeveloperToolsCredential::new(None)?;
|
||||
let client = SecretClient::new(
|
||||
format!(
|
||||
"https://{}.vault.azure.net",
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::calling_app_name;
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use chrono::Utc;
|
||||
use dialoguer::Confirm;
|
||||
@@ -25,7 +26,7 @@ pub fn sync_and_push(opts: &SyncOpts<'_>) -> Result<()> {
|
||||
opts.validate()
|
||||
.with_context(|| "invalid git sync options")?;
|
||||
let commit_message = format!("chore: sync @ {}", Utc::now().to_rfc3339());
|
||||
let config_dir = confy::get_configuration_file_path("gman", "vault")
|
||||
let config_dir = confy::get_configuration_file_path(&calling_app_name(), "vault")
|
||||
.with_context(|| "get config dir")?
|
||||
.parent()
|
||||
.map(Path::to_path_buf)
|
||||
@@ -37,7 +38,7 @@ pub fn sync_and_push(opts: &SyncOpts<'_>) -> Result<()> {
|
||||
fs::create_dir_all(&repo_dir).with_context(|| format!("create {}", repo_dir.display()))?;
|
||||
|
||||
// Move the default vault into the repo dir on first sync so only vault.yml is tracked.
|
||||
let default_vault = confy::get_configuration_file_path("gman", "vault")
|
||||
let default_vault = confy::get_configuration_file_path(&calling_app_name(), "vault")
|
||||
.with_context(|| "get default vault path")?;
|
||||
let repo_vault = repo_dir.join("vault.yml");
|
||||
if default_vault.exists() && !repo_vault.exists() {
|
||||
@@ -116,8 +117,7 @@ fn resolve_git_username(git: &Path, name: Option<&String>) -> Result<String> {
|
||||
return Ok(name.to_string());
|
||||
}
|
||||
|
||||
run_git_config_capture(git, &["config", "user.name"])
|
||||
.with_context(|| "unable to determine git username")
|
||||
default_git_username(git)
|
||||
}
|
||||
|
||||
fn resolve_git_email(git: &Path, email: Option<&String>) -> Result<String> {
|
||||
@@ -126,11 +126,10 @@ fn resolve_git_email(git: &Path, email: Option<&String>) -> Result<String> {
|
||||
return Ok(email.to_string());
|
||||
}
|
||||
|
||||
run_git_config_capture(git, &["config", "user.email"])
|
||||
.with_context(|| "unable to determine git user email")
|
||||
default_git_email(git)
|
||||
}
|
||||
|
||||
fn resolve_git(override_path: Option<&PathBuf>) -> Result<PathBuf> {
|
||||
pub(in crate::providers) fn resolve_git(override_path: Option<&PathBuf>) -> Result<PathBuf> {
|
||||
debug!("Resolving git executable");
|
||||
if let Some(p) = override_path {
|
||||
return Ok(p.to_path_buf());
|
||||
@@ -141,7 +140,19 @@ fn resolve_git(override_path: Option<&PathBuf>) -> Result<PathBuf> {
|
||||
Ok(PathBuf::from("git"))
|
||||
}
|
||||
|
||||
fn ensure_git_available(git: &Path) -> Result<()> {
|
||||
pub(in crate::providers) fn default_git_username(git: &Path) -> Result<String> {
|
||||
debug!("Checking for default git username");
|
||||
run_git_config_capture(git, &["config", "user.name"])
|
||||
.with_context(|| "unable to determine git user name")
|
||||
}
|
||||
|
||||
pub(in crate::providers) fn default_git_email(git: &Path) -> Result<String> {
|
||||
debug!("Checking for default git username");
|
||||
run_git_config_capture(git, &["config", "user.email"])
|
||||
.with_context(|| "unable to determine git user email")
|
||||
}
|
||||
|
||||
pub(in crate::providers) fn ensure_git_available(git: &Path) -> Result<()> {
|
||||
let ok = Command::new(git)
|
||||
.arg("--version")
|
||||
.stdout(Stdio::null())
|
||||
|
||||
@@ -0,0 +1,190 @@
|
||||
use crate::providers::{ENV_PATH, SecretProvider};
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_with::skip_serializing_none;
|
||||
use std::io::{Read, Write};
|
||||
use std::process::{Command, Stdio};
|
||||
use validator::Validate;
|
||||
|
||||
#[skip_serializing_none]
|
||||
/// Gopass-based secret provider
|
||||
/// See [Gopass](https://gopass.pw/) for more information.
|
||||
///
|
||||
/// You must already have gopass installed and configured on your system.
|
||||
///
|
||||
/// This provider stores secrets in a gopass store. It requires
|
||||
/// an optional store name to be specified. If no store name is
|
||||
/// specified, the default store will be used.
|
||||
///
|
||||
/// Example
|
||||
/// ```no_run
|
||||
/// use gman::providers::gopass::GopassProvider;
|
||||
/// use gman::providers::{SecretProvider, SupportedProvider};
|
||||
/// use gman::config::Config;
|
||||
///
|
||||
/// let provider = GopassProvider::default();
|
||||
/// let _ = provider.set_secret("MY_SECRET", "value");
|
||||
/// ```
|
||||
#[derive(Debug, Default, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct GopassProvider {
|
||||
pub store: Option<String>,
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl SecretProvider for GopassProvider {
|
||||
fn name(&self) -> &'static str {
|
||||
"GopassProvider"
|
||||
}
|
||||
|
||||
async fn get_secret(&self, key: &str) -> Result<String> {
|
||||
ensure_gopass_installed()?;
|
||||
|
||||
let mut child = Command::new("gopass")
|
||||
.args(["show", "-yfon", key])
|
||||
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
|
||||
.stdin(Stdio::inherit())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.context("Failed to spawn gopass command")?;
|
||||
|
||||
let mut output = String::new();
|
||||
child
|
||||
.stdout
|
||||
.as_mut()
|
||||
.expect("Failed to open gopass stdout")
|
||||
.read_to_string(&mut output)
|
||||
.context("Failed to read gopass output")?;
|
||||
|
||||
let status = child.wait().context("Failed to wait on gopass process")?;
|
||||
if !status.success() {
|
||||
return Err(anyhow!("gopass command failed with status: {}", status));
|
||||
}
|
||||
|
||||
Ok(output.trim_end_matches(&['\r', '\n'][..]).to_string())
|
||||
}
|
||||
|
||||
async fn set_secret(&self, key: &str, value: &str) -> Result<()> {
|
||||
ensure_gopass_installed()?;
|
||||
|
||||
let mut child = Command::new("gopass")
|
||||
.args(["insert", "-f", key])
|
||||
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::inherit())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.context("Failed to spawn gopass command")?;
|
||||
|
||||
{
|
||||
let stdin = child.stdin.as_mut().expect("Failed to open gopass stdin");
|
||||
stdin
|
||||
.write_all(value.as_bytes())
|
||||
.context("Failed to write to gopass stdin")?;
|
||||
}
|
||||
|
||||
let status = child.wait().context("Failed to wait on gopass process")?;
|
||||
if !status.success() {
|
||||
return Err(anyhow!("gopass command failed with status: {}", status));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn update_secret(&self, key: &str, value: &str) -> Result<()> {
|
||||
ensure_gopass_installed()?;
|
||||
|
||||
self.set_secret(key, value).await
|
||||
}
|
||||
|
||||
async fn delete_secret(&self, key: &str) -> Result<()> {
|
||||
ensure_gopass_installed()?;
|
||||
|
||||
let mut child = Command::new("gopass")
|
||||
.args(["rm", "-f", key])
|
||||
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
|
||||
.stdin(Stdio::inherit())
|
||||
.stdout(Stdio::inherit())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.context("Failed to spawn gopass command")?;
|
||||
|
||||
let status = child.wait().context("Failed to wait on gopass process")?;
|
||||
if !status.success() {
|
||||
return Err(anyhow!("gopass command failed with status: {}", status));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn list_secrets(&self) -> Result<Vec<String>> {
|
||||
ensure_gopass_installed()?;
|
||||
|
||||
let mut child = Command::new("gopass")
|
||||
.args(["ls", "-f"])
|
||||
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
|
||||
.stdin(Stdio::inherit())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.context("Failed to spawn gopass command")?;
|
||||
|
||||
let mut output = String::new();
|
||||
child
|
||||
.stdout
|
||||
.as_mut()
|
||||
.expect("Failed to open gopass stdout")
|
||||
.read_to_string(&mut output)
|
||||
.context("Failed to read gopass output")?;
|
||||
|
||||
let status = child.wait().context("Failed to wait on gopass process")?;
|
||||
if !status.success() {
|
||||
return Err(anyhow!("gopass command failed with status: {}", status));
|
||||
}
|
||||
|
||||
let secrets: Vec<String> = output
|
||||
.lines()
|
||||
.map(|line| line.trim().to_string())
|
||||
.filter(|line| !line.is_empty())
|
||||
.collect();
|
||||
|
||||
Ok(secrets)
|
||||
}
|
||||
|
||||
async fn sync(&mut self) -> Result<()> {
|
||||
ensure_gopass_installed()?;
|
||||
let mut child = Command::new("gopass");
|
||||
child.arg("sync");
|
||||
|
||||
if let Some(store) = &self.store {
|
||||
child.args(["-s", store]);
|
||||
}
|
||||
|
||||
let status = child
|
||||
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
|
||||
.stdin(Stdio::inherit())
|
||||
.stdout(Stdio::inherit())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.context("Failed to spawn gopass command")?
|
||||
.wait()
|
||||
.context("Failed to wait on gopass process")?;
|
||||
|
||||
if !status.success() {
|
||||
return Err(anyhow!("gopass command failed with status: {}", status));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn ensure_gopass_installed() -> Result<()> {
|
||||
if which::which("gopass").is_err() {
|
||||
Err(anyhow!(
|
||||
"Gopass is not installed or not found in PATH. Please install Gopass from https://gopass.pw/"
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
+483
-45
@@ -5,11 +5,15 @@ use std::path::{Path, PathBuf};
|
||||
use std::{env, fs};
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::providers::SecretProvider;
|
||||
use crate::providers::git_sync::{SyncOpts, repo_name_from_url, sync_and_push};
|
||||
use crate::config::{Config, get_config_file_path, load_config};
|
||||
use crate::providers::git_sync::{
|
||||
SyncOpts, default_git_email, default_git_username, ensure_git_available, repo_name_from_url,
|
||||
resolve_git, sync_and_push,
|
||||
};
|
||||
use crate::providers::{SecretProvider, SupportedProvider};
|
||||
use crate::{
|
||||
ARGON_M_COST_KIB, ARGON_P, ARGON_T_COST, HEADER, KDF, KEY_LEN, NONCE_LEN, SALT_LEN, VERSION,
|
||||
calling_app_name,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use argon2::{Algorithm, Argon2, Params, Version};
|
||||
@@ -54,17 +58,25 @@ pub struct LocalProvider {
|
||||
#[validate(email)]
|
||||
pub git_user_email: Option<String>,
|
||||
pub git_executable: Option<PathBuf>,
|
||||
#[serde(skip)]
|
||||
pub runtime_provider_name: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for LocalProvider {
|
||||
fn default() -> Self {
|
||||
let password_file = match Config::local_provider_password_file() {
|
||||
p if p.exists() => Some(p),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
Self {
|
||||
password_file: Config::local_provider_password_file(),
|
||||
password_file,
|
||||
git_branch: Some("main".into()),
|
||||
git_remote_url: None,
|
||||
git_user_name: None,
|
||||
git_user_email: None,
|
||||
git_executable: None,
|
||||
runtime_provider_name: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -153,6 +165,8 @@ impl SecretProvider for LocalProvider {
|
||||
|
||||
async fn sync(&mut self) -> Result<()> {
|
||||
let mut config_changed = false;
|
||||
let git = resolve_git(self.git_executable.as_ref())?;
|
||||
ensure_git_available(&git)?;
|
||||
|
||||
if self.git_branch.is_none() {
|
||||
config_changed = true;
|
||||
@@ -169,7 +183,9 @@ impl SecretProvider for LocalProvider {
|
||||
config_changed = true;
|
||||
debug!("Prompting user to set git_remote in config for sync");
|
||||
let remote: String = Input::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt("Enter remote git URL to sync with")
|
||||
.with_prompt(
|
||||
"Enter remote git URL to sync with (e.g. 'git@github.com:user/repo.git')",
|
||||
)
|
||||
.validate_with(|s: &String| {
|
||||
LocalProvider {
|
||||
git_remote_url: Some(s.clone()),
|
||||
@@ -184,10 +200,41 @@ impl SecretProvider for LocalProvider {
|
||||
self.git_remote_url = Some(remote);
|
||||
}
|
||||
|
||||
if self.git_user_name.is_none() {
|
||||
config_changed = true;
|
||||
debug!("Prompting user git user name");
|
||||
let default_user_name = default_git_username(&git)?.trim().to_string();
|
||||
let branch: String = Input::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt("Enter git user name")
|
||||
.default(default_user_name)
|
||||
.interact_text()?;
|
||||
|
||||
self.git_user_name = Some(branch);
|
||||
}
|
||||
|
||||
if self.git_user_email.is_none() {
|
||||
config_changed = true;
|
||||
debug!("Prompting user git email");
|
||||
let default_user_name = default_git_email(&git)?.trim().to_string();
|
||||
let branch: String = Input::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt("Enter git user email")
|
||||
.validate_with({
|
||||
|s: &String| {
|
||||
if s.contains('@') {
|
||||
Ok(())
|
||||
} else {
|
||||
Err("not a valid email address".to_string())
|
||||
}
|
||||
}
|
||||
})
|
||||
.default(default_user_name)
|
||||
.interact_text()?;
|
||||
|
||||
self.git_user_email = Some(branch);
|
||||
}
|
||||
|
||||
if config_changed {
|
||||
debug!("Saving updated config");
|
||||
confy::store("gman", "config", &self)
|
||||
.with_context(|| "failed to save updated config")?;
|
||||
self.persist_git_settings_to_config()?;
|
||||
}
|
||||
|
||||
let sync_opts = SyncOpts {
|
||||
@@ -203,6 +250,55 @@ impl SecretProvider for LocalProvider {
|
||||
}
|
||||
|
||||
impl LocalProvider {
|
||||
fn persist_git_settings_to_config(&self) -> Result<()> {
|
||||
debug!("Saving updated config (only current local provider)");
|
||||
|
||||
let mut cfg = load_config(true).with_context(|| "failed to load existing config")?;
|
||||
|
||||
let target_name = self.runtime_provider_name.clone();
|
||||
let mut updated = false;
|
||||
for pc in cfg.providers.iter_mut() {
|
||||
if let SupportedProvider::Local { provider_def } = &mut pc.provider_type {
|
||||
let matches_name = match (&pc.name, &target_name) {
|
||||
(Some(n), Some(t)) => n == t,
|
||||
(Some(_), None) => false,
|
||||
_ => false,
|
||||
};
|
||||
if matches_name || target_name.is_none() {
|
||||
provider_def.git_branch = self.git_branch.clone();
|
||||
provider_def.git_remote_url = self.git_remote_url.clone();
|
||||
provider_def.git_user_name = self.git_user_name.clone();
|
||||
provider_def.git_user_email = self.git_user_email.clone();
|
||||
provider_def.git_executable = self.git_executable.clone();
|
||||
|
||||
updated = true;
|
||||
if matches_name {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !updated {
|
||||
bail!("unable to find matching local provider in config to update");
|
||||
}
|
||||
|
||||
let path = get_config_file_path()?;
|
||||
let ext = path.extension().and_then(|s| s.to_str()).unwrap_or("");
|
||||
if ext.eq_ignore_ascii_case("yml") || ext.eq_ignore_ascii_case("yaml") {
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
let s = serde_yaml::to_string(&cfg)?;
|
||||
fs::write(&path, s).with_context(|| format!("failed to write {}", path.display()))?;
|
||||
} else {
|
||||
confy::store(&calling_app_name(), "config", &cfg)
|
||||
.with_context(|| "failed to save updated config via confy")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn repo_dir_for_config(&self) -> Result<Option<PathBuf>> {
|
||||
if let Some(remote) = &self.git_remote_url {
|
||||
let name = repo_name_from_url(remote);
|
||||
@@ -225,6 +321,22 @@ impl LocalProvider {
|
||||
|
||||
fn get_password(&self) -> Result<SecretString> {
|
||||
if let Some(password_file) = &self.password_file {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let metadata = fs::metadata(password_file).with_context(|| {
|
||||
format!("failed to read password file metadata {:?}", password_file)
|
||||
})?;
|
||||
let mode = metadata.permissions().mode();
|
||||
if mode & 0o077 != 0 {
|
||||
bail!(
|
||||
"password file {:?} has insecure permissions {:o} (should be 0600 or 0400)",
|
||||
password_file,
|
||||
mode & 0o777
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let password = SecretString::new(
|
||||
fs::read_to_string(password_file)
|
||||
.with_context(|| format!("failed to read password file {:?}", password_file))?
|
||||
@@ -245,10 +357,11 @@ fn default_vault_path() -> Result<PathBuf> {
|
||||
let xdg_path = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from);
|
||||
|
||||
if let Some(xdg) = xdg_path {
|
||||
return Ok(xdg.join("gman").join("vault.yml"));
|
||||
return Ok(xdg.join(calling_app_name()).join("vault.yml"));
|
||||
}
|
||||
|
||||
confy::get_configuration_file_path("gman", "vault").with_context(|| "get config dir")
|
||||
confy::get_configuration_file_path(&calling_app_name(), "vault")
|
||||
.with_context(|| "get config dir")
|
||||
}
|
||||
|
||||
fn base_config_dir() -> Result<PathBuf> {
|
||||
@@ -272,24 +385,41 @@ fn store_vault(path: &Path, map: &HashMap<String, String>) -> Result<()> {
|
||||
fs::create_dir_all(parent).with_context(|| format!("create {}", parent.display()))?;
|
||||
}
|
||||
let s = serde_yaml::to_string(map).with_context(|| "serialize vault")?;
|
||||
fs::write(path, s).with_context(|| format!("write {}", path.display()))
|
||||
fs::write(path, &s).with_context(|| format!("write {}", path.display()))?;
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
fs::set_permissions(path, fs::Permissions::from_mode(0o600))
|
||||
.with_context(|| format!("set permissions on {}", path.display()))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn encrypt_string(password: &SecretString, plaintext: &str) -> Result<String> {
|
||||
if password.expose_secret().is_empty() {
|
||||
bail!("password cannot be empty");
|
||||
}
|
||||
|
||||
let mut salt = [0u8; SALT_LEN];
|
||||
OsRng.fill_bytes(&mut salt);
|
||||
let mut nonce_bytes = [0u8; NONCE_LEN];
|
||||
OsRng.fill_bytes(&mut nonce_bytes);
|
||||
|
||||
let key = derive_key(password, &salt)?;
|
||||
let mut key = derive_key(password, &salt)?;
|
||||
let cipher = XChaCha20Poly1305::new(&key);
|
||||
let aad = format!("{};{}", HEADER, VERSION);
|
||||
|
||||
let nonce = XNonce::from_slice(&nonce_bytes);
|
||||
let aad = format!(
|
||||
"{};{};{};m={},t={},p={}",
|
||||
HEADER, VERSION, KDF, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P
|
||||
);
|
||||
|
||||
let nonce: XNonce = nonce_bytes.into();
|
||||
let mut pt = plaintext.as_bytes().to_vec();
|
||||
let ct = cipher
|
||||
.encrypt(
|
||||
nonce,
|
||||
&nonce,
|
||||
chacha20poly1305::aead::Payload {
|
||||
msg: &pt,
|
||||
aad: aad.as_bytes(),
|
||||
@@ -312,6 +442,7 @@ fn encrypt_string(password: &SecretString, plaintext: &str) -> Result<String> {
|
||||
);
|
||||
|
||||
drop(cipher);
|
||||
key.zeroize();
|
||||
salt.zeroize();
|
||||
nonce_bytes.zeroize();
|
||||
|
||||
@@ -332,16 +463,30 @@ fn derive_key_with_params(
|
||||
argon
|
||||
.hash_password_into(password.expose_secret().as_bytes(), salt, &mut key_bytes)
|
||||
.map_err(|e| anyhow!("argon2 derive error: {:?}", e))?;
|
||||
let key: Key = key_bytes.into();
|
||||
key_bytes.zeroize();
|
||||
let key = Key::from_slice(&key_bytes);
|
||||
Ok(*key)
|
||||
Ok(key)
|
||||
}
|
||||
|
||||
fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
|
||||
derive_key_with_params(password, salt, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P)
|
||||
}
|
||||
|
||||
fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> {
|
||||
/// Attempts to decrypt with the given cipher, nonce, ciphertext, and AAD.
|
||||
fn try_decrypt(
|
||||
cipher: &XChaCha20Poly1305,
|
||||
nonce: &XNonce,
|
||||
ct: &[u8],
|
||||
aad: &[u8],
|
||||
) -> std::result::Result<Vec<u8>, chacha20poly1305::aead::Error> {
|
||||
cipher.decrypt(nonce, chacha20poly1305::aead::Payload { msg: ct, aad })
|
||||
}
|
||||
|
||||
type EnvelopeComponents = (u32, u32, u32, Vec<u8>, [u8; NONCE_LEN], Vec<u8>);
|
||||
|
||||
/// Parse an envelope string and extract its components.
|
||||
/// Returns (m, t, p, salt, nonce_arr, ct) on success.
|
||||
fn parse_envelope(envelope: &str) -> Result<EnvelopeComponents> {
|
||||
let parts: Vec<&str> = envelope.trim().split(';').collect();
|
||||
if parts.len() < 7 {
|
||||
debug!("Invalid envelope format: {:?}", parts);
|
||||
@@ -383,40 +528,202 @@ fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> {
|
||||
.with_context(|| "missing nonce")?;
|
||||
let ct_b64 = parts[6].strip_prefix("ct=").with_context(|| "missing ct")?;
|
||||
|
||||
let mut salt = B64.decode(salt_b64).with_context(|| "bad salt b64")?;
|
||||
let mut nonce_bytes = B64.decode(nonce_b64).with_context(|| "bad nonce b64")?;
|
||||
let mut ct = B64.decode(ct_b64).with_context(|| "bad ct b64")?;
|
||||
let salt = B64.decode(salt_b64).with_context(|| "bad salt b64")?;
|
||||
let nonce_bytes = B64.decode(nonce_b64).with_context(|| "bad nonce b64")?;
|
||||
let ct = B64.decode(ct_b64).with_context(|| "bad ct b64")?;
|
||||
|
||||
if salt.len() != SALT_LEN || nonce_bytes.len() != NONCE_LEN {
|
||||
debug!(
|
||||
"Salt/nonce length mismatch: salt {}, nonce {}",
|
||||
salt.len(),
|
||||
nonce_bytes.len()
|
||||
);
|
||||
bail!("salt/nonce length mismatch");
|
||||
if nonce_bytes.len() != NONCE_LEN {
|
||||
debug!("Nonce length mismatch: {}", nonce_bytes.len());
|
||||
bail!("nonce length mismatch");
|
||||
}
|
||||
|
||||
let key = derive_key_with_params(password, &salt, m, t, p)?;
|
||||
let nonce_arr: [u8; NONCE_LEN] = nonce_bytes
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("invalid nonce length"))?;
|
||||
|
||||
Ok((m, t, p, salt, nonce_arr, ct))
|
||||
}
|
||||
|
||||
fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> {
|
||||
if password.expose_secret().is_empty() {
|
||||
bail!("password cannot be empty");
|
||||
}
|
||||
|
||||
let (m, t, p, mut salt, mut nonce_arr, mut ct) = parse_envelope(envelope)?;
|
||||
let nonce: XNonce = nonce_arr.into();
|
||||
|
||||
let aad_current = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
|
||||
|
||||
let mut key = derive_key_with_params(password, &salt, m, t, p)?;
|
||||
let cipher = XChaCha20Poly1305::new(&key);
|
||||
let aad = format!("{};{}", HEADER, VERSION);
|
||||
let nonce = XNonce::from_slice(&nonce_bytes);
|
||||
|
||||
let pt = cipher
|
||||
.decrypt(
|
||||
nonce,
|
||||
chacha20poly1305::aead::Payload {
|
||||
msg: &ct,
|
||||
aad: aad.as_bytes(),
|
||||
},
|
||||
)
|
||||
.map_err(|_| anyhow!("decryption failed (wrong password or corrupted data)"))?;
|
||||
if let Ok(pt) = try_decrypt(&cipher, &nonce, &ct, aad_current.as_bytes()) {
|
||||
let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
|
||||
key.zeroize();
|
||||
salt.zeroize();
|
||||
nonce_arr.zeroize();
|
||||
ct.zeroize();
|
||||
return Ok(s);
|
||||
}
|
||||
|
||||
key.zeroize();
|
||||
salt.zeroize();
|
||||
nonce_bytes.zeroize();
|
||||
nonce_arr.zeroize();
|
||||
ct.zeroize();
|
||||
|
||||
let s = String::from_utf8(pt).with_context(|| "plaintext not valid UTF-8")?;
|
||||
Ok(s)
|
||||
// TODO: Remove once all users have migrated their local vaults
|
||||
if let Ok(plaintext) = legacy::decrypt_string_legacy(password, envelope) {
|
||||
return Ok(plaintext);
|
||||
}
|
||||
|
||||
bail!("decryption failed (wrong password or corrupted data)")
|
||||
}
|
||||
|
||||
// TODO: Remove this entire module once all users have migrated their vaults.
|
||||
mod legacy {
|
||||
use super::*;
|
||||
|
||||
fn legacy_aad() -> String {
|
||||
format!("{};{}", HEADER, VERSION)
|
||||
}
|
||||
|
||||
pub fn decrypt_string_legacy(password: &SecretString, envelope: &str) -> Result<String> {
|
||||
if password.expose_secret().is_empty() {
|
||||
bail!("password cannot be empty");
|
||||
}
|
||||
|
||||
let (m, t, p, mut salt, mut nonce_arr, mut ct) = parse_envelope(envelope)?;
|
||||
let nonce: XNonce = nonce_arr.into();
|
||||
let aad = legacy_aad();
|
||||
|
||||
let mut key = derive_key_with_params(password, &salt, m, t, p)?;
|
||||
let cipher = XChaCha20Poly1305::new(&key);
|
||||
|
||||
if let Ok(pt) = try_decrypt(&cipher, &nonce, &ct, aad.as_bytes()) {
|
||||
let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
|
||||
key.zeroize();
|
||||
salt.zeroize();
|
||||
nonce_arr.zeroize();
|
||||
ct.zeroize();
|
||||
return Ok(s);
|
||||
}
|
||||
|
||||
key.zeroize();
|
||||
|
||||
let mut zeros_key: Key = [0u8; KEY_LEN].into();
|
||||
let zeros_cipher = XChaCha20Poly1305::new(&zeros_key);
|
||||
|
||||
if let Ok(pt) = try_decrypt(&zeros_cipher, &nonce, &ct, aad.as_bytes()) {
|
||||
debug!("Decrypted using legacy all-zeros key - secret needs migration");
|
||||
let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
|
||||
zeros_key.zeroize();
|
||||
salt.zeroize();
|
||||
nonce_arr.zeroize();
|
||||
ct.zeroize();
|
||||
return Ok(s);
|
||||
}
|
||||
|
||||
zeros_key.zeroize();
|
||||
salt.zeroize();
|
||||
nonce_arr.zeroize();
|
||||
ct.zeroize();
|
||||
|
||||
bail!("legacy decryption failed")
|
||||
}
|
||||
|
||||
pub fn is_current_format(password: &SecretString, envelope: &str) -> Result<bool> {
|
||||
if password.expose_secret().is_empty() {
|
||||
bail!("password cannot be empty");
|
||||
}
|
||||
|
||||
let (m, t, p, salt, nonce_arr, ct) = parse_envelope(envelope)?;
|
||||
let nonce: XNonce = nonce_arr.into();
|
||||
|
||||
let aad_current = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
|
||||
let key = derive_key_with_params(password, &salt, m, t, p)?;
|
||||
let cipher = XChaCha20Poly1305::new(&key);
|
||||
|
||||
Ok(try_decrypt(&cipher, &nonce, &ct, aad_current.as_bytes()).is_ok())
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Remove once all users have migrated their local vaults
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum SecretStatus {
|
||||
Current,
|
||||
NeedsMigration,
|
||||
}
|
||||
|
||||
// TODO: Remove once all users have migrated their local vaults
|
||||
#[derive(Debug)]
|
||||
pub struct MigrationResult {
|
||||
pub total: usize,
|
||||
pub migrated: usize,
|
||||
pub already_current: usize,
|
||||
pub failed: Vec<(String, String)>,
|
||||
}
|
||||
|
||||
impl LocalProvider {
|
||||
// TODO: Remove once all users have migrated their local vaults
|
||||
pub async fn migrate_vault(&self) -> Result<MigrationResult> {
|
||||
let vault_path = self.active_vault_path()?;
|
||||
let vault: HashMap<String, String> = load_vault(&vault_path).unwrap_or_default();
|
||||
|
||||
if vault.is_empty() {
|
||||
return Ok(MigrationResult {
|
||||
total: 0,
|
||||
migrated: 0,
|
||||
already_current: 0,
|
||||
failed: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let password = self.get_password()?;
|
||||
let mut migrated_vault = HashMap::new();
|
||||
let mut migrated_count = 0;
|
||||
let mut already_current_count = 0;
|
||||
let mut failed = vec![];
|
||||
|
||||
for (key, envelope) in &vault {
|
||||
match legacy::is_current_format(&password, envelope) {
|
||||
Ok(true) => {
|
||||
migrated_vault.insert(key.clone(), envelope.clone());
|
||||
already_current_count += 1;
|
||||
}
|
||||
Ok(false) => match decrypt_string(&password, envelope) {
|
||||
Ok(plaintext) => match encrypt_string(&password, &plaintext) {
|
||||
Ok(new_envelope) => {
|
||||
migrated_vault.insert(key.clone(), new_envelope);
|
||||
migrated_count += 1;
|
||||
}
|
||||
Err(e) => {
|
||||
failed.push((key.clone(), format!("re-encryption failed: {}", e)));
|
||||
migrated_vault.insert(key.clone(), envelope.clone());
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
failed.push((key.clone(), format!("decryption failed: {}", e)));
|
||||
migrated_vault.insert(key.clone(), envelope.clone());
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
failed.push((key.clone(), format!("status check failed: {}", e)));
|
||||
migrated_vault.insert(key.clone(), envelope.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if migrated_count > 0 {
|
||||
store_vault(&vault_path, &migrated_vault)?;
|
||||
}
|
||||
|
||||
Ok(MigrationResult {
|
||||
total: vault.len(),
|
||||
migrated: migrated_count,
|
||||
already_current: already_current_count,
|
||||
failed,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -424,6 +731,7 @@ mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use secrecy::{ExposeSecret, SecretString};
|
||||
use std::env as std_env;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[test]
|
||||
@@ -431,7 +739,7 @@ mod tests {
|
||||
let password = SecretString::new("test_password".to_string().into());
|
||||
let salt = [0u8; 16];
|
||||
let key = derive_key(&password, &salt).unwrap();
|
||||
assert_eq!(key.as_slice().len(), 32);
|
||||
assert_eq!(key.len(), 32);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -439,7 +747,7 @@ mod tests {
|
||||
let password = SecretString::new("test_password".to_string().into());
|
||||
let salt = [0u8; 16];
|
||||
let key = derive_key_with_params(&password, &salt, 10, 1, 1).unwrap();
|
||||
assert_eq!(key.as_slice().len(), 32);
|
||||
assert_eq!(key.len(), 32);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -452,15 +760,145 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn get_password_reads_password_file() {
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let dir = tempdir().unwrap();
|
||||
let file = dir.path().join("pw.txt");
|
||||
fs::write(&file, "secretpw\n").unwrap();
|
||||
fs::set_permissions(&file, fs::Permissions::from_mode(0o600)).unwrap();
|
||||
let provider = LocalProvider {
|
||||
password_file: Some(file),
|
||||
runtime_provider_name: None,
|
||||
..LocalProvider::default()
|
||||
};
|
||||
let pw = provider.get_password().unwrap();
|
||||
assert_eq!(pw.expose_secret(), "secretpw");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn get_password_rejects_insecure_file() {
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let dir = tempdir().unwrap();
|
||||
let file = dir.path().join("pw.txt");
|
||||
fs::write(&file, "secretpw\n").unwrap();
|
||||
fs::set_permissions(&file, fs::Permissions::from_mode(0o644)).unwrap();
|
||||
let provider = LocalProvider {
|
||||
password_file: Some(file),
|
||||
runtime_provider_name: None,
|
||||
..LocalProvider::default()
|
||||
};
|
||||
assert!(provider.get_password().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(not(unix))]
|
||||
fn get_password_reads_password_file() {
|
||||
let dir = tempdir().unwrap();
|
||||
let file = dir.path().join("pw.txt");
|
||||
fs::write(&file, "secretpw\n").unwrap();
|
||||
let provider = LocalProvider {
|
||||
password_file: Some(file),
|
||||
runtime_provider_name: None,
|
||||
..LocalProvider::default()
|
||||
};
|
||||
let pw = provider.get_password().unwrap();
|
||||
assert_eq!(pw.expose_secret(), "secretpw");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn persist_only_target_local_provider_git_settings() {
|
||||
let td = tempdir().unwrap();
|
||||
let xdg = td.path().join("xdg");
|
||||
let app_dir = xdg.join(calling_app_name());
|
||||
fs::create_dir_all(&app_dir).unwrap();
|
||||
unsafe {
|
||||
std_env::set_var("XDG_CONFIG_HOME", &xdg);
|
||||
}
|
||||
|
||||
let initial_yaml = indoc::indoc! {
|
||||
"---
|
||||
default_provider: local
|
||||
providers:
|
||||
- name: local
|
||||
type: local
|
||||
password_file: /tmp/.gman_pass
|
||||
git_branch: main
|
||||
git_remote_url: null
|
||||
git_user_name: null
|
||||
git_user_email: null
|
||||
git_executable: null
|
||||
- name: other
|
||||
type: local
|
||||
git_branch: main
|
||||
git_remote_url: git@github.com:someone/else.git
|
||||
run_configs:
|
||||
- name: echo
|
||||
secrets: [API_KEY]
|
||||
"
|
||||
};
|
||||
let cfg_path = app_dir.join("config.yml");
|
||||
fs::write(&cfg_path, initial_yaml).unwrap();
|
||||
|
||||
let provider = LocalProvider {
|
||||
password_file: None,
|
||||
git_branch: Some("dev".into()),
|
||||
git_remote_url: Some("git@github.com:user/repo.git".into()),
|
||||
git_user_name: Some("Test User".into()),
|
||||
git_user_email: Some("test@example.com".into()),
|
||||
git_executable: Some(PathBuf::from("/usr/bin/git")),
|
||||
runtime_provider_name: Some("local".into()),
|
||||
};
|
||||
|
||||
provider
|
||||
.persist_git_settings_to_config()
|
||||
.expect("persist ok");
|
||||
|
||||
let content = fs::read_to_string(&cfg_path).unwrap();
|
||||
let cfg: Config = serde_yaml::from_str(&content).unwrap();
|
||||
|
||||
assert_eq!(cfg.default_provider.as_deref(), Some("local"));
|
||||
assert!(cfg.run_configs.is_some());
|
||||
assert_eq!(cfg.run_configs.as_ref().unwrap().len(), 1);
|
||||
|
||||
let p0 = &cfg.providers[0];
|
||||
assert_eq!(p0.name.as_deref(), Some("local"));
|
||||
match &p0.provider_type {
|
||||
SupportedProvider::Local { provider_def } => {
|
||||
assert_eq!(provider_def.git_branch.as_deref(), Some("dev"));
|
||||
assert_eq!(
|
||||
provider_def.git_remote_url.as_deref(),
|
||||
Some("git@github.com:user/repo.git")
|
||||
);
|
||||
assert_eq!(provider_def.git_user_name.as_deref(), Some("Test User"));
|
||||
assert_eq!(
|
||||
provider_def.git_user_email.as_deref(),
|
||||
Some("test@example.com")
|
||||
);
|
||||
assert_eq!(
|
||||
provider_def.git_executable.as_ref(),
|
||||
Some(&PathBuf::from("/usr/bin/git"))
|
||||
);
|
||||
}
|
||||
_ => panic!("expected local provider"),
|
||||
}
|
||||
|
||||
let p1 = &cfg.providers[1];
|
||||
assert_eq!(p1.name.as_deref(), Some("other"));
|
||||
match &p1.provider_type {
|
||||
SupportedProvider::Local { provider_def } => {
|
||||
assert_eq!(provider_def.git_branch.as_deref(), Some("main"));
|
||||
assert_eq!(
|
||||
provider_def.git_remote_url.as_deref(),
|
||||
Some("git@github.com:someone/else.git")
|
||||
);
|
||||
}
|
||||
_ => panic!("expected local provider"),
|
||||
}
|
||||
|
||||
unsafe {
|
||||
std_env::remove_var("XDG_CONFIG_HOME");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
+16
-3
@@ -6,17 +6,24 @@ pub mod aws_secrets_manager;
|
||||
pub mod azure_key_vault;
|
||||
pub mod gcp_secret_manager;
|
||||
mod git_sync;
|
||||
pub mod gopass;
|
||||
pub mod local;
|
||||
|
||||
use crate::providers::gopass::GopassProvider;
|
||||
use crate::providers::local::LocalProvider;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use aws_secrets_manager::AwsSecretsManagerProvider;
|
||||
use azure_key_vault::AzureKeyVaultProvider;
|
||||
use gcp_secret_manager::GcpSecretManagerProvider;
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::{env, fmt};
|
||||
use validator::{Validate, ValidationErrors};
|
||||
|
||||
pub(in crate::providers) static ENV_PATH: Lazy<Result<String>> =
|
||||
Lazy::new(|| env::var("PATH").context("No PATH environment variable"));
|
||||
|
||||
/// A secret storage backend capable of CRUD, with optional
|
||||
/// update, listing, and sync support.
|
||||
#[async_trait::async_trait]
|
||||
@@ -63,7 +70,11 @@ pub enum SupportedProvider {
|
||||
},
|
||||
AzureKeyVault {
|
||||
#[serde(flatten)]
|
||||
provider_def: azure_key_vault::AzureKeyVaultProvider,
|
||||
provider_def: AzureKeyVaultProvider,
|
||||
},
|
||||
Gopass {
|
||||
#[serde(flatten)]
|
||||
provider_def: GopassProvider,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -74,6 +85,7 @@ impl Validate for SupportedProvider {
|
||||
SupportedProvider::AwsSecretsManager { provider_def } => provider_def.validate(),
|
||||
SupportedProvider::GcpSecretManager { provider_def } => provider_def.validate(),
|
||||
SupportedProvider::AzureKeyVault { provider_def } => provider_def.validate(),
|
||||
SupportedProvider::Gopass { provider_def } => provider_def.validate(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -93,6 +105,7 @@ impl Display for SupportedProvider {
|
||||
SupportedProvider::AwsSecretsManager { .. } => write!(f, "aws_secrets_manager"),
|
||||
SupportedProvider::GcpSecretManager { .. } => write!(f, "gcp_secret_manager"),
|
||||
SupportedProvider::AzureKeyVault { .. } => write!(f, "azure_key_vault"),
|
||||
SupportedProvider::Gopass { .. } => write!(f, "gopass"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
+95
-36
@@ -1,10 +1,16 @@
|
||||
use assert_cmd::prelude::*;
|
||||
use predicates::prelude::*;
|
||||
use std::fs;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Stdio};
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn gman_bin() -> PathBuf {
|
||||
PathBuf::from(env!("CARGO_BIN_EXE_gman"))
|
||||
}
|
||||
|
||||
fn setup_env() -> (TempDir, PathBuf, PathBuf) {
|
||||
let td = tempfile::tempdir().expect("tempdir");
|
||||
let cfg_home = td.path().join("config");
|
||||
@@ -44,15 +50,90 @@ providers:
|
||||
password_file.display()
|
||||
)
|
||||
};
|
||||
// Confy with yaml feature typically uses .yml; write both to be safe.
|
||||
fs::write(app_dir.join("config.yml"), &cfg).unwrap();
|
||||
fs::write(app_dir.join("config.yaml"), &cfg).unwrap();
|
||||
}
|
||||
|
||||
fn create_password_file(path: &Path, content: &[u8]) {
|
||||
fs::write(path, content).unwrap();
|
||||
#[cfg(unix)]
|
||||
{
|
||||
fs::set_permissions(path, fs::Permissions::from_mode(0o600)).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn cli_config_no_changes() {
|
||||
let (td, xdg_cfg, xdg_cache) = setup_env();
|
||||
let pw_file = td.path().join("pw.txt");
|
||||
create_password_file(&pw_file, b"pw\n");
|
||||
write_yaml_config(&xdg_cfg, &pw_file, None);
|
||||
|
||||
let editor = td.path().join("noop-editor.sh");
|
||||
fs::write(&editor, b"#!/bin/sh\nexit 0\n").unwrap();
|
||||
let mut perms = fs::metadata(&editor).unwrap().permissions();
|
||||
perms.set_mode(0o755);
|
||||
fs::set_permissions(&editor, perms).unwrap();
|
||||
|
||||
let mut cmd = Command::new(gman_bin());
|
||||
cmd.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.env("EDITOR", &editor)
|
||||
.arg("config");
|
||||
|
||||
cmd.assert()
|
||||
.success()
|
||||
.stdout(predicate::str::contains("No changes made to configuration"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn cli_config_updates_and_persists() {
|
||||
let (td, xdg_cfg, xdg_cache) = setup_env();
|
||||
let pw_file = td.path().join("pw.txt");
|
||||
create_password_file(&pw_file, b"pw\n");
|
||||
write_yaml_config(&xdg_cfg, &pw_file, None);
|
||||
|
||||
let editor = td.path().join("append-run-config.sh");
|
||||
// Note: We need a small sleep to ensure the file modification timestamp changes.
|
||||
// The dialoguer Editor uses file modification time to detect changes, and on fast
|
||||
// systems the edit can complete within the same timestamp granularity.
|
||||
let script = r#"#!/bin/sh
|
||||
FILE="$1"
|
||||
sleep 0.1
|
||||
cat >> "$FILE" <<'EOF'
|
||||
run_configs:
|
||||
- name: echo
|
||||
secrets: ["api_key"]
|
||||
EOF
|
||||
exit 0
|
||||
"#;
|
||||
fs::write(&editor, script.as_bytes()).unwrap();
|
||||
let mut perms = fs::metadata(&editor).unwrap().permissions();
|
||||
perms.set_mode(0o755);
|
||||
fs::set_permissions(&editor, perms).unwrap();
|
||||
|
||||
let mut cmd = Command::new(gman_bin());
|
||||
cmd.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.env("EDITOR", &editor)
|
||||
.arg("config");
|
||||
|
||||
cmd.assert().success().stdout(predicate::str::contains(
|
||||
"Configuration updated successfully",
|
||||
));
|
||||
|
||||
let cfg_path = xdg_cfg.join("gman").join("config.yml");
|
||||
let written = fs::read_to_string(&cfg_path).expect("config file readable");
|
||||
assert!(written.contains("run_configs:"));
|
||||
assert!(written.contains("name: echo"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cli_shows_help() {
|
||||
let (_td, cfg, cache) = setup_env();
|
||||
let mut cmd = Command::cargo_bin("gman").unwrap();
|
||||
let mut cmd = Command::new(gman_bin());
|
||||
cmd.env("XDG_CACHE_HOME", &cache)
|
||||
.env("XDG_CONFIG_HOME", &cfg)
|
||||
.arg("--help");
|
||||
@@ -61,27 +142,14 @@ fn cli_shows_help() {
|
||||
.stdout(predicate::str::contains("Usage").or(predicate::str::contains("Add")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cli_completions_bash() {
|
||||
let (_td, cfg, cache) = setup_env();
|
||||
let mut cmd = Command::cargo_bin("gman").unwrap();
|
||||
cmd.env("XDG_CACHE_HOME", &cache)
|
||||
.env("XDG_CONFIG_HOME", &cfg)
|
||||
.args(["completions", "bash"]);
|
||||
cmd.assert()
|
||||
.success()
|
||||
.stdout(predicate::str::contains("_gman").or(predicate::str::contains("complete -F")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cli_add_get_list_update_delete_roundtrip() {
|
||||
let (td, xdg_cfg, xdg_cache) = setup_env();
|
||||
let pw_file = td.path().join("pw.txt");
|
||||
fs::write(&pw_file, b"testpw\n").unwrap();
|
||||
create_password_file(&pw_file, b"testpw\n");
|
||||
write_yaml_config(&xdg_cfg, &pw_file, None);
|
||||
|
||||
// add
|
||||
let mut add = Command::cargo_bin("gman").unwrap();
|
||||
let mut add = Command::new(gman_bin());
|
||||
add.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.stdin(Stdio::piped())
|
||||
@@ -97,8 +165,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
|
||||
let add_out = child.wait_with_output().unwrap();
|
||||
assert!(add_out.status.success());
|
||||
|
||||
// get (text)
|
||||
let mut get = Command::cargo_bin("gman").unwrap();
|
||||
let mut get = Command::new(gman_bin());
|
||||
get.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.args(["get", "my_api_key"]);
|
||||
@@ -106,8 +173,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
|
||||
.success()
|
||||
.stdout(predicate::str::contains("super_secret"));
|
||||
|
||||
// get as JSON
|
||||
let mut get_json = Command::cargo_bin("gman").unwrap();
|
||||
let mut get_json = Command::new(gman_bin());
|
||||
get_json
|
||||
.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
@@ -116,8 +182,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
|
||||
predicate::str::contains("my_api_key").and(predicate::str::contains("super_secret")),
|
||||
);
|
||||
|
||||
// list
|
||||
let mut list = Command::cargo_bin("gman").unwrap();
|
||||
let mut list = Command::new(gman_bin());
|
||||
list.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.arg("list");
|
||||
@@ -125,8 +190,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
|
||||
.success()
|
||||
.stdout(predicate::str::contains("my_api_key"));
|
||||
|
||||
// update
|
||||
let mut update = Command::cargo_bin("gman").unwrap();
|
||||
let mut update = Command::new(gman_bin());
|
||||
update
|
||||
.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
@@ -142,8 +206,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
|
||||
let upd_out = child.wait_with_output().unwrap();
|
||||
assert!(upd_out.status.success());
|
||||
|
||||
// get again
|
||||
let mut get2 = Command::cargo_bin("gman").unwrap();
|
||||
let mut get2 = Command::new(gman_bin());
|
||||
get2.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.args(["get", "my_api_key"]);
|
||||
@@ -151,15 +214,13 @@ fn cli_add_get_list_update_delete_roundtrip() {
|
||||
.success()
|
||||
.stdout(predicate::str::contains("new_val"));
|
||||
|
||||
// delete
|
||||
let mut del = Command::cargo_bin("gman").unwrap();
|
||||
let mut del = Command::new(gman_bin());
|
||||
del.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.args(["delete", "my_api_key"]);
|
||||
del.assert().success();
|
||||
|
||||
// get should now fail
|
||||
let mut get_missing = Command::cargo_bin("gman").unwrap();
|
||||
let mut get_missing = Command::new(gman_bin());
|
||||
get_missing
|
||||
.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
@@ -171,11 +232,10 @@ fn cli_add_get_list_update_delete_roundtrip() {
|
||||
fn cli_wrap_dry_run_env_injection() {
|
||||
let (td, xdg_cfg, xdg_cache) = setup_env();
|
||||
let pw_file = td.path().join("pw.txt");
|
||||
fs::write(&pw_file, b"pw\n").unwrap();
|
||||
create_password_file(&pw_file, b"pw\n");
|
||||
write_yaml_config(&xdg_cfg, &pw_file, Some("echo"));
|
||||
|
||||
// Add the secret so the profile can read it
|
||||
let mut add = Command::cargo_bin("gman").unwrap();
|
||||
let mut add = Command::new(gman_bin());
|
||||
add.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.stdin(Stdio::piped())
|
||||
@@ -186,8 +246,7 @@ fn cli_wrap_dry_run_env_injection() {
|
||||
let add_out = child.wait_with_output().unwrap();
|
||||
assert!(add_out.status.success());
|
||||
|
||||
// Dry-run wrapping: prints preview command
|
||||
let mut wrap = Command::cargo_bin("gman").unwrap();
|
||||
let mut wrap = Command::new(gman_bin());
|
||||
wrap.env("XDG_CONFIG_HOME", &xdg_cfg)
|
||||
.env("XDG_CACHE_HOME", &xdg_cache)
|
||||
.arg("--dry-run")
|
||||
|
||||
+40
-10
@@ -9,6 +9,7 @@ mod tests {
|
||||
fn test_run_config_valid() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: None,
|
||||
flag_position: None,
|
||||
@@ -23,6 +24,7 @@ mod tests {
|
||||
fn test_run_config_missing_name() {
|
||||
let run_config = RunConfig {
|
||||
name: None,
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: None,
|
||||
flag_position: None,
|
||||
@@ -37,6 +39,7 @@ mod tests {
|
||||
fn test_run_config_missing_secrets() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: None,
|
||||
flag: None,
|
||||
flag_position: None,
|
||||
@@ -51,6 +54,7 @@ mod tests {
|
||||
fn test_run_config_invalid_flag_position() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: Some("--test-flag".to_string()),
|
||||
flag_position: Some(0),
|
||||
@@ -65,6 +69,7 @@ mod tests {
|
||||
fn test_run_config_flags_or_none_all_some() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: Some("--test-flag".to_string()),
|
||||
flag_position: Some(1),
|
||||
@@ -79,6 +84,7 @@ mod tests {
|
||||
fn test_run_config_flags_or_none_all_none() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: None,
|
||||
flag_position: None,
|
||||
@@ -93,6 +99,7 @@ mod tests {
|
||||
fn test_run_config_flags_or_none_partial_some() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: Some("--test-flag".to_string()),
|
||||
flag_position: None,
|
||||
@@ -107,6 +114,7 @@ mod tests {
|
||||
fn test_run_config_flags_or_none_missing_placeholder() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: Some("--test-flag".to_string()),
|
||||
flag_position: Some(1),
|
||||
@@ -121,6 +129,7 @@ mod tests {
|
||||
fn test_run_config_flags_or_files_all_none() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: None,
|
||||
flag_position: None,
|
||||
@@ -135,6 +144,7 @@ mod tests {
|
||||
fn test_run_config_flags_or_files_files_is_some() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: None,
|
||||
flag_position: None,
|
||||
@@ -149,6 +159,7 @@ mod tests {
|
||||
fn test_run_config_flags_or_files_all_some() {
|
||||
let run_config = RunConfig {
|
||||
name: Some("test".to_string()),
|
||||
provider: None,
|
||||
secrets: Some(vec!["secret1".to_string()]),
|
||||
flag: Some("--test-flag".to_string()),
|
||||
flag_position: Some(1),
|
||||
@@ -241,15 +252,34 @@ mod tests {
|
||||
#[test]
|
||||
fn test_config_local_provider_password_file() {
|
||||
let path = Config::local_provider_password_file();
|
||||
let expected_path = dirs::home_dir().map(|p| p.join(".gman_password"));
|
||||
if let Some(p) = &expected_path {
|
||||
if !p.exists() {
|
||||
assert_eq!(path, None);
|
||||
} else {
|
||||
assert_eq!(path, expected_path);
|
||||
}
|
||||
} else {
|
||||
assert_eq!(path, None);
|
||||
}
|
||||
// Derive expected filename based on current test executable name
|
||||
let exe = std::env::current_exe().expect("current_exe");
|
||||
let stem = exe
|
||||
.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.expect("utf-8 file stem");
|
||||
let expected = dirs::home_dir().map(|p| p.join(format!(".{}_password", stem)));
|
||||
assert_eq!(Some(path), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_config_duplicate_provider_names_is_invalid() {
|
||||
let name = Some("dup".into());
|
||||
let p1 = ProviderConfig {
|
||||
name: name.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
let p2 = ProviderConfig {
|
||||
name,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let cfg = Config {
|
||||
default_provider: Some("dup".into()),
|
||||
providers: vec![p1, p2],
|
||||
run_configs: None,
|
||||
};
|
||||
|
||||
assert!(cfg.validate().is_err());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
# Seeds for failure cases proptest has generated in the past. It is
|
||||
# automatically read and these particular cases re-run before any
|
||||
# novel cases are generated.
|
||||
#
|
||||
# It is recommended to check this file in to source control so that
|
||||
# everyone who runs the test benefits from these saved cases.
|
||||
cc 155469a45d7311cd4003e23a3bcdaa8e55879e6222c1b6313a2b1f0b563bb195 # shrinks to password = "", msg = " "
|
||||
cc 0bc9f608677234c082d10ff51b15dc39b4c194cdf920b4d87e553467c93824ed # shrinks to password = "", msg = ""
|
||||
@@ -1,15 +1,15 @@
|
||||
use base64::Engine;
|
||||
use gman::{decrypt_string, encrypt_string};
|
||||
use proptest::prelude::*;
|
||||
|
||||
proptest! {
|
||||
#![proptest_config(ProptestConfig::with_cases(64))]
|
||||
}
|
||||
use secrecy::SecretString;
|
||||
|
||||
proptest! {
|
||||
// Reduced case count because Argon2 key derivation is intentionally slow
|
||||
// (65 MiB memory, 3 iterations per encryption/decryption)
|
||||
#![proptest_config(ProptestConfig::with_cases(4))]
|
||||
|
||||
#[test]
|
||||
fn prop_encrypt_decrypt_roundtrip(password in ".{0,64}", msg in ".{0,512}") {
|
||||
fn prop_encrypt_decrypt_roundtrip(password in ".{1,64}", msg in ".{0,512}") {
|
||||
let pw = SecretString::new(password.into());
|
||||
let env = encrypt_string(pw.clone(), &msg).unwrap();
|
||||
let out = decrypt_string(pw, &env).unwrap();
|
||||
@@ -18,10 +18,9 @@ proptest! {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_tamper_ciphertext_detected(password in ".{0,32}", msg in ".{1,128}") {
|
||||
fn prop_tamper_ciphertext_detected(password in ".{1,32}", msg in ".{1,128}") {
|
||||
let pw = SecretString::new(password.into());
|
||||
let env = encrypt_string(pw.clone(), &msg).unwrap();
|
||||
// Flip a bit in the ct payload segment
|
||||
let mut parts: Vec<&str> = env.split(';').collect();
|
||||
let ct_b64 = parts[6].strip_prefix("ct=").unwrap();
|
||||
let mut ct = base64::engine::general_purpose::STANDARD.decode(ct_b64).unwrap();
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
use gman::config::{Config, ProviderConfig};
|
||||
use gman::providers::{SecretProvider, SupportedProvider};
|
||||
use pretty_assertions::{assert_eq, assert_str_eq};
|
||||
use validator::Validate;
|
||||
|
||||
#[test]
|
||||
fn test_gopass_supported_provider_display_and_validate_from_yaml() {
|
||||
// Build a SupportedProvider via YAML to avoid direct type import
|
||||
let yaml = r#"---
|
||||
type: gopass
|
||||
store: personal
|
||||
"#;
|
||||
|
||||
let sp: SupportedProvider = serde_yaml::from_str(yaml).expect("valid supported provider yaml");
|
||||
// Validate delegates to inner provider (no required fields)
|
||||
assert!(sp.validate().is_ok());
|
||||
// Display formatting for the enum variant
|
||||
assert_eq!(sp.to_string(), "gopass");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_provider_config_with_gopass_deserialize_and_extract() {
|
||||
// Minimal ProviderConfig YAML using the gopass variant
|
||||
let yaml = r#"---
|
||||
name: gopass
|
||||
type: gopass
|
||||
"#;
|
||||
|
||||
let pc: ProviderConfig = serde_yaml::from_str(yaml).expect("valid provider config yaml");
|
||||
// Gopass has no required fields, so validation should pass
|
||||
assert!(pc.validate().is_ok());
|
||||
|
||||
// Extract the provider and inspect its name via the trait
|
||||
let mut pc_owned = pc.clone();
|
||||
let provider: &mut dyn SecretProvider = pc_owned.extract_provider();
|
||||
assert_str_eq!(provider.name(), "GopassProvider");
|
||||
|
||||
// Round-trip through Config with default_provider
|
||||
let cfg_yaml = r#"---
|
||||
default_provider: gopass
|
||||
providers:
|
||||
- name: gopass
|
||||
type: gopass
|
||||
store: personal
|
||||
"#;
|
||||
let cfg: Config = serde_yaml::from_str(cfg_yaml).expect("valid config yaml");
|
||||
assert!(cfg.validate().is_ok());
|
||||
|
||||
let extracted = cfg
|
||||
.extract_provider_config(None)
|
||||
.expect("should find default provider");
|
||||
assert_eq!(extracted.name.as_deref(), Some("gopass"));
|
||||
}
|
||||
@@ -34,6 +34,7 @@ fn test_local_provider_valid() {
|
||||
git_user_name: None,
|
||||
git_user_email: Some("test@example.com".to_string()),
|
||||
git_executable: None,
|
||||
runtime_provider_name: None,
|
||||
};
|
||||
|
||||
assert!(provider.validate().is_ok());
|
||||
@@ -48,6 +49,7 @@ fn test_local_provider_invalid_email() {
|
||||
git_user_name: None,
|
||||
git_user_email: Some("test".to_string()),
|
||||
git_executable: None,
|
||||
runtime_provider_name: None,
|
||||
};
|
||||
|
||||
assert!(config.validate().is_err());
|
||||
@@ -56,10 +58,11 @@ fn test_local_provider_invalid_email() {
|
||||
#[test]
|
||||
fn test_local_provider_default() {
|
||||
let provider = LocalProvider::default();
|
||||
assert_eq!(
|
||||
provider.password_file,
|
||||
Config::local_provider_password_file()
|
||||
);
|
||||
let expected_pw = {
|
||||
let p = Config::local_provider_password_file();
|
||||
if p.exists() { Some(p) } else { None }
|
||||
};
|
||||
assert_eq!(provider.password_file, expected_pw);
|
||||
assert_eq!(provider.git_branch, Some("main".into()));
|
||||
assert_eq!(provider.git_remote_url, None);
|
||||
assert_eq!(provider.git_user_name, None);
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
mod aws_secrets_manager_tests;
|
||||
mod azure_key_vault_tests;
|
||||
mod gcp_secret_manager_tests;
|
||||
mod gopass_tests;
|
||||
mod local_tests;
|
||||
mod provider_tests;
|
||||
|
||||
Reference in New Issue
Block a user