184 Commits

Author SHA1 Message Date
github-actions[bot]
9f63ee8265 bump: version 0.4.0 → 0.4.1 [skip ci] 2026-03-20 22:04:37 +00:00
6cba3d6d0b feat: Upgraded aws-lc-sys version to address high severity CWE-295
Check / stable / fmt (push) Failing after 25s
Check / beta / clippy (push) Failing after 39s
Check / stable / clippy (push) Failing after 40s
Check / nightly / doc (push) Failing after 41s
Check / 1.89.0 / check (push) Failing after 41s
Test Suite / ubuntu / beta (push) Failing after 43s
Test Suite / ubuntu / stable (push) Failing after 42s
Test Suite / ubuntu / stable / coverage (push) Failing after 59s
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
2026-03-20 16:03:49 -06:00
b2a51dc1b1 docs: Cleaned up README formatting a tad (80 character column length)
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2026-03-09 17:35:22 -06:00
github-actions[bot]
cc44fca54e bump: version 0.3.0 → 0.4.0 [skip ci] 2026-03-09 23:06:23 +00:00
9a678ae67d build: Updated dependencies to address security issues
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2026-03-09 16:57:29 -06:00
66b950991c feat: Added 1password support
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2026-03-09 16:33:57 -06:00
e8e0bd02e9 docs: created an authorship policy and pull request template to require disclosure of AI coding assistance for all contributions 2026-02-24 17:48:28 -07:00
ed5a7308be build: Migrated from Makefile to justfile
Check / stable / fmt (push) Successful in 9m56s
Check / beta / clippy (push) Failing after 40s
Check / stable / clippy (push) Failing after 39s
Check / nightly / doc (push) Failing after 36s
Check / 1.89.0 / check (push) Failing after 39s
Test Suite / ubuntu / beta (push) Failing after 39s
Test Suite / ubuntu / stable (push) Failing after 38s
Test Suite / ubuntu / stable / coverage (push) Failing after 1m4s
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
2026-02-02 10:40:52 -07:00
044d5960eb feat: sort local keys alphabetically when listing them
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2026-02-02 10:36:56 -07:00
github-actions[bot]
c0aa379b20 bump: version 0.2.3 → 0.3.0 [skip ci] 2026-02-02 01:08:03 +00:00
f9fd9692aa build: Modified integration tests so they don't run when cross-compiling to non-x86 systems
Check / stable / fmt (push) Successful in 9m54s
Check / beta / clippy (push) Failing after 39s
Check / stable / clippy (push) Failing after 40s
Check / nightly / doc (push) Failing after 37s
Check / 1.89.0 / check (push) Failing after 38s
Test Suite / ubuntu / beta (push) Failing after 38s
Test Suite / ubuntu / stable (push) Failing after 39s
Test Suite / ubuntu / stable / coverage (push) Failing after 1m3s
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
2026-02-01 18:03:51 -07:00
2615b23d6e test: Removed deprecated function calls from cli_tests module and sped up proptests
Check / stable / fmt (push) Successful in 9m55s
Check / beta / clippy (push) Failing after 38s
Check / stable / clippy (push) Failing after 39s
Check / nightly / doc (push) Failing after 37s
Check / 1.89.0 / check (push) Failing after 38s
Test Suite / ubuntu / beta (push) Failing after 38s
Test Suite / ubuntu / stable (push) Failing after 39s
Test Suite / ubuntu / stable / coverage (push) Failing after 1m28s
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
2026-02-01 17:14:24 -07:00
628a13011e build: upgraded to the most recent Azure SDK version 2026-02-01 16:44:28 -07:00
cff4420ee0 fix: Upgraded AWS dependencies to address CWE-20 2026-02-01 16:15:41 -07:00
9944e29ef0 fix: A critical security flaw was discovered that essentially had all local secrets be encrypted with an all-zero key 2026-02-01 16:15:13 -07:00
c95bae1761 fix: Addressed XNonce::from_slice deprecation warning 2026-02-01 14:48:37 -07:00
21da7b782e fix: Secrets are now stored exactly as passed without newlines stripped 2026-02-01 14:47:43 -07:00
d038930ce5 docs: fixed a typo in the mac/linux install script command
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2025-11-07 11:39:04 -07:00
github-actions[bot]
f0fc829a73 chore: bump Cargo.toml to 0.2.3 2025-10-14 23:32:36 +00:00
github-actions[bot]
ba0f108aa8 bump: version 0.2.2 → 0.2.3 [skip ci] 2025-10-14 23:32:32 +00:00
6daa6fd2f2 refactor: Refactored the library for gman so that it dynamically names config and password files to be used across any application 2025-10-14 17:12:43 -06:00
5fa4dbfe89 Merge remote-tracking branch 'origin/main' 2025-10-07 10:59:00 -06:00
bdcd496046 docs: fixed typo in code of conduct 2025-10-07 10:58:52 -06:00
github-actions[bot]
e37b80a262 bump: version 0.2.1 → 0.2.2 [skip ci] 2025-09-30 22:03:17 +00:00
3ce62c272e build: Updated changelog format 2025-09-30 15:42:41 -06:00
21b771507c Merge remote-tracking branch 'origin/main' 2025-09-30 15:40:36 -06:00
508c8b7feb style: Reformatted code 2025-09-30 15:40:27 -06:00
github-actions[bot]
33a889fa67 chore: bump Cargo.toml to 0.2.2 2025-09-30 21:37:14 +00:00
github-actions[bot]
7ddb7812fc bump: version 0.2.1 → 0.2.2 [skip ci] 2025-09-30 21:37:04 +00:00
9e11648a7c refactor: Environment variable interpolation in config file works globally, not based on type 2025-09-30 15:35:48 -06:00
github-actions[bot]
ed79af2a8a chore: bump Cargo.toml to 0.2.1 2025-09-30 17:44:19 +00:00
github-actions[bot]
443fbcf305 bump: version 0.2.0 → 0.2.1 [skip ci] 2025-09-30 17:44:08 +00:00
78d7e90e68 feat: Environment variable interpolation in the Gman configuration file 2025-09-30 11:10:20 -06:00
01d4819160 fix: Corrected tab completions for the provider flag 2025-09-30 09:25:29 -06:00
github-actions[bot]
e200a32f5a bump: version 0.1.0 → 0.2.0 [skip ci] 2025-09-30 03:56:50 +00:00
008b33b044 docs: Updated changelog once more 2025-09-29 21:35:02 -06:00
Alex Clarke
f35afac20f docs: Update Changeling changelog 2025-09-29 18:48:09 -07:00
262a3d6435 build: Fixed build dependencies 2025-09-29 18:16:39 -06:00
eb9e671818 Merge branch 'main' of github.com:Dark-Alex-17/gman 2025-09-29 18:15:44 -06:00
efc8af2c93 docs: Updated the gopass provider docs 2025-09-29 18:15:30 -06:00
3d38ac9b51 docs: Added gopass docs to README 2025-09-29 17:55:06 -06:00
github-actions[bot]
8d40c3773f chore: bump Cargo.toml to 0.2.0 2025-09-29 23:52:24 +00:00
github-actions[bot]
16ce245218 bump: version 0.1.0 → 0.2.0 [skip ci] 2025-09-29 23:52:12 +00:00
a64f4dbf79 test: Added tests for the new gopass provider 2025-09-29 17:28:04 -06:00
1b83d9b199 feat: gopass support 2025-09-29 16:34:51 -06:00
f006503736 feat: Added command aliases to make the CLI more universal 2025-09-29 16:30:39 -06:00
9abd2f88cf feat: Added dynamic tab completions for the profile, providers, and the secrets in any given secret manager 2025-09-29 16:30:16 -06:00
29acad5eed feat: Users can now specify a default provider to use with each run config, so they don't need to explicitly specify which to use when wanting to run different applications. 2025-09-29 15:18:56 -06:00
github-actions[bot]
aba958ff8e chore: bump Cargo.toml to 0.1.0 2025-09-17 23:59:23 +00:00
github-actions[bot]
b273c75018 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-17 23:59:19 +00:00
1f7f5dbcae ci: Created single-line installation scripts to also install gman automatically without any package managers 2025-09-17 12:47:08 -06:00
00802795ee build: prep for full release 2025-09-16 13:58:31 -06:00
2a01189a07 ci: Manually created first winget package: ready for full release (hopefully) 2025-09-16 10:55:59 -06:00
github-actions[bot]
d3ca5d52bc chore: bump Cargo.toml to 0.0.2 2025-09-16 15:30:55 +00:00
github-actions[bot]
7124e067a2 bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-16 15:30:51 +00:00
5f282fef87 ci: Need to manually deploy first winget package 2025-09-16 09:29:55 -06:00
github-actions[bot]
e0121c88e9 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 22:56:56 +00:00
9e3065794a ci: Fixed heredoc issue for windows-latest 2025-09-15 16:56:22 -06:00
github-actions[bot]
3fecd6a00a bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 22:36:14 +00:00
be8e3263c9 ci: Use windows-latest for winget release job 2025-09-15 16:33:08 -06:00
github-actions[bot]
d563cc286f chore: bump Cargo.toml to 0.1.0 2025-09-15 22:13:34 +00:00
github-actions[bot]
fc75487da9 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 22:13:30 +00:00
a015c9b382 ci: Change winget packaging methodology 2025-09-15 16:12:04 -06:00
github-actions[bot]
e84f3da8d8 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 21:44:25 +00:00
f4a54a412a ci: revert failed GH pipeline 2025-09-15 15:43:46 -06:00
github-actions[bot]
d1bfe9fb62 chore: bump Cargo.toml to 0.1.0 2025-09-15 21:38:34 +00:00
github-actions[bot]
af01a4855a bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 21:38:30 +00:00
9ceaa1078d ci: Update the winget release step so that it works for brand new packages 2025-09-15 15:37:50 -06:00
github-actions[bot]
7f3edcaa9d bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 21:07:05 +00:00
3d93b5b479 ci: Trying to rename the gman Windows executable so it can be picked up when archiving everything else 2025-09-15 15:06:23 -06:00
github-actions[bot]
32ea7ea35d bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 20:49:44 +00:00
cce8a23b63 ci: Revert to previous release 2025-09-15 14:49:13 -06:00
github-actions[bot]
5ad5c14acb bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 20:05:43 +00:00
10e2beba3c ci: Modified windows binaries to be in executable form instead of archives to fix winget deploy 2025-09-15 14:00:40 -06:00
github-actions[bot]
07b14935df chore: bump Cargo.toml to 0.1.0 2025-09-15 19:42:46 +00:00
github-actions[bot]
10f826b23b bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 19:42:43 +00:00
0871f9de7b ci: Omit installers regex for winget to see what it does 2025-09-15 13:41:24 -06:00
github-actions[bot]
5c5107ed5f bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 19:20:25 +00:00
13a074c3be ci: Final fix for winget release 2025-09-15 13:11:05 -06:00
github-actions[bot]
cf7d010bc4 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 18:52:38 +00:00
c8696177c5 ci: Fix winget release 2025-09-15 12:51:36 -06:00
github-actions[bot]
5fd25df6ac bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 18:24:01 +00:00
be8a5535fa ci: Attempted fix of winget release 2025-09-15 12:23:18 -06:00
github-actions[bot]
48d07e697b chore: bump Cargo.toml to 0.2.0 2025-09-15 17:48:43 +00:00
github-actions[bot]
6c5b59f619 bump: version 0.1.1 → 0.2.0 [skip ci] 2025-09-15 17:48:39 +00:00
8252191317 ci: Migrated Windows install to winget 2025-09-15 11:20:12 -06:00
261ec0bb6d test: Fixed Windows CLI tests (forgot to add unix cfg check) 2025-09-15 10:24:17 -06:00
e8de47dc52 test: Added tests for new config command 2025-09-15 09:47:31 -06:00
dbb4d265c4 feat: Subcommand to edit the config directly instead of having to find the file 2025-09-15 09:25:09 -06:00
924976ee1b Merge remote-tracking branch 'origin/main'
# Conflicts:
#	.github/workflows/release.yml
2025-09-15 07:50:12 -06:00
ae6fe8be44 test: fixed local provider user test 2025-09-14 21:45:39 -06:00
76df717fea style: improved formatting in local provider 2025-09-14 21:42:44 -06:00
Alex Clarke
2885decede fix: improved user messages for local provider sync set up 2025-09-14 21:38:02 -06:00
github-actions[bot]
353ce16782 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 02:52:29 +00:00
2098aa65c1 ci: fixed copy/paste typo 2025-09-14 20:44:21 -06:00
1385aacc62 ci: fix bug after config refactor for persisting user prompted local sync changes to config file 2025-09-14 20:42:31 -06:00
a2106a06a1 ci: Fix chocolatey install 2025-09-14 20:32:35 -06:00
github-actions[bot]
f0d763c269 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 02:01:03 +00:00
4c30bc7e7d ci: Attempting to fix path errors in chocolatey install 2025-09-14 20:00:03 -06:00
github-actions[bot]
21127f3ec3 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 01:32:17 +00:00
6adc4ce228 ci: Potential typo in chocolatey package deploy 2025-09-14 19:30:28 -06:00
github-actions[bot]
9e740ec550 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 01:03:26 +00:00
81f7e86adc ci: Fixed typo in artifact upload 2025-09-14 19:02:18 -06:00
github-actions[bot]
8a79de2fc8 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 00:45:15 +00:00
9116d1ed53 ci: Modify changelog generation and output 2025-09-14 18:43:55 -06:00
github-actions[bot]
966b69b43b bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 00:39:08 +00:00
71f4781780 fix: Pass the changelog to the GHA properly using a file 2025-09-14 18:38:18 -06:00
fbd2503136 fix: Potential bug in changelog variable generation 2025-09-14 18:33:30 -06:00
00080543bd ci: Migrated conventional-changelog to conventionalcommits 2025-09-14 18:30:30 -06:00
1c02106bdc ci: Fix bug in changelog generation 2025-09-14 18:27:56 -06:00
595917bb2b ci: Fix bug in artifact directory name to be unique per release 2025-09-14 18:22:51 -06:00
github-actions[bot]
c0c0ae0b99 bump: version 0.0.1 → 0.1.0 [skip ci] 2025-09-15 00:01:26 +00:00
477c87aea9 ci: Full release attempt (GitHub, Choco, Homebrew, and Crates) 2025-09-14 18:00:38 -06:00
4b9a84cf70 Merge remote-tracking branch 'origin/main' 2025-09-14 17:46:10 -06:00
a1e0a97b84 ci: Updated the archive names for homebrew and chocolatey 2025-09-14 17:46:04 -06:00
github-actions[bot]
77676efffa bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-14 23:43:36 +00:00
cfc296dd75 ci: Fixed typo in shell commands for Windows and Mac builds 2025-09-14 17:40:20 -06:00
github-actions[bot]
47d5159fd3 bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-14 23:29:34 +00:00
ec115d470a ci: Fixed linux-gnu target build 2025-09-14 17:28:49 -06:00
github-actions[bot]
c48301dead bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-14 23:24:13 +00:00
d6a2606b7d build: Reverted to previous build 2025-09-14 17:23:27 -06:00
7dc4995c9b ci: Don't use cross to compile ARM linux GNU 2025-09-14 17:21:44 -06:00
github-actions[bot]
419077b1a5 bump: version 0.0.4 → 0.0.5 [skip ci] 2025-09-14 23:14:46 +00:00
befd414bfe ci: Don't use cross to build linux-gnu 2025-09-14 17:11:36 -06:00
2b971602c3 docs: Updated changelog 2025-09-14 17:08:32 -06:00
ad1be71f41 Merge remote-tracking branch 'origin/main' 2025-09-14 17:08:16 -06:00
18ee7e4a9f ci: Fix potential typo in publishing of archives 2025-09-14 17:08:09 -06:00
github-actions[bot]
8e2d2a9b61 bump: version 0.0.3 → 0.0.4 [skip ci] 2025-09-14 23:07:39 +00:00
68bc150d30 ci: Added additional targets and fixed typo in artifact upload 2025-09-14 17:06:59 -06:00
github-actions[bot]
ef0a687031 bump: version 0.0.2 → 0.0.3 [skip ci] 2025-09-14 22:56:11 +00:00
d0278bfa65 ci: Fixed bug in the setting of environment variables 2025-09-14 16:52:17 -06:00
github-actions[bot]
f8b78a1325 bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-14 22:49:42 +00:00
35183f0e34 build: Set up test release 2025-09-14 16:47:35 -06:00
54bc914554 build: Set up test release 2025-09-14 16:47:13 -06:00
2a74aa3588 ci: Fix typo in GH release draft status 2025-09-14 16:45:25 -06:00
cc5d4cd45d ci: Test full GH release 2025-09-14 16:44:12 -06:00
c6bf2e10db build: gated the openssl crate to linux and mac MUSL builds only to fix Windows 2025-09-14 16:17:49 -06:00
b326e7ed4c ci: Fix typo in windows ARM target name 2025-09-14 16:06:35 -06:00
396cb4c3e4 ci: finalize the OS build choices 2025-09-14 16:01:17 -06:00
9963d9cd0b ci: install the generic libclang-dev 2025-09-14 15:53:47 -06:00
ba64e8141d ci: try again without i686 windows 2025-09-14 15:45:34 -06:00
53ce30e0b1 ci: change llvm version 2025-09-14 15:40:27 -06:00
9aea77661b ci: Install libclang using the official llvm installation method 2025-09-14 15:38:14 -06:00
e412f01c0a ci: add the universe repository for ubuntu 2025-09-14 15:28:13 -06:00
1e34429f52 ci: Force install the latest libclang 2025-09-14 15:25:09 -06:00
25c428ddbd ci: Attempt to use pre-built target binaries for aws-lc-sys 2025-09-14 15:21:52 -06:00
f19153b196 ci: enable the universe apt repository 2025-09-14 15:08:30 -06:00
8b3c9c822e ci: Testing manual install of bindgen 2025-09-14 15:03:56 -06:00
b5ad622798 ci: Test disabling aws-lc-sys crate 2025-09-14 14:40:54 -06:00
1063038c52 ci: Testing new build flow 2025-09-14 14:36:23 -06:00
github-actions[bot]
9a3c7d360c bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 22:21:25 +00:00
b956e95619 ci: test using cross for all compilation targets 2025-09-13 16:20:40 -06:00
github-actions[bot]
6c7f1c7ecd bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 22:15:20 +00:00
a8b3c185c6 ci: fix bug for mac installs 2025-09-13 16:14:33 -06:00
github-actions[bot]
9b5b2a603f bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 22:10:00 +00:00
84ffc8b71c ci: force install libclang deps 2025-09-13 16:08:37 -06:00
github-actions[bot]
06885e3d86 chore: bump Cargo.toml to 0.0.2 2025-09-13 22:01:03 +00:00
github-actions[bot]
77fe30b267 bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 22:01:00 +00:00
eb7c78327b ci: try build with cross 2025-09-13 16:00:09 -06:00
6fe33bffa0 testing 2025-09-13 15:41:56 -06:00
cd2c3d6c3d build: revert to previous dependencies stack 2025-09-13 15:08:12 -06:00
github-actions[bot]
77f5c7824c bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 20:36:37 +00:00
5d1cfffc74 ci: use cross for musl builds 2025-09-13 14:35:44 -06:00
github-actions[bot]
ee8bbad82c bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 20:07:44 +00:00
fcab216926 ci: explicitly help out aws-lc-sys with bindgen 2025-09-13 14:06:40 -06:00
github-actions[bot]
ce42f51606 bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 19:58:22 +00:00
c0755d8751 build: version fix 2025-09-13 13:56:35 -06:00
Alex Clarke
4a3058ce55 ci: explicitly tell bindgen what to use for each architecture 2025-09-13 13:54:53 -06:00
github-actions[bot]
79ee93cc06 bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 19:47:51 +00:00
08dea6a190 build: Reverted back to previous build 2025-09-13 13:47:00 -06:00
Alex Clarke
671429dfcf ci: Install bindgen on all Linux musl builds instead of just ARM architectures 2025-09-13 13:42:07 -06:00
github-actions[bot]
2cb9e0cd14 bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 19:34:56 +00:00
8c5dd6fd84 build: Specify proper version in cargo toml 2025-09-13 13:33:52 -06:00
63eef4a70a build: Updated Cargo lockfile 2025-09-13 13:02:54 -06:00
Alex Clarke
1be44cc855 ci: Installed OpenSSL tool chain for musl targets 2025-09-13 12:57:23 -06:00
Alex Clarke
8362359093 build: Added explicit "vendored" feature requirements for openssl 2025-09-13 12:55:05 -06:00
github-actions[bot]
c20bbf07d9 bump: version 0.0.1 → 0.0.2 [skip ci] 2025-09-13 18:43:38 +00:00
Alex Clarke
d94e611b12 ci: Install bindgen and clang on musl 2025-09-13 12:41:27 -06:00
e334b375da fix: Revert back hacky stuff so I can test with act now 2025-09-12 21:33:50 -06:00
github-actions[bot]
b95ac1a686 bump: version 0.0.6 → 0.1.0 [skip ci] 2025-09-13 02:43:12 +00:00
3a1752b148 fix: Attempting to use pre-generated bindgens for the aws-lc-sys library 2025-09-12 20:42:19 -06:00
github-actions[bot]
b29f33413a bump: version 0.0.5 → 0.1.0 [skip ci] 2025-09-13 02:36:35 +00:00
3c119595fa fix: Install openSSL differently to make this work 2025-09-12 20:35:44 -06:00
github-actions[bot]
7f8632b41d bump: version 0.0.4 → 0.1.0 [skip ci] 2025-09-13 02:31:21 +00:00
35 changed files with 3831 additions and 1477 deletions
@@ -0,0 +1,11 @@
### AI assistance (if any):
- List tools here and files touched by them
### Authorship & Understanding
- [ ] I wrote or heavily modified this code myself
- [ ] I understand how it works end-to-end
- [ ] I can maintain this code in the future
- [ ] No undisclosed AI-generated code was used
- [ ] If AI assistance was used, it is documented below
+153 -271
View File
@@ -8,9 +8,9 @@ on:
workflow_dispatch:
inputs:
bump_type:
description: 'Specify the type of version bump'
description: "Specify the type of version bump"
required: true
default: 'patch'
default: "patch"
type: choice
options:
- patch
@@ -46,7 +46,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.10'
python-version: "3.10"
- name: Install Commitizen
run: |
@@ -126,9 +126,7 @@ jobs:
- name: Generate changelog for the version bump
id: changelog
run: |
changelog=$(conventional-changelog -p angular -i CHANGELOG.md -s --from ${{ env.prev_version }} --to ${{ env.version }})
echo "$changelog" > artifacts/changelog.md
echo "changelog_body=$(cat artifacts/changelog.md)" >> $GITHUB_ENV
conventional-changelog -p conventionalcommits -i CHANGELOG.md --from ${{ env.prev_version }} --to v${{ env.version }} > artifacts/changelog.md
- name: Push changes
if: env.ACT != 'true'
@@ -151,30 +149,42 @@ jobs:
Cargo.toml
Cargo.lock
build-release-artifacts:
publish-github-release:
name: build-release
needs: [bump-version]
runs-on: ${{ matrix.job.os }}
runs-on: ${{ matrix.os }}
env:
RUST_BACKTRACE: 1
BUILD_CMD: cargo
strategy:
fail-fast: true
matrix:
# prettier-ignore
job:
- { name: "macOS-arm64", os: "macOS-latest", target: "aarch64-apple-darwin", artifact_suffix: "macos-arm64", use-cross: true }
- { name: "macOS-amd64", os: "macOS-latest", target: "x86_64-apple-darwin", artifact_suffix: "macos" }
- { name: "windows-amd64", os: "windows-latest", target: "x86_64-pc-windows-msvc", artifact_suffix: "windows" }
- { name: "windows-aarch64", os: "windows-latest", target: "aarch64-pc-windows-msvc", artifact_suffix: "windows-aarch64", use-cross: true }
- { name: "linux-gnu", os: "ubuntu-latest", target: "x86_64-unknown-linux-gnu", artifact_suffix: "linux" }
- { name: "linux-musl", os: "ubuntu-latest", target: "x86_64-unknown-linux-musl", artifact_suffix: "linux-musl", use-cross: true, }
- { name: "linux-aarch64-gnu", os: "ubuntu-latest", target: "aarch64-unknown-linux-gnu", artifact_suffix: "aarch64-gnu", use-cross: true, test-bin: "--bin gman" }
- { name: "linux-aarch64-musl", os: "ubuntu-latest", target: "aarch64-unknown-linux-musl", artifact_suffix: "aarch64-musl", use-cross: true, test-bin: "--bin gman" }
- { name: "linux-arm-gnu", os: "ubuntu-latest", target: "arm-unknown-linux-gnueabi", artifact_suffix: "armv6-gnu", use-cross: true, test-bin: "--bin gman" }
- { name: "linux-arm-musl", os: "ubuntu-latest", target: "arm-unknown-linux-musleabihf", artifact_suffix: "armv6-musl", use-cross: true, test-bin: "--bin gman" }
- { name: "linux-armv7-gnu", os: "ubuntu-latest", target: "armv7-unknown-linux-gnueabihf", artifact_suffix: "armv7-gnu", use-cross: true, test-bin: "--bin gman" }
- { name: "linux-armv7-musl", os: "ubuntu-latest", target: "armv7-unknown-linux-musleabihf", artifact_suffix: "armv7-musl", use-cross: true, test-bin: "--bin gman" }
rust: [stable]
include:
- target: aarch64-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: ""
- target: aarch64-apple-darwin
os: macos-latest
use-cross: true
cargo-flags: ""
- target: aarch64-pc-windows-msvc
os: windows-latest
use-cross: true
cargo-flags: ""
- target: x86_64-apple-darwin
os: macos-latest
cargo-flags: ""
- target: x86_64-pc-windows-msvc
os: windows-latest
cargo-flags: ""
- target: x86_64-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: ""
- target: x86_64-unknown-linux-gnu
os: ubuntu-latest
cargo-flags: ""
steps:
- name: Check if actor is repository owner
@@ -194,154 +204,6 @@ jobs:
git fetch --all
git pull
- name: Get bumped Cargo files (Act)
if: env.ACT == 'true'
uses: actions/download-artifact@v4
with:
name: bumped-cargo-files
path: ${{ github.workspace }}
- uses: actions/cache@v3
name: Cache Cargo registry
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('Cargo.lock') }}
- uses: actions/cache@v3
if: startsWith(matrix.job.name, 'linux-')
with:
path: ~/.cargo/bin
key: ${{ runner.os }}-cargo-bin-${{ hashFiles('.github/workflows/release.yml') }}
- uses: dtolnay/rust-toolchain@stable
name: Set Rust toolchain
with:
targets: ${{ matrix.job.target }}
- uses: taiki-e/setup-cross-toolchain-action@v1
with:
# NB: sets CARGO_BUILD_TARGET evar - do not need --target flag in build
target: ${{ matrix.job.target }}
- uses: taiki-e/install-action@cross
if: ${{ matrix.job.use-cross }}
- name: Installing needed Ubuntu dependencies
if: matrix.job.os == 'ubuntu-latest'
shell: bash
run: |
sudo apt-get -y update
case ${{ matrix.job.target }} in
arm*-linux-*) sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
aarch64-*-linux-*) sudo apt-get -y install gcc-aarch64-linux-gnu ;;
esac
- name: Install LLVM/Clang for bindgen
if: matrix.job.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y clang llvm-dev libclang-dev pkg-config musl-tools
echo "LIBCLANG_PATH=$(llvm-config --libdir)" >> $GITHUB_ENV
echo "BINDGEN_EXTRA_CLANG_ARGS=--sysroot=/usr --target=x86_64-unknown-linux-musl" >> $GITHUB_ENV
- name: Install LLVM/Clang for bindgen
if: matrix.job.os == 'macOS-latest'
run: |
brew update
brew install llvm
echo "LIBCLANG_PATH=$(brew --prefix llvm)/lib" >> $GITHUB_ENV
echo "LLVM_CONFIG_PATH=$(brew --prefix llvm)/bin/llvm-config" >> $GITHUB_ENV
- name: Install LLVM (libclang) for bindgen
if: matrix.job.os == 'windows-latest'
shell: pwsh
run: |
choco install llvm -y
# libclang.dll lives in <LLVM>\bin; point bindgen at it
$llvm = "C:\Program Files\LLVM"
echo "LIBCLANG_PATH=$llvm\bin" | Out-File -FilePath $env:GITHUB_ENV -Append
- name: Install the bindgen-cli
run: cargo install --force --locked bindgen-cli
- name: Build
run: cargo build --release --verbose --target=${{ matrix.job.target }} --locked
- name: Verify file
shell: bash
run: |
file target/${{ matrix.job.target }}/release/gman
- name: Test
if: matrix.job.target != 'aarch64-apple-darwin' && matrix.job.target != 'aarch64-pc-windows-msvc'
run: cargo test --release --verbose --target=${{ matrix.job.target }} ${{ matrix.job.test-bin }}
- name: Packaging final binary (Windows)
if: matrix.job.os == 'windows-latest'
shell: bash
run: |
cd target/${{ matrix.job.target }}/release
BINARY_NAME=gman.exe
if [ "${{ matrix.job.target }}" != "aarch64-pc-windows-msvc" ]; then
# strip the binary
strip $BINARY_NAME
fi
RELEASE_NAME=gman-${{ matrix.job.artifact_suffix }}
mkdir -p artifacts
tar czvf $RELEASE_NAME.tar.gz $BINARY_NAME
# create sha checksum files
certutil -hashfile $RELEASE_NAME.tar.gz sha256 | grep -E [A-Fa-f0-9]{64} > $RELEASE_NAME.sha256
echo "RELEASE_NAME=$RELEASE_NAME" >> $GITHUB_ENV
- name: Packaging final binary (macOS and Linux)
if: matrix.job.os != 'windows-latest'
shell: bash
run: |
# set the right strip executable
STRIP="strip";
case ${{ matrix.job.target }} in
arm*-linux-*) STRIP="arm-linux-gnueabihf-strip" ;;
aarch64-*-linux-*) STRIP="aarch64-linux-gnu-strip" ;;
esac;
cd target/${{ matrix.job.target }}/release
BINARY_NAME=gman
# strip the binary
"$STRIP" "$BINARY_NAME"
RELEASE_NAME=gman-${{ matrix.job.artifact_suffix }}
tar czvf $RELEASE_NAME.tar.gz $BINARY_NAME
# create sha checksum files
shasum -a 256 $RELEASE_NAME.tar.gz > $RELEASE_NAME.sha256
echo "RELEASE_NAME=$RELEASE_NAME" >> $GITHUB_ENV
- name: Add artifacts
run: |
mkdir -p artifacts
cp target/${{ matrix.job.target }}/release/${{ env.RELEASE_NAME }}.tar.gz artifacts/
cp target/${{ matrix.job.target }}/release/${{ env.RELEASE_NAME }}.sha256 artifacts/
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ env.RELEASE_NAME }}
path: artifacts
overwrite: true
publish-github-release:
name: publish-github-release
needs: [build-release-artifacts]
runs-on: ubuntu-latest
steps:
- name: Check if actor is repository owner
if: ${{ github.actor != github.repository_owner && env.ACT != 'true' }}
run: |
echo "You are not authorized to run this workflow."
exit 1
- name: Checkout repository
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
@@ -355,114 +217,145 @@ jobs:
git pull
- name: Set environment variables
shell: bash
run: |
release_version="$(cat ./artifacts/release-version)"
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
changelog_body="$(cat ./artifacts/changelog.md)"
echo "changelog_body=$(cat artifacts/changelog.md)" >> $GITHUB_ENV
- name: Validate release environment variables
run: |
echo "Release version: ${{ env.RELEASE_VERSION }}"
echo "Changelog body: ${{ env.changelog_body }}"
echo "Changelog body: $(cat artifacts/changelog.md)"
- name: Create a GitHub Release
- name: Get bumped Cargo files (Act)
if: env.ACT == 'true'
uses: actions/download-artifact@v4
with:
name: bumped-cargo-files
path: ${{ github.workspace }}
- uses: dtolnay/rust-toolchain@stable
name: Set Rust toolchain
with:
targets: ${{ matrix.target }}
- name: Install cross
if: matrix.use-cross
uses: taiki-e/install-action@v2
with:
tool: cross
- name: Overwrite build command env variable
if: matrix.use-cross
shell: bash
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
- name: Install latest LLVM/Clang
if: matrix.os == 'ubuntu-latest'
run: |
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
# omit the version to get the latest stable for your Ubuntu (24.04 "noble" on ubuntu-latest)
sudo ./llvm.sh all
# ensure libclang dev package is present (adjust the "22" if a newer major exists)
sudo apt-get update
sudo apt-get install -y libclang-20-dev libclang-dev
- name: Show Version Information (Rust, cargo, GCC)
shell: bash
run: |
gcc --version || true
rustup -V
rustup toolchain list
rustup default
cargo -V
rustc -V
- name: Build
shell: bash
run: $BUILD_CMD build --locked --release --target=${{ matrix.target }} ${{ matrix.cargo-flags }}
- name: Verify file
shell: bash
run: |
file target/${{ matrix.target }}/release/gman
- name: Test
if: matrix.target != 'aarch64-apple-darwin' && matrix.target != 'aarch64-pc-windows-msvc'
shell: bash
run: |
set -euxo pipefail
if [[ "${{ matrix.use-cross || 'false' }}" == 'true' ]]; then
cross test --release --locked --target=${{ matrix.target }} --verbose
else
cargo test --release --locked --target=${{ matrix.target }} --verbose
fi
- name: Build Archive
shell: bash
id: package
env:
target: ${{ matrix.target }}
run: |
set -euxo pipefail
bin=${GITHUB_REPOSITORY##*/}
dist_dir=`pwd`/dist
name=$bin-$target
executable=target/$target/release/$bin
if [[ "$RUNNER_OS" == "Windows" ]]; then
executable=$executable.exe
fi
mkdir $dist_dir
cp $executable $dist_dir
cd $dist_dir
if [[ "$RUNNER_OS" == "Windows" ]]; then
archive=$dist_dir/$name.zip
sha=$dist_dir/$name.sha256
7z a $archive *
certutil -hashfile $archive sha256 | grep -E [A-Fa-f0-9]{64} > $sha
echo "archive=dist/$name.zip" >> $GITHUB_OUTPUT
echo "sha=dist/$name.sha256" >> $GITHUB_OUTPUT
else
archive=$dist_dir/$name.tar.gz
sha=$dist_dir/$name.sha256
tar -czf $archive *
shasum -a 256 $archive > $sha
echo "archive=dist/$name.tar.gz" >> $GITHUB_OUTPUT
echo "sha=dist/$name.sha256" >> $GITHUB_OUTPUT
fi
- name: Publish Archive and SHA
if: env.ACT != 'true'
uses: softprops/action-gh-release@v1
uses: softprops/action-gh-release@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
files: |
artifacts/gman-macos-arm64.tar.gz
artifacts/gman-macos-arm64.sha256
artifacts/gman-macos.tar.gz
artifacts/gman-macos.sha256
artifacts/gman-windows.tar.gz
artifacts/gman-windows.sha256
artifacts/gman-windows-aarch64.tar.gz
artifacts/gman-windows-aarch64.sha256
artifacts/gman-linux.tar.gz
artifacts/gman-linux.sha256
artifacts/gman-linux-musl.tar.gz
artifacts/gman-linux-musl.sha256
artifacts/gman-aarch64-gnu.tar.gz
artifacts/gman-aarch64-gnu.sha256
artifacts/gman-aarch64-musl.tar.gz
artifacts/gman-aarch64-musl.sha256
artifacts/gman-armv6-gnu.tar.gz
artifacts/gman-armv6-gnu.sha256
artifacts/gman-armv6-musl.tar.gz
artifacts/gman-armv6-musl.sha256
artifacts/gman-armv7-gnu.tar.gz
artifacts/gman-armv7-gnu.sha256
artifacts/gman-armv7-musl.tar.gz
artifacts/gman-armv7-musl.sha256
${{ steps.package.outputs.archive }}
${{ steps.package.outputs.sha }}
tag_name: v${{ env.RELEASE_VERSION }}
name: 'v${{ env.RELEASE_VERSION }}'
body: ${{ env.changelog_body }}
draft: false
name: "v${{ env.RELEASE_VERSION }}"
body_path: artifacts/changelog.md
prerelease: false
- name: Add artifacts
shell: bash
run: |
[[ -d artifacts ]] || mkdir -p artifacts
cp ${{ steps.package.outputs.archive }} artifacts/
cp ${{ steps.package.outputs.sha }} artifacts/
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: artifacts-v${{ env.RELEASE_VERSION }}-${{ matrix.target }}
path: artifacts
overwrite: true
publish-chocolatey-package:
needs: [publish-github-release]
name: Publish Chocolatey Package
runs-on: windows-latest
steps:
- name: Check if actor is repository owner
if: ${{ github.actor != github.repository_owner && env.ACT != 'true' }}
run: |
echo "You are not authorized to run this workflow."
exit 1
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Get release artifacts
uses: actions/download-artifact@v4
with:
path: artifacts
merge-multiple: true
- name: Set release assets and version
shell: pwsh
run: |
# Read the first column from the SHA256 file
$windows_sha = Get-Content ./artifacts/gman-windows.sha256 | ForEach-Object { $_.Split(' ')[0] }
Add-Content -Path $env:GITHUB_ENV -Value "WINDOWS_SHA=$windows_sha"
# Read the release version from the release-version file
$release_version = Get-Content ./artifacts/release-version
Add-Content -Path $env:GITHUB_ENV -Value "RELEASE_VERSION=$release_version"
- name: Validate release environment variables
run: |
echo "Release SHA windows: ${{ env.WINDOWS_SHA }}"
echo "Release version: ${{ env.RELEASE_VERSION }}"
- name: Package and Publish package to Chocolatey
if: env.ACT != 'true'
run: |
mkdir ./deployment/chocolatey/tools
# Run packaging script
python "./deployment/chocolatey/packager.py" ${{ env.RELEASE_VERSION }} "./deployment/chocolatey/gman.nuspec.template" "./deployment/chocolatey/gman.nuspec" ${{ env.WINDOWS_SHA }}
python "./deployment/chocolatey/packager.py" ${{ env.RELEASE_VERSION }} "./deployment/chocolatey/chocolateyinstall.ps1.template" "./deployment/chocolatey/tools/chocolateyinstall.ps1" ${{ env.WINDOWS_SHA }}
# Publish to Chocolatey
cd ./deployment/chocolatey
choco pack
echo y | choco install gman -dv -s .
$version = gman --version
$version = $version -replace " ", "."
choco push $version.nupkg -s https://push.chocolatey.org/ --api-key ${{ secrets.CHOCOLATEY_API_KEY }};
publish-homebrew-formula:
needs: [publish-github-release]
name: Update Homebrew formulas
@@ -489,11 +382,11 @@ jobs:
shell: bash
run: |
# Set environment variables
macos_sha="$(cat ./artifacts/gman-macos.sha256 | awk '{print $1}')"
macos_sha="$(cat ./artifacts/gman-x86_64-apple-darwin.sha256 | awk '{print $1}')"
echo "MACOS_SHA=$macos_sha" >> $GITHUB_ENV
macos_sha_arm="$(cat ./artifacts/gman-macos-arm64.sha256 | awk '{print $1}')"
macos_sha_arm="$(cat ./artifacts/gman-aarch64-apple-darwin.sha256 | awk '{print $1}')"
echo "MACOS_SHA_ARM=$macos_sha_arm" >> $GITHUB_ENV
linux_sha="$(cat ./artifacts/gman-linux-musl.sha256 | awk '{print $1}')"
linux_sha="$(cat ./artifacts/gman-x86_64-unknown-linux-musl.sha256 | awk '{print $1}')"
echo "LINUX_SHA=$linux_sha" >> $GITHUB_ENV
release_version="$(cat ./artifacts/release-version)"
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
@@ -556,17 +449,6 @@ jobs:
git fetch --all
git pull
- uses: actions/cache@v3
name: Cache Cargo registry
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('Cargo.lock') }}
- uses: actions/cache@v3
with:
path: ~/.cargo/bin
key: ${{ runner.os }}-cargo-bin-${{ hashFiles('.github/workflows/release.yml') }}
- name: Install Rust stable
uses: dtolnay/rust-toolchain@stable
+90 -3
View File
@@ -5,7 +5,94 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.0.1] - 2025-09-10
## v0.4.1 (2026-03-20)
### Other
- Initial test release of the `gman` project.
### Feat
- Upgraded aws-lc-sys version to address high severity CWE-295
## v0.4.0 (2026-03-09)
### Feat
- Added 1password support
- sort local keys alphabetically when listing them
## v0.3.0 (2026-02-02)
### Fix
- Upgraded AWS dependencies to address CWE-20
- A critical security flaw was discovered that essentially had all local secrets be encrypted with an all-zero key
- Addressed XNonce::from_slice deprecation warning
- Secrets are now stored exactly as passed without newlines stripped
## v0.2.3 (2025-10-14)
### Refactor
- Refactored the library for gman so that it dynamically names config and password files to be used across any application
## v0.2.2 (2025-09-30)
### Refactor
- Environment variable interpolation in config file works globally, not based on type
## v0.2.1 (2025-09-30)
### Feat
- Environment variable interpolation in the Gman configuration file
### Fix
- Corrected tab completions for the provider flag
## v0.2.0 (2025-09-30)
### Feat
- gopass support
- Added command aliases to make the CLI more universal
- Added dynamic tab completions for the profile, providers, and the secrets in any given secret manager
- Users can now specify a default provider to use with each run config, so they don't need to explicitly specify which to use when wanting to run different applications.
## v0.1.0 (2025-09-17)
### Feat
- Subcommand to edit the config directly instead of having to find the file
### Fix
- improved user messages for local provider sync set up
- Pass the changelog to the GHA properly using a file
- Potential bug in changelog variable generation
- Revert back hacky stuff so I can test with act now
- Attempting to use pre-generated bindgens for the aws-lc-sys library
- Install openSSL differently to make this work
- Address edge case for unknown_musl targets
- Install LLVM prereqs for release flow
- Updated the release flow to install the external bindgen-cli
## v0.0.1 (2025-09-12)
### Feat
- Azure Key Vault support
- GCP Secret Manager support
- Full AWS SecretsManager support
- AWS Secrets Manager support
- Added two new flags to output where gman writes logs to and where it expects the config file to live
### Fix
- Made the vault file location more fault tolerant
- Attempting to maybe be a bit more explicit about config file handling to fix MacOS tests
### Refactor
- Refactor configuration structs directly into the provider definition to simplify validation, structs, and future extensions
- Made the creation of the log directories a bit more fault tolerant
- Renamed the provider field in a config file to type to make things a little easier to understand; also removed husky
+1 -1
View File
@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
d4udts@gmail.com.
alex.j.tusa@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
+9 -1
View File
@@ -48,7 +48,8 @@ cz commit
1. Clone this repo
2. Run `cargo test` to set up hooks
3. Make changes
4. Run the application using `make run` or `cargo run`
4. Run the application using `just run` or `just run`
- Install `just` (`cargo install just`) if you haven't already to use the [justfile](./justfile) in this project.
5. Commit changes. This will trigger pre-commit hooks that will run format, test and lint. If there are errors or
warnings from Clippy, please fix them.
6. Push your code to a new branch named after the feature/bug/etc. you're adding. This will trigger pre-push hooks that
@@ -75,6 +76,13 @@ Then, you can run workflows locally without having to commit and see if the GitH
act -W .github/workflows/release.yml --input_type bump=minor
```
## Authorship Policy
All code in this repository is written and reviewed by humans. AI-generated code (e.g., Copilot, ChatGPT,
Claude, etc.) is not permitted unless explicitly disclosed and approved.
Submissions must certify that the contributor understands and can maintain the code they submit.
## Questions? Reach out to me!
If you encounter any questions while developing G-Man, please don't hesitate to reach out to me at
alex.j.tusa@gmail.com. I'm happy to help contributors in any way I can, regardless of if they're new or experienced!
Generated
+1131 -776
View File
File diff suppressed because it is too large Load Diff
+27 -11
View File
@@ -1,10 +1,16 @@
[package]
name = "gman"
version = "0.0.4"
version = "0.4.1"
edition = "2024"
authors = ["Alex Clarke <alex.j.tusa@gmail.com>"]
description = "Universal secret management and injection tool"
keywords = ["cli", "secrets", "credentials", "command-line", "encryption"]
description = "Universal command line secret management and injection tool"
keywords = [
"cli",
"secrets-manager",
"secret-injection",
"command-runner",
"vault",
]
documentation = "https://github.com/Dark-Alex-17/gman"
repository = "https://github.com/Dark-Alex-17/gman"
homepage = "https://github.com/Dark-Alex-17/gman"
@@ -25,8 +31,8 @@ clap = { version = "4.5.47", features = [
"env",
"wrap_help",
] }
clap_complete = "4.5.57"
confy = { version = "1.0.0", default-features = false, features = [
clap_complete = { version = "4.5.57", features = ["unstable-dynamic"] }
confy = { version = "2.0.0", default-features = false, features = [
"yaml_conf",
] }
crossterm = "0.29.0"
@@ -47,24 +53,34 @@ indoc = "2.0.6"
regex = "1.11.2"
serde_yaml = "0.9.34"
tempfile = "3.22.0"
aws-sdk-secretsmanager = "1.88.0"
aws-sdk-secretsmanager = "1.98.0"
tokio = { version = "1.47.1", features = ["full"] }
aws-config = { version = "1.8.6", features = ["behavior-version-latest"] }
aws-config = { version = "1.8.12", features = ["behavior-version-latest"] }
async-trait = "0.1.89"
futures = "0.3.31"
gcloud-sdk = { version = "0.28.1", features = [
gcloud-sdk = { version = "0.28.5", features = [
"google-cloud-secretmanager-v1",
] }
crc32c = "0.6.8"
azure_identity = "0.27.0"
azure_security_keyvault_secrets = "0.6.0"
azure_core = "0.31.0"
azure_identity = "0.31.0"
azure_security_keyvault_secrets = "0.10.0"
aws-lc-sys = { version = "0.39.0", features = ["bindgen"] }
which = "8.0.0"
once_cell = "1.21.3"
[target.'cfg(all(target_os="linux", target_env="musl"))'.dependencies]
openssl = { version = "0.10", features = ["vendored"] }
[target.'cfg(target_os="macos")'.dependencies]
openssl = { version = "0.10", features = ["vendored"] }
[dev-dependencies]
pretty_assertions = "1.4.1"
proptest = "1.5.0"
assert_cmd = "2.0.16"
predicates = "3.1.2"
serial_test = "3.2.0"
[[bin]]
bench = false
-40
View File
@@ -1,40 +0,0 @@
#!make
default: run
.PHONY: test test-cov build run lint lint-fix fmt minimal-versions analyze release delete-tag
test:
@cargo test --all
## Run all tests with coverage - `cargo install cargo-tarpaulin`
test-cov:
@cargo tarpaulin
build: test
@cargo build --release
run:
@CARGO_INCREMENTAL=1 cargo fmt && make lint && cargo run
lint:
@find . | grep '\.\/src\/.*\.rs$$' | xargs touch && CARGO_INCREMENTAL=0 cargo clippy --all-targets --workspace
lint-fix:
@cargo fix
fmt:
@cargo fmt
minimal-versions:
@cargo +nightly update -Zdirect-minimal-versions
## Analyze for unsafe usage - `cargo install cargo-geiger`
analyze:
@cargo geiger
release:
@git tag -a ${V} -m "Release ${V}" && git push origin ${V}
delete-tag:
@git tag -d ${V} && git push --delete origin ${V}
+159 -23
View File
@@ -1,4 +1,4 @@
# G-Man - Universal Credential Manager
# G-Man - Universal Command Line Secret Manager and Injection Tool
![Check](https://github.com/Dark-Alex-17/gman/actions/workflows/check.yml/badge.svg)
![Test](https://github.com/Dark-Alex-17/gman/actions/workflows/test.yml/badge.svg)
@@ -14,8 +14,8 @@ files or sprinkling environment variables everywhere.
## Overview
`gman` acts as a universal wrapper for any command that needs credentials. Store your secretsAPI tokens, passwords,
certswith a provider, then either fetch them directly or run your command through `gman` to inject what it needs as
`gman` acts as a universal wrapper for any command that needs credentials. Store your secrets (e.g. API tokens, passwords,
certs, etc.) with a provider, then either fetch them directly or run your command through `gman` to inject what it needs as
environment variables, flags, or file content.
## Quick Examples: Before vs After
@@ -89,12 +89,16 @@ gman aws sts get-caller-identity
- [Features](#features)
- [Installation](#installation)
- [Configuration](#configuration)
- [Environment Variable Interpolation](#environment-variable-interpolation)
- [Providers](#providers)
- [Local](#provider-local)
- [AWS Secrets Manager](#provider-aws_secrets_manager)
- [GCP Secret Manager](#provider-gcp_secret_manager)
- [Azure Key Vault](#provider-azure_key_vault)
- [Gopass](#provider-gopass)
- [1Password](#provider-one_password)
- [Run Configurations](#run-configurations)
- [Specifying a Default Provider per Run Config](#specifying-a-default-provider-per-run-config)
- [Environment Variable Secret Injection](#environment-variable-secret-injection)
- [Inject Secrets via Command-Line Flags](#inject-secrets-via-command-line-flags)
- [Inject Secrets into Files](#inject-secrets-into-files)
@@ -133,33 +137,31 @@ To upgrade `gman` using Homebrew:
brew upgrade gman
```
### Chocolatey (Windows)
The G-Man Chocolatey package is located [here](https://community.chocolatey.org/packages/gman). Please note that validation
of Chocolatey packages take quite some time, and thus the package may not be available immediately after a new release.
### Scripts
#### Linux/MacOS (`bash`)
You can use the following command to run a bash script that downloads and installs the latest version of `gman` for your
OS (Linux/MacOS) and architecture (x86_64/arm64):
```powershell
choco install gman
# Some newer releases may require a version number, so you can specify it like so:
choco install gman --version=0.1.0
```shell
curl -fsSL https://raw.githubusercontent.com/Dark-Alex-17/gman/main/install_gman.sh | bash
```
To upgrade to the latest and greatest version of G-Man:
```powershell
choco upgrade gman
#### Windows/Linux/MacOS (`PowerShell`)
You can use the following command to run a PowerShell script that downloads and installs the latest version of `gman`
for your OS (Windows/Linux/MacOS) and architecture (x86_64/arm64):
# To upgrade to a specific version:
choco upgrade gman --version=0.1.0
```powershell
powershell -NoProfile -ExecutionPolicy Bypass -Command "iwr -useb https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.ps1 | iex"
```
### Manual
Binaries are available on the [releases](https://github.com/Dark-Alex-17/gman/releases) page for the following platforms:
| Platform | Architecture(s) |
|----------------|----------------------------|
| macOS | x86_64, arm64 |
| Linux GNU/MUSL | x86_64,armv6,armv7,aarch64 |
| Windows | x86_64,aarch64 |
| Platform | Architecture(s) |
|----------------|-----------------|
| macOS | x86_64, arm64 |
| Linux GNU/MUSL | x86_64, aarch64 |
| Windows | x86_64, aarch64 |
#### Windows Instructions
To use a binary from the releases page on Windows, do the following:
@@ -176,6 +178,22 @@ To use a binary from the releases page on Linux/MacOS, do the following:
3. Extract the binary with `tar -C /usr/local/bin -xzf gman-<arch>.tar.gz` (Note: This may require `sudo`)
4. Now you can run `gman`!
### Enable Tab Completion
`gman` supports shell tab completion for `bash`, `zsh`, and `fish`. To enable it, run the following command for your
shell:
```shell
# Bash
echo 'source <(COMPLETE=bash gman)' >> ~/.bashrc
# Zsh
echo 'source <(COMPLETE=zsh gman)' >> ~/.zshrc
# Fish
echo 'COMPLETE=fish gman | source' >> ~/.config/fish/config.fish
```
Then restart your shell or `source` the appropriate config file.
## Configuration
`gman` reads a YAML configuration file located at an OS-specific path:
@@ -226,6 +244,28 @@ providers:
run_configs: []
```
### Environment Variable Interpolation
The config file supports environment variable interpolation using `${VAR_NAME}` syntax. For example, to use an
AWS profile from your environment:
```yaml
providers:
- name: aws
type: aws_secrets_manager
aws_profile: ${AWS_PROFILE} # Uses the AWS_PROFILE env var
aws_region: us-east-1
```
Or to set a default profile to use when `AWS_PROFILE` is unset:
```yaml
providers:
- name: aws
type: aws_secrets_manager
aws_profile: ${AWS_PROFILE:-default} # Uses 'default' if AWS_PROFILE is unset
aws_region: us-east-1
```
## Providers
`gman` supports multiple providers for secret storage. The default provider is `local`, which stores secrets in an
encrypted file on your filesystem. The CLI and config format are designed to be extensible so new providers can be
@@ -247,7 +287,8 @@ documented and added without breaking existing setups. The following table shows
| [`hashicorp_vault`](https://www.hashicorp.com/en/products/vault) | 🕒 | | |
| [`azure_key_vault`](https://azure.microsoft.com/en-us/products/key-vault/) | ✅ | [Azure Key Vault](#provider-azure_key_vault) | |
| [`gcp_secret_manager`](https://cloud.google.com/security/products/secret-manager?hl=en) | ✅ | [GCP Secret Manager](#provider-gcp_secret_manager) | |
| [`1password`](https://1password.com/) | 🕒 | | |
| [`gopass`](https://www.gopass.pw/) | | | |
| [`1password`](https://1password.com/) | ✅ | [1Password](#provider-one_password) | |
| [`bitwarden`](https://bitwarden.com/) | 🕒 | | |
| [`dashlane`](https://www.dashlane.com/) | 🕒 | | Waiting for CLI support for adding secrets |
| [`lastpass`](https://www.lastpass.com/) | 🕒 | | |
@@ -390,6 +431,62 @@ Important notes:
- Ensure your identity has the necessary Key Vault permissions (RBAC such as `Key Vault Secrets User`/`Administrator`,
or appropriate access policies) for get/set/list/delete.
### Provider: `gopass`
The `gopass` provider uses [gopass](https://www.gopass.pw/) as the backing storage location for secrets.
- Optional: `store` (string) to specify a particular gopass store if you have multiple.
Configuration example:
```yaml
default_provider: gopass
providers:
- name: gopass
type: gopass
store: my-store # Optional; if omitted, uses the default configured gopass store
```
Important notes:
- Ensure `gopass` is installed and initialized on your system.
- Secrets are managed using gopass's native commands; `gman` acts as a wrapper to interface with gopass.
- Updates overwrite existing secrets
- If no store is specified, the default gopass store is used and `gman sync` will sync with all configured stores.
### Provider: `one_password`
The `one_password` provider uses the [1Password CLI (`op`)](https://developer.1password.com/docs/cli/) as the backing
storage location for secrets.
- Optional: `vault` (string) to specify which 1Password vault to use. If omitted, the default vault is used.
- Optional: `account` (string) to specify which 1Password account to use. Useful if you have multiple accounts. If
omitted, the default signed-in account is used.
Configuration example:
```yaml
default_provider: op
providers:
- name: op
type: one_password
vault: Production # Optional; if omitted, uses the default vault
account: my.1password.com # Optional; if omitted, uses the default account
```
Authentication:
- **Interactive**: Run `op signin` to sign in interactively.
- **Service Account**: Set the `OP_SERVICE_ACCOUNT_TOKEN` environment variable for non-interactive/CI usage.
- **Desktop App Integration**: If the 1Password desktop app is installed and configured, the CLI can use biometric
authentication (Touch ID, Windows Hello, etc.).
Important notes:
- Ensure the 1Password CLI (`op`) is installed on your system. Install instructions are at
https://developer.1password.com/docs/cli/get-started/.
- Secrets are stored as 1Password Password items. The item title is the secret name and the `password` field holds the
secret value.
- **Deletions are permanent. Deleted items are not archived.**
- `add` creates a new Password item. If an item with the same title already exists in the vault, `op` will create a
duplicate. Use `update` to change an existing secret value.
- `list` returns the titles of all items in the configured vault.
## Run Configurations
Run configurations (or "profiles") tell `gman` how to inject secrets into a command. Three modes of secret injection are
@@ -406,6 +503,45 @@ will error out and report that it could not find the run config with that name.
You can manually specify which run configuration to use with the `--profile` flag. Again, if no profile is found with
that name, `gman` will error out.
### Specifying a Default Provider per Run Config
All run configs also support the `provider` field, which lets you override the default provider for that specific
profile. This is useful if you have multiple providers configured and want to use a different one for a specific command
, but that provider may not be the `default_provider`, and you don't want to have to specify `--provider` on the command
line every time.
For Example:
```yaml
default_provider: local
run_configs:
# `gman aws ...` uses the `aws` provider instead of `local` if no
# `--provider` is given.
- name: aws
# Can be overridden by explicitly specifying a `--provider`
provider: aws
secrets:
- DB_USERNAME
- DB_PASSWORD
# `gman docker ...` uses the default_provider `local` because no
# `provider` is specified.
- name: docker
secrets:
- MY_APP_API_KEY
- MY_APP_DB_PASSWORD
# `gman managarr ...` uses the `local` provider; This is useful
# if you change the default provider to something else.
- name: managarr
provider: local
secrets:
- RADARR_API_KEY
- SONARR_API_KEY
files:
- /home/user/.config/managarr/config.yml
```
**Important Note:** Any run config with a `provider` field can be overridden by specifying `--provider` on the command
line.
### Environment Variable Secret Injection
By default, secrets are injected as environment variables. The two required fields are `name` and `secrets`.
@@ -558,7 +694,7 @@ gman managarr
### Multiple Providers and Switching
You can define multiple providerseven multiple of the same typeand switch between them per command.
You can define multiple providers (even multiple of the same type) and switch between them per command.
Example: two AWS Secrets Manager providers named `lab` and `prod`.
@@ -1,20 +0,0 @@
$ErrorActionPreference = 'Stop';
$PackageName = 'gman'
$toolsDir = "$(Split-Path -parent $MyInvocation.MyCommand.Definition)"
$url64 = 'https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-windows.tar.gz'
$checksum64 = '$hash_64'
$packageArgs = @{
packageName = $packageName
softwareName = $packageName
unzipLocation = $toolsDir
fileType = 'exe'
url = $url64
checksum = $checksum64
checksumType = 'sha256'
}
Install-ChocolateyZipPackage @packageArgs
$File = Get-ChildItem -File -Path $env:ChocolateyInstall\lib\$packageName\tools\ -Filter *.tar
Get-ChocolateyUnzip -fileFullPath $File.FullName -destination $env:ChocolateyInstall\lib\$packageName\tools\
@@ -1,53 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Read this before creating packages: https://chocolatey.org/docs/create-packages -->
<!-- It is especially important to read the above link to understand additional requirements when publishing packages to the community feed aka dot org (https://chocolatey.org/packages). -->
<!-- Test your packages in a test environment: https://github.com/chocolatey/chocolatey-test-environment -->
<!--
This is a nuspec. It mostly adheres to https://docs.nuget.org/create/Nuspec-Reference. Chocolatey uses a special version of NuGet.Core that allows us to do more than was initially possible. As such there are certain things to be aware of:
* the package xmlns schema url may cause issues with nuget.exe
* Any of the following elements can ONLY be used by choco tools - projectSourceUrl, docsUrl, mailingListUrl, bugTrackerUrl, packageSourceUrl, provides, conflicts, replaces
* nuget.exe can still install packages with those elements but they are ignored. Any authoring tools or commands will error on those elements
-->
<!-- You can embed software files directly into packages, as long as you are not bound by distribution rights. -->
<!-- * If you are an organization making private packages, you probably have no issues here -->
<!-- * If you are releasing to the community feed, you need to consider distribution rights. -->
<!-- Do not remove this test for UTF-8: if “Ω” doesnt appear as greek uppercase omega letter enclosed in quotation marks, you should use an editor that supports UTF-8, not this one. -->
<package xmlns="http://schemas.microsoft.com/packaging/2015/06/nuspec.xsd">
<metadata>
<!-- == PACKAGE SPECIFIC SECTION == -->
<id>gman</id>
<version>$version</version>
<!-- == SOFTWARE SPECIFIC SECTION == -->
<!-- This section is about the software itself -->
<title>G-Man</title>
<authors>Alex Clarke</authors>
<projectUrl>https://github.com/Dark-Alex-17/gman</projectUrl>
<licenseUrl>https://github.com/Dark-Alex-17/gman/blob/main/LICENSE</licenseUrl>
<requireLicenseAcceptance>true</requireLicenseAcceptance>
<projectSourceUrl>https://github.com/Dark-Alex-17/gman</projectSourceUrl>
<docsUrl>https://github.com/Dark-Alex-17/gman/blob/main/README.md</docsUrl>
<bugTrackerUrl>https://github.com/Dark-Alex-17/gman/issues</bugTrackerUrl>
<tags>cli cross-platform terminal credential-management secret-management rust</tags>
<summary>Universal command line credential management and injection tool</summary>
<description>
Universal command line credential management and injection tool.
**Usage**
To get started, run `gman --help` in a terminal.
For more [documentation and usage](https://github.com/Dark-Alex-17/gman/blob/main/README.md), see the [official repo](https://github.com/Dark-Alex-17/gman).
</description>
<releaseNotes>https://github.com/Dark-Alex-17/gman/releases/tag/v$version/</releaseNotes>
</metadata>
<files>
<!-- this section controls what actually gets packaged into the Chocolatey package -->
<file src="tools\**" target="tools" />
<!--Building from Linux? You may need this instead: <file src="tools/**" target="tools" />-->
</files>
</package>
-28
View File
@@ -1,28 +0,0 @@
import hashlib
import sys
from string import Template
sys.stdout.reconfigure(encoding='utf-8')
args = sys.argv
version = args[1].replace("v", "")
template_file_path = args[2]
generated_file_path = args[3]
# Deployment files
hash_64 = args[4].strip()
print("Generating formula")
print(" VERSION: %s" % version)
print(" TEMPLATE PATH: %s" % template_file_path)
print(" SAVING AT: %s" % generated_file_path)
print(" HASH: %s" % hash_64)
with open(template_file_path, "r", encoding="utf-8") as template_file:
template = Template(template_file.read())
substitute = template.safe_substitute(version=version, hash_64=hash_64)
print("\n================== Generated package file ==================\n")
print(substitute)
print("\n============================================================\n")
with open(generated_file_path, "w", encoding="utf-8") as generated_file:
generated_file.write(substitute)
+4 -4
View File
@@ -1,16 +1,16 @@
# Documentation: https://docs.brew.sh/Formula-Cookbook
# https://rubydoc.brew.sh/Formula
class GMan < Formula
class Gman < Formula
desc "Universal command line credential management and injection tool"
homepage "https://github.com/Dark-Alex-17/gman"
if OS.mac? and Hardware::CPU.arm?
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-macos-arm64.tar.gz"
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-aarch64-apple-darwin.tar.gz"
sha256 "$hash_mac_arm"
elsif OS.mac? and Hardware::CPU.intel?
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-macos.tar.gz"
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-x86_64-apple-darwin.tar.gz"
sha256 "$hash_mac"
else
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-linux-musl.tar.gz"
url "https://github.com/Dark-Alex-17/gman/releases/download/v$version/gman-x86_64-unknown-linux-musl.tar.gz"
sha256 "$hash_linux"
end
version "$version"
+35
View File
@@ -0,0 +1,35 @@
# Project task runner recipes (https://just.systems).
# Recipe doc strings (the comment line directly above each recipe) are shown
# by `just --list`.

# List all recipes
default:
    @just --list

# Format all files
[group: 'style']
fmt:
    @cargo fmt --all

alias clippy := lint

# Run Clippy to inspect all files
[group: 'style']
lint:
    @cargo clippy --all

alias clippy-fix := lint-fix

# NOTE(review): `cargo fix` applies rustc's suggestions, not Clippy's; if the
# intent is to auto-fix clippy lints this should likely be
# `cargo clippy --fix` — confirm before changing.
# Automatically fix clippy issues where possible
[group: 'style']
lint-fix:
    @cargo fix

# Run all tests
[group: 'test']
test:
    @cargo test --all

# Build and run the binary for the current system
run:
    @cargo run

# NOTE(review): confirm the `[arg(...)]` attribute is supported by the `just`
# version pinned for this repo; it is a relatively recent addition.
# Build the project for the current system architecture
[group: 'build']
[arg('build_type', pattern="debug|release")]
build build_type='debug':
    @cargo build {{ if build_type == "release" { "--release" } else { "" } }}
+139
View File
@@ -0,0 +1,139 @@
<#
gman installer (Windows/PowerShell 5+ and PowerShell 7)

Examples:
  powershell -NoProfile -ExecutionPolicy Bypass -Command "iwr -useb https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.ps1 | iex"
  # To pass parameters when installing straight from the web, build a script
  # block (piping into `iex` cannot forward arguments such as -Version):
  pwsh -c "& ([scriptblock]::Create((irm https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.ps1))) -Version vX.Y.Z"

Parameters:
  -Version <tag>  (default: latest)
  -BinDir <path>  (default: %LOCALAPPDATA%\gman\bin on Windows; ~/.local/bin on *nix PowerShell)
#>
[CmdletBinding()]
param(
    [string]$Version = $env:GMAN_VERSION,
    [string]$BinDir = $env:BIN_DIR
)

$Repo = 'Dark-Alex-17/gman'

function Write-Info($msg) { Write-Host "[gman-install] $msg" }
function Fail($msg) { Write-Error $msg; exit 1 }

# Detect platform via .NET so this also works on Windows PowerShell 5.1,
# which lacks the $IsWindows/$IsMacOS/$IsLinux automatic variables.
Add-Type -AssemblyName System.Runtime
$isWin = [System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform([System.Runtime.InteropServices.OSPlatform]::Windows)
$isMac = [System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform([System.Runtime.InteropServices.OSPlatform]::OSX)
$isLin = [System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform([System.Runtime.InteropServices.OSPlatform]::Linux)

if ($isWin) { $os = 'windows' }
elseif ($isMac) { $os = 'darwin' }
elseif ($isLin) { $os = 'linux' }
else { Fail "Unsupported OS" }

switch ([System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture) {
    'X64'   { $arch = 'x86_64' }
    'Arm64' { $arch = 'aarch64' }
    default { Fail "Unsupported arch: $([System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture)" }
}

if (-not $BinDir) {
    if ($isWin) {
        $BinDir = Join-Path $env:LOCALAPPDATA 'gman\bin'
    } else {
        # FIX: previously this assigned to $home, but $HOME is a read-only
        # automatic variable in PowerShell and the assignment throws.
        # Use a locally-named variable instead.
        $userHome = $env:HOME
        if (-not $userHome) { $userHome = (Get-Item -Path ~).FullName }
        $BinDir = Join-Path $userHome '.local/bin'
    }
}
New-Item -ItemType Directory -Force -Path $BinDir | Out-Null

Write-Info "Target: $os-$arch"

# Resolve release metadata (specific tag or latest) from the GitHub API.
$apiBase = "https://api.github.com/repos/$Repo/releases"
$relUrl = if ($Version) { "$apiBase/tags/$Version" } else { "$apiBase/latest" }
Write-Info "Fetching release: $relUrl"
try {
    $release = Invoke-RestMethod -UseBasicParsing -Headers @{ 'User-Agent' = 'gman-installer' } -Uri $relUrl -Method GET
} catch { Fail "Failed to fetch release metadata. $_" }
if (-not $release.assets) { Fail "No assets found in the release." }

# Candidate asset names in preference order for this platform/arch.
$candidates = @()
if ($os -eq 'windows') {
    if ($arch -eq 'x86_64') { $candidates += 'gman-x86_64-pc-windows-msvc.zip' }
    else { $candidates += 'gman-aarch64-pc-windows-msvc.zip' }
} elseif ($os -eq 'darwin') {
    if ($arch -eq 'x86_64') { $candidates += 'gman-x86_64-apple-darwin.tar.gz' }
    else { $candidates += 'gman-aarch64-apple-darwin.tar.gz' }
} elseif ($os -eq 'linux') {
    if ($arch -eq 'x86_64') {
        $candidates += 'gman-x86_64-unknown-linux-gnu.tar.gz'
        $candidates += 'gman-x86_64-unknown-linux-musl.tar.gz'
    } else {
        $candidates += 'gman-aarch64-unknown-linux-musl.tar.gz'
    }
} else {
    Fail "Unsupported OS for this installer: $os"
}

$asset = $null
foreach ($c in $candidates) {
    $asset = $release.assets | Where-Object { $_.name -eq $c } | Select-Object -First 1
    if ($asset) { break }
}
if (-not $asset) {
    Write-Error "No matching asset found for $os-$arch. Tried:"; $candidates | ForEach-Object { Write-Error "  - $_" }
    exit 1
}

Write-Info "Selected asset: $($asset.name)"
Write-Info "Download URL: $($asset.browser_download_url)"

# Download into a unique temp directory, then extract.
$tmp = New-Item -ItemType Directory -Force -Path ([IO.Path]::Combine([IO.Path]::GetTempPath(), "gman-$(Get-Random)"))
$archive = Join-Path $tmp.FullName 'asset'
try { Invoke-WebRequest -UseBasicParsing -Headers @{ 'User-Agent' = 'gman-installer' } -Uri $asset.browser_download_url -OutFile $archive } catch { Fail "Failed to download asset. $_" }

$extractDir = Join-Path $tmp.FullName 'extract'; New-Item -ItemType Directory -Force -Path $extractDir | Out-Null
if ($asset.name -match '\.zip$') {
    Add-Type -AssemblyName System.IO.Compression.FileSystem
    [System.IO.Compression.ZipFile]::ExtractToDirectory($archive, $extractDir)
} elseif ($asset.name -match '\.tar\.gz$' -or $asset.name -match '\.tgz$') {
    $tar = Get-Command tar -ErrorAction SilentlyContinue
    if ($tar) { & $tar.FullName -xzf $archive -C $extractDir }
    else { Fail "Asset is tar archive but 'tar' is not available." }
} else {
    # Unknown extension: try zip first, then fall back to tar auto-detection.
    try { Add-Type -AssemblyName System.IO.Compression.FileSystem; [System.IO.Compression.ZipFile]::ExtractToDirectory($archive, $extractDir) }
    catch {
        $tar = Get-Command tar -ErrorAction SilentlyContinue
        if ($tar) { & $tar.FullName -xf $archive -C $extractDir } else { Fail "Unknown archive format; neither zip nor tar workable." }
    }
}

# Locate the gman binary anywhere inside the extracted tree.
$bin = $null
Get-ChildItem -Recurse -File $extractDir | ForEach-Object {
    if ($isWin) { if ($_.Name -ieq 'gman.exe') { $bin = $_.FullName } }
    else { if ($_.Name -ieq 'gman') { $bin = $_.FullName } }
}
if (-not $bin) { Fail "Could not find gman binary inside the archive." }
if (-not $isWin) { try { & chmod +x -- $bin } catch {} }

$exec = if ($isWin) { 'gman.exe'} else { 'gman' }
$dest = Join-Path $BinDir $exec
Copy-Item -Force $bin $dest
Write-Info "Installed: $dest"

if ($isWin) {
    # Persist BinDir into the user PATH if neither the session PATH nor the
    # stored user PATH already contains it.
    $pathParts = ($env:Path -split ';') | Where-Object { $_ -ne '' }
    if ($pathParts -notcontains $BinDir) {
        $userPath = [Environment]::GetEnvironmentVariable('Path', 'User'); if (-not $userPath) { $userPath = '' }
        if (-not ($userPath -split ';' | Where-Object { $_ -eq $BinDir })) {
            $newUserPath = if ($userPath.Trim().Length -gt 0) { "$userPath;$BinDir" } else { $BinDir }
            [Environment]::SetEnvironmentVariable('Path', $newUserPath, 'User')
            Write-Info "Added to User PATH: $BinDir (restart shell to take effect)"
        }
    }
} else {
    if (-not ($env:PATH -split ':' | Where-Object { $_ -eq $BinDir })) {
        Write-Info "Note: $BinDir is not in PATH. Add it to your shell profile."
    }
}

Write-Info "Done. Try: gman --help"
+220
View File
@@ -0,0 +1,220 @@
#!/usr/bin/env bash
set -euo pipefail

# gman installer (Linux/macOS)
#
# Usage examples:
#   curl -fsSL https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.sh | bash
#   curl -fsSL https://raw.githubusercontent.com/Dark-Alex-17/gman/main/scripts/install_gman.sh | bash -s -- --version vX.Y.Z
#   BIN_DIR="$HOME/.local/bin" bash scripts/install_gman.sh
#
# Flags / Env:
#   --version <tag>   Release tag (default: latest). Or set GMAN_VERSION.
#   --bin-dir <dir>   Install directory (default: /usr/local/bin or ~/.local/bin). Or set BIN_DIR.
#
# Portability note: all regexes below use POSIX [[:space:]] classes instead of
# the GNU-only \s escape so the script works with BSD grep/sed/awk on macOS.

REPO="Dark-Alex-17/gman"
VERSION="${GMAN_VERSION:-}"
BIN_DIR="${BIN_DIR:-}"

usage() {
  echo "gman installer (Linux/macOS)"
  echo
  echo "Options:"
  echo "  --version <tag>   Release tag (default: latest)"
  echo "  --bin-dir <dir>   Install directory (default: /usr/local/bin or ~/.local/bin)"
  echo "  -h, --help        Show help"
}

while [[ $# -gt 0 ]]; do
  case "$1" in
    --version) VERSION="$2"; shift 2;;
    --bin-dir) BIN_DIR="$2"; shift 2;;
    -h|--help) usage; exit 0;;
    *) echo "Unknown argument: $1" >&2; usage; exit 2;;
  esac
done

# Prefer a system-wide install only when it is writable without sudo.
if [[ -z "${BIN_DIR}" ]]; then
  if [[ -w "/usr/local/bin" ]]; then
    BIN_DIR="/usr/local/bin"
  else
    BIN_DIR="${HOME}/.local/bin"
  fi
fi
mkdir -p "${BIN_DIR}"

log() {
  echo "[gman-install] $*"
}

# Abort with a clear message if a required command is missing.
need_cmd() {
  if ! command -v "$1" >/dev/null 2>&1; then
    echo "Error: required command '$1' not found" >&2
    exit 1
  fi
}

need_cmd uname
need_cmd mktemp
need_cmd tar

if command -v curl >/dev/null 2>&1; then
  DL=curl
elif command -v wget >/dev/null 2>&1; then
  DL=wget
else
  echo "Error: need curl or wget" >&2
  exit 1
fi

UNAME_OS=$(uname -s | tr '[:upper:]' '[:lower:]')
case "$UNAME_OS" in
  linux) OS=linux ;;
  darwin) OS=darwin ;;
  *) echo "Error: unsupported OS '$UNAME_OS'" >&2; exit 1;;
esac

UNAME_ARCH=$(uname -m)
case "$UNAME_ARCH" in
  x86_64|amd64) ARCH=x86_64 ;;
  aarch64|arm64) ARCH=aarch64 ;;
  *) echo "Error: unsupported arch '$UNAME_ARCH'" >&2; exit 1;;
esac

log "Target: ${OS}-${ARCH}"

API_BASE="https://api.github.com/repos/${REPO}/releases"
if [[ -z "${VERSION}" ]]; then
  RELEASE_URL="${API_BASE}/latest"
else
  RELEASE_URL="${API_BASE}/tags/${VERSION}"
fi

http_get() {
  if [[ "$DL" == "curl" ]]; then
    curl -fsSL -H 'User-Agent: gman-installer' "$1"
  else
    wget -qO- --header='User-Agent: gman-installer' "$1"
  fi
}

# Shell-local scratch dir; not exported, so child processes' TMPDIR is unchanged.
TMPDIR="$(mktemp -d)"
trap 'rm -rf "$TMPDIR"' EXIT

log "Fetching release metadata from $RELEASE_URL"
JSON="$TMPDIR/release.json"
if ! http_get "$RELEASE_URL" > "$JSON"; then
  echo "Error: failed to fetch release metadata. Check version tag." >&2
  exit 1
fi

# Candidate asset names in preference order for this platform/arch.
ASSET_CANDIDATES=()
if [[ "$OS" == "darwin" ]]; then
  if [[ "$ARCH" == "x86_64" ]]; then
    ASSET_CANDIDATES+=("gman-x86_64-apple-darwin.tar.gz")
  else
    ASSET_CANDIDATES+=("gman-aarch64-apple-darwin.tar.gz")
  fi
elif [[ "$OS" == "linux" ]]; then
  if [[ "$ARCH" == "x86_64" ]]; then
    # Best-effort glibc detection; default to the universally-safe musl build.
    LIBC="musl"
    if command -v getconf >/dev/null 2>&1 && getconf GNU_LIBC_VERSION >/dev/null 2>&1; then LIBC="gnu"; fi
    if ldd --version 2>&1 | grep -qi glibc; then LIBC="gnu"; fi
    if [[ "$LIBC" == "gnu" ]]; then
      ASSET_CANDIDATES+=("gman-x86_64-unknown-linux-gnu.tar.gz")
    fi
    ASSET_CANDIDATES+=("gman-x86_64-unknown-linux-musl.tar.gz")
  else
    ASSET_CANDIDATES+=("gman-aarch64-unknown-linux-musl.tar.gz")
  fi
else
  echo "Error: unsupported OS for this installer: $OS" >&2; exit 1
fi

# Scan the raw JSON for each candidate's "name" entry, then pull the
# browser_download_url that follows it inside the same asset object.
ASSET_NAME=""; ASSET_URL=""
for candidate in "${ASSET_CANDIDATES[@]}"; do
  # FIX: \s is a GNU extension; use [[:space:]] so BSD grep/sed match too.
  NAME=$(grep -oE '"name":[[:space:]]*"[^"]+"' "$JSON" | sed 's/"name":[[:space:]]*"//; s/"$//' | grep -Fx "$candidate" || true)
  if [[ -n "$NAME" ]]; then
    ASSET_NAME="$NAME"
    ASSET_URL=$(awk -v pat="$NAME" '
      BEGIN{ FS=":"; want=0 }
      /"name"/ {
        line=$0;
        gsub(/^[[:space:]]+|[[:space:]]+$/,"",line);
        # FIX: strip everything after the closing quote too (the old pattern
        # left the trailing comma, so line==pat could never match when
        # "name" was not the last field of the asset object).
        gsub(/.*"name"[[:space:]]*:[[:space:]]*"|".*/ ,"", line);
        want = (line==pat) ? 1 : 0;
        next
      }
      want==1 && /"browser_download_url"/ {
        u=$0;
        gsub(/^[[:space:]]+|[[:space:]]+$/,"",u);
        gsub(/.*"browser_download_url"[[:space:]]*:[[:space:]]*"|".*/ ,"", u);
        print u;
        exit
      }
    ' "$JSON")
    if [[ -n "$ASSET_URL" ]]; then break; fi
  fi
done

if [[ -z "$ASSET_URL" ]]; then
  echo "Error: no matching asset found for ${OS}-${ARCH}. Tried:" >&2
  for c in "${ASSET_CANDIDATES[@]}"; do echo "  - $c" >&2; done
  exit 1
fi

log "Selected asset: $ASSET_NAME"
log "Download URL: $ASSET_URL"

ARCHIVE="$TMPDIR/asset"
if [[ "$DL" == "curl" ]]; then
  curl -fL -H 'User-Agent: gman-installer' "$ASSET_URL" -o "$ARCHIVE"
else
  wget -q --header='User-Agent: gman-installer' "$ASSET_URL" -O "$ARCHIVE"
fi

WORK="$TMPDIR/work"; mkdir -p "$WORK"
EXTRACTED_DIR="$WORK/extracted"; mkdir -p "$EXTRACTED_DIR"
if tar -tf "$ARCHIVE" >/dev/null 2>&1; then
  # FIX: -xf auto-detects compression; -xzf would fail on an uncompressed tar
  # even though the -tf probe above succeeded.
  tar -xf "$ARCHIVE" -C "$EXTRACTED_DIR"
else
  if command -v unzip >/dev/null 2>&1; then
    unzip -q "$ARCHIVE" -d "$EXTRACTED_DIR"
  else
    echo "Error: unknown archive format; install 'unzip'" >&2
    exit 1
  fi
fi

# Locate the gman binary anywhere inside the extracted tree.
BIN_PATH=""
while IFS= read -r -d '' f; do
  base=$(basename "$f")
  if [[ "$base" == "gman" ]]; then
    BIN_PATH="$f"
    break
  fi
done < <(find "$EXTRACTED_DIR" -type f -print0)

if [[ -z "$BIN_PATH" ]]; then
  echo "Error: could not find 'gman' binary in the archive" >&2
  exit 1
fi

chmod +x "$BIN_PATH"
install -m 0755 "$BIN_PATH" "${BIN_DIR}/gman"
log "Installed: ${BIN_DIR}/gman"

case ":$PATH:" in
  *":${BIN_DIR}:"*) ;;
  *)
    log "Note: ${BIN_DIR} is not in PATH. Add it, e.g.:"
    log "  export PATH=\"${BIN_DIR}:\$PATH\""
    ;;
esac

log "Done. Try: gman --help"
+205 -35
View File
@@ -1,21 +1,22 @@
use crate::command::preview_command;
use anyhow::{Context, Result, anyhow};
use clap_complete::CompletionCandidate;
use futures::future::join_all;
use gman::config::{Config, RunConfig};
use gman::providers::SecretProvider;
use gman::config::{Config, RunConfig, load_config};
use log::{debug, error};
use regex::Regex;
use std::collections::HashMap;
use std::ffi::OsString;
use std::ffi::{OsStr, OsString};
use std::fs;
use std::path::PathBuf;
use std::process::Command;
use tokio::runtime::Handle;
const ARG_FORMAT_PLACEHOLDER_KEY: &str = "{{key}}";
const ARG_FORMAT_PLACEHOLDER_VALUE: &str = "{{value}}";
pub async fn wrap_and_run_command(
secrets_provider: &mut dyn SecretProvider,
provider: Option<String>,
config: &Config,
tokens: Vec<OsString>,
profile_name: Option<String>,
@@ -36,6 +37,9 @@ pub async fn wrap_and_run_command(
.find(|c| c.name.as_deref() == Some(run_config_profile_name))
});
if let Some(run_cfg) = run_config_opt {
let mut provider_config =
config.extract_provider_config(provider.or(run_cfg.provider.clone()))?;
let secrets_provider = provider_config.extract_provider();
let secrets_result_futures = run_cfg
.secrets
.as_ref()
@@ -163,7 +167,7 @@ fn generate_files_secret_injections(
secrets: HashMap<&str, String>,
run_config: &RunConfig,
) -> Result<Vec<(PathBuf, String, String)>> {
let re = Regex::new(r"\{\{(.+)\}\}")?;
let re = Regex::new(r"\{\{(.+)}}")?;
let mut results = Vec::new();
for file in run_config
.files
@@ -251,34 +255,82 @@ pub fn parse_args(
Ok(args)
}
/// Shell-completion callback: suggest run-config names starting with `current`.
///
/// Loads the (env-interpolated) config and returns matching run-config names.
/// Any failure — unreadable config, no `run_configs` section — degrades to an
/// empty suggestion list, and nameless entries are skipped rather than
/// panicking: a panic inside shell completion would surface as garbage in the
/// user's shell instead of a useful error.
pub fn run_config_completer(current: &OsStr) -> Vec<CompletionCandidate> {
    let cur = current.to_string_lossy();
    let Ok(config) = load_config(true) else {
        return vec![];
    };
    config
        .run_configs
        .unwrap_or_default()
        .iter()
        // FIX: the original `.expect("run config has no name")` could panic
        // mid-completion; unnamed entries are simply ignored instead.
        .filter_map(|rc| rc.name.as_deref())
        .filter(|name| name.starts_with(&*cur))
        .map(CompletionCandidate::new)
        .collect()
}
/// Shell-completion callback: suggest provider names starting with `current`.
///
/// Loads the (env-interpolated) config and returns matching provider names.
/// A failed config load degrades to an empty list, and nameless providers are
/// skipped rather than panicking inside the completion hook.
pub fn provider_completer(current: &OsStr) -> Vec<CompletionCandidate> {
    let cur = current.to_string_lossy();
    let Ok(config) = load_config(true) else {
        return vec![];
    };
    config
        .providers
        .iter()
        // FIX: the original `.expect("run config has no name")` both carried a
        // copy-pasted message (this iterates providers, not run configs) and
        // could panic mid-completion; unnamed providers are ignored instead.
        .filter_map(|pc| pc.name.as_deref())
        .filter(|name| name.starts_with(&*cur))
        .map(CompletionCandidate::new)
        .collect()
}
/// Shell-completion callback: suggest secret names starting with `current`.
///
/// Resolves the *default* provider (the `None` argument below means "use
/// `default_provider` from the config"), lists its secrets, and filters by
/// prefix. Every failure path — config load, provider resolution, listing —
/// degrades to an empty suggestion list so completion never errors out.
pub fn secrets_completer(current: &OsStr) -> Vec<CompletionCandidate> {
    let cur = current.to_string_lossy();
    match load_config(true) {
        Ok(config) => {
            let mut provider_config = match config.extract_provider_config(None) {
                Ok(pc) => pc,
                Err(_) => return vec![],
            };
            let secrets_provider = provider_config.extract_provider();
            // NOTE(review): Handle::current() panics when no Tokio runtime is
            // active on this thread, and block_in_place requires a
            // multi-threaded runtime — confirm the completion entry point
            // always runs inside the application's #[tokio::main] runtime.
            let h = Handle::current();
            // Bridge the async list_secrets() call into this synchronous
            // completer without deadlocking the runtime's worker thread.
            tokio::task::block_in_place(|| h.block_on(secrets_provider.list_secrets()))
                .unwrap_or_default()
                .into_iter()
                .filter(|s| s.starts_with(&*cur))
                .map(CompletionCandidate::new)
                .collect()
        }
        Err(_) => vec![],
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::cli::generate_files_secret_injections;
use gman::config::get_config_file_path;
use gman::config::{Config, RunConfig};
use pretty_assertions::{assert_eq, assert_str_eq};
use serial_test::serial;
use std::collections::HashMap;
use std::env as std_env;
use std::ffi::OsString;
struct DummyProvider;
#[async_trait::async_trait]
impl SecretProvider for DummyProvider {
fn name(&self) -> &'static str {
"Dummy"
}
async fn get_secret(&self, key: &str) -> Result<String> {
Ok(format!("{}_VAL", key))
}
async fn set_secret(&self, _key: &str, _value: &str) -> Result<()> {
Ok(())
}
async fn delete_secret(&self, _key: &str) -> Result<()> {
Ok(())
}
async fn sync(&mut self) -> Result<()> {
Ok(())
}
}
use tempfile::tempdir;
#[test]
fn test_generate_files_secret_injections() {
@@ -290,6 +342,7 @@ mod tests {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["testing/SOME-secret".to_string()]),
files: Some(vec![file_path.clone()]),
flag: None,
@@ -309,6 +362,7 @@ mod tests {
fn test_parse_args_insert_and_append() {
let run_config = RunConfig {
name: Some("docker".into()),
provider: None,
secrets: Some(vec!["api_key".into()]),
files: None,
flag: Some("-e".into()),
@@ -347,10 +401,8 @@ mod tests {
#[tokio::test]
async fn test_wrap_and_run_command_no_profile() {
let cfg = Config::default();
let mut dummy = DummyProvider;
let prov: &mut dyn SecretProvider = &mut dummy;
let tokens = vec![OsString::from("echo"), OsString::from("hi")];
let err = wrap_and_run_command(prov, &cfg, tokens, None, true)
let err = wrap_and_run_command(None, &cfg, tokens, None, true)
.await
.unwrap_err();
assert!(err.to_string().contains("No run profile found"));
@@ -361,6 +413,7 @@ mod tests {
// Create a config with a matching run profile for command "echo"
let run_cfg = RunConfig {
name: Some("echo".into()),
provider: None,
secrets: Some(vec!["api_key".into()]),
files: None,
flag: None,
@@ -371,14 +424,131 @@ mod tests {
run_configs: Some(vec![run_cfg]),
..Config::default()
};
let mut dummy = DummyProvider;
let prov: &mut dyn SecretProvider = &mut dummy;
// Capture stderr for dry_run preview
let tokens = vec![OsString::from("echo"), OsString::from("hello")];
// Best-effort: ensure function does not error under dry_run
let res = wrap_and_run_command(prov, &cfg, tokens, None, true).await;
assert!(res.is_ok());
// Not asserting output text to keep test platform-agnostic
let err = wrap_and_run_command(None, &cfg, tokens, None, true)
.await
.expect_err("expected failed secret resolution in dry_run");
assert!(err.to_string().contains("Failed to fetch"));
}
#[test]
#[serial]
fn test_run_config_completer_filters_by_prefix() {
let td = tempdir().unwrap();
let xdg = td.path().join("xdg");
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
let cfg_path = get_config_file_path().unwrap();
let app_dir = cfg_path.parent().unwrap().to_path_buf();
fs::create_dir_all(&app_dir).unwrap();
let yaml = indoc::indoc! {
"---
default_provider: local
providers:
- name: local
type: local
run_configs:
- name: echo
secrets: [API_KEY]
- name: docker
secrets: [DB_PASSWORD]
- name: aws
secrets: [AWS_ACCESS_KEY_ID]
"
};
fs::write(app_dir.join("config.yml"), yaml).unwrap();
let out = run_config_completer(OsStr::new("do"));
assert_eq!(out.len(), 1);
// Compare via debug string to avoid depending on crate internals
let rendered = format!("{:?}", &out[0]);
assert!(rendered.contains("docker"), "got: {}", rendered);
unsafe { std_env::remove_var("XDG_CONFIG_HOME") };
}
#[test]
#[serial]
fn test_provider_completer_lists_matching_providers() {
let td = tempdir().unwrap();
let xdg = td.path().join("xdg");
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
let cfg_path = get_config_file_path().unwrap();
let app_dir = cfg_path.parent().unwrap().to_path_buf();
fs::create_dir_all(&app_dir).unwrap();
let yaml = indoc::indoc! {
"---
default_provider: local
providers:
- name: local
type: local
- name: prod
type: local
run_configs:
- name: echo
secrets: [API_KEY]
"
};
fs::write(app_dir.join("config.yml"), yaml).unwrap();
// Prefix 'p' should match only 'prod'
let out = provider_completer(OsStr::new("p"));
assert_eq!(out.len(), 1);
let rendered = format!("{:?}", &out[0]);
assert!(rendered.contains("prod"), "got: {}", rendered);
// Empty prefix returns at least both providers
let out_all = provider_completer(OsStr::new(""));
assert!(out_all.len() >= 2);
unsafe { std_env::remove_var("XDG_CONFIG_HOME") };
}
#[tokio::test(flavor = "multi_thread")]
#[serial]
async fn test_secrets_completer_filters_keys_by_prefix() {
let td = tempdir().unwrap();
let xdg = td.path().join("xdg");
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
let cfg_path = get_config_file_path().unwrap();
let app_dir = cfg_path.parent().unwrap().to_path_buf();
fs::create_dir_all(&app_dir).unwrap();
let yaml = indoc::indoc! {
"---
default_provider: local
providers:
- name: local
type: local
run_configs:
- name: echo
secrets: [API_KEY]
"
};
fs::write(app_dir.join("config.yml"), yaml).unwrap();
// Seed a minimal vault with keys (values are irrelevant for listing)
let vault_yaml = indoc::indoc! {
"---
API_KEY: dummy
DB_PASSWORD: dummy
AWS_ACCESS_KEY_ID: dummy
"
};
fs::write(app_dir.join("vault.yml"), vault_yaml).unwrap();
let out = secrets_completer(OsStr::new("AWS"));
assert_eq!(out.len(), 1);
let rendered = format!("{:?}", &out[0]);
assert!(rendered.contains("AWS_ACCESS_KEY_ID"), "got: {}", rendered);
let out2 = secrets_completer(OsStr::new("DB_"));
assert_eq!(out2.len(), 1);
let rendered2 = format!("{:?}", &out2[0]);
assert!(rendered2.contains("DB_PASSWORD"), "got: {}", rendered2);
unsafe { std_env::remove_var("XDG_CONFIG_HOME") };
}
}
+98 -24
View File
@@ -1,19 +1,25 @@
use crate::cli::provider_completer;
use crate::cli::run_config_completer;
use crate::cli::secrets_completer;
use anyhow::{Context, Result};
use clap::Subcommand;
use clap::{
CommandFactory, Parser, ValueEnum, crate_authors, crate_description, crate_name, crate_version,
};
use std::ffi::OsString;
use anyhow::{Context, Result};
use clap::Subcommand;
use clap_complete::{ArgValueCompleter, CompleteEnv};
use crossterm::execute;
use crossterm::terminal::{LeaveAlternateScreen, disable_raw_mode};
use gman::config::{get_config_file_path, load_config};
use gman::config::{Config, get_config_file_path, load_config};
use std::ffi::OsString;
use std::io::{self, IsTerminal, Read, Write};
use std::panic::PanicHookInfo;
use crate::cli::wrap_and_run_command;
use crate::utils::persist_config_file;
use dialoguer::Editor;
use std::panic;
use std::process::exit;
use validator::Validate;
mod cli;
mod command;
@@ -46,11 +52,11 @@ struct Cli {
output: Option<OutputFormat>,
/// Specify the secret provider to use (defaults to 'default_provider' in config (usually 'local'))
#[arg(long, value_enum, global = true, env = "GMAN_PROVIDER")]
#[arg(long, global = true, env = "GMAN_PROVIDER", add = ArgValueCompleter::new(provider_completer))]
provider: Option<String>,
/// Specify a run profile to use when wrapping a command
#[arg(long, short)]
#[arg(long, short, add = ArgValueCompleter::new(run_config_completer))]
profile: Option<String>,
/// Output the command that will be run instead of executing it
@@ -72,14 +78,17 @@ struct Cli {
#[derive(Subcommand, Clone, Debug)]
enum Commands {
/// Add a secret to the configured secret provider
#[clap(aliases = &["set", "create"])]
Add {
/// Name of the secret to store
name: String,
},
/// Decrypt a secret and print the plaintext
#[clap(alias = "show")]
Get {
/// Name of the secret to retrieve
#[arg(add = ArgValueCompleter::new(secrets_completer))]
name: String,
},
@@ -87,33 +96,39 @@ enum Commands {
/// If a provider does not support updating secrets, this command will return an error.
Update {
/// Name of the secret to update
#[arg(add = ArgValueCompleter::new(secrets_completer))]
name: String,
},
/// Delete a secret from the configured secret provider
#[clap(aliases = &["remove", "rm"])]
Delete {
/// Name of the secret to delete
#[arg(add = ArgValueCompleter::new(secrets_completer))]
name: String,
},
/// List all secrets stored in the configured secret provider (if supported by the provider)
/// If a provider does not support listing secrets, this command will return an error.
#[clap(alias = "ls")]
List {},
/// Sync secrets with remote storage (if supported by the provider)
Sync {},
// TODO: Remove once all users have migrated their local vaults
/// Migrate local vault secrets to the current secure encryption format.
/// This is only needed if you have secrets encrypted with older versions of gman.
/// Only works with the local provider.
Migrate {},
/// Open and edit the config file in the default text editor
Config {},
/// Wrap the provided command and supply it with secrets as environment variables or as
/// configured in a corresponding run profile
#[command(external_subcommand)]
External(Vec<OsString>),
/// Generate shell completion scripts
Completions {
/// The shell to generate the script for
#[arg(value_enum)]
shell: clap_complete::Shell,
},
}
#[tokio::main]
@@ -124,6 +139,7 @@ async fn main() -> Result<()> {
panic::set_hook(Box::new(|info| {
panic_hook(info);
}));
CompleteEnv::with_factory(Cli::command).complete();
let cli = Cli::parse();
if cli.show_log_path {
@@ -140,7 +156,7 @@ async fn main() -> Result<()> {
exit(1);
}
let config = load_config()?;
let config = load_config(true)?;
let mut provider_config = config.extract_provider_config(cli.provider.clone())?;
let secrets_provider = provider_config.extract_provider();
@@ -149,7 +165,7 @@ async fn main() -> Result<()> {
let plaintext =
read_all_stdin().with_context(|| "unable to read plaintext from stdin")?;
secrets_provider
.set_secret(&name, plaintext.trim_end())
.set_secret(&name, &plaintext)
.await
.map(|_| match cli.output {
Some(_) => (),
@@ -180,7 +196,7 @@ async fn main() -> Result<()> {
let plaintext =
read_all_stdin().with_context(|| "unable to read plaintext from stdin")?;
secrets_provider
.update_secret(&name, plaintext.trim_end())
.update_secret(&name, &plaintext)
.await
.map(|_| match cli.output {
Some(_) => (),
@@ -220,6 +236,27 @@ async fn main() -> Result<()> {
}
}
}
Commands::Config {} => {
let uninterpolated_config = load_config(false)?;
let config_yaml = serde_yaml::to_string(&uninterpolated_config)
.with_context(|| "failed to serialize existing configuration")?;
let new_config = Editor::new()
.edit(&config_yaml)
.with_context(|| "unable to process user changes")?;
if new_config.is_none() {
println!("✗ No changes made to configuration");
return Ok(());
}
let new_config = new_config.unwrap();
let new_config: Config = serde_yaml::from_str(&new_config)
.with_context(|| "failed to parse updated configuration")?;
new_config
.validate()
.with_context(|| "updated configuration is invalid")?;
persist_config_file(&new_config)?;
println!("✓ Configuration updated successfully");
}
Commands::Sync {} => {
secrets_provider.sync().await.map(|_| {
if cli.output.is_none() {
@@ -227,14 +264,51 @@ async fn main() -> Result<()> {
}
})?;
}
Commands::External(tokens) => {
wrap_and_run_command(secrets_provider, &config, tokens, cli.profile, cli.dry_run)
.await?;
// TODO: Remove once all users have migrated their local vaults
Commands::Migrate {} => {
use gman::providers::SupportedProvider;
use gman::providers::local::LocalProvider;
let provider_config_for_migrate =
config.extract_provider_config(cli.provider.clone())?;
let local_provider: LocalProvider = match provider_config_for_migrate.provider_type {
SupportedProvider::Local { provider_def } => provider_def,
_ => {
anyhow::bail!("The migrate command only works with the local provider.");
}
};
println!("Migrating vault secrets to current secure format...");
let result = local_provider.migrate_vault().await?;
if result.total == 0 {
println!("Vault is empty, nothing to migrate.");
} else {
println!(
"Migration complete: {} total, {} migrated, {} already current",
result.total, result.migrated, result.already_current
);
if !result.failed.is_empty() {
eprintln!("\n⚠ Failed to migrate {} secret(s):", result.failed.len());
for (key, error) in &result.failed {
eprintln!(" - {}: {}", key, error);
}
}
if result.migrated > 0 {
println!(
"\n✓ Successfully migrated {} secret(s) to the secure format.",
result.migrated
);
} else if result.failed.is_empty() {
println!("\n✓ All secrets are already using the current secure format.");
}
}
}
Commands::Completions { shell } => {
let mut cmd = Cli::command();
let bin_name = cmd.get_name().to_string();
clap_complete::generate(shell, &mut cmd, bin_name, &mut io::stdout());
Commands::External(tokens) => {
wrap_and_run_command(cli.provider, &config, tokens, cli.profile, cli.dry_run).await?;
}
}
+25 -1
View File
@@ -1,3 +1,5 @@
use anyhow::{Context, Result};
use gman::config::{Config, get_config_file_path};
use log::LevelFilter;
use log4rs::append::console::ConsoleAppender;
use log4rs::append::file::FileAppender;
@@ -44,7 +46,7 @@ pub fn init_logging_config() -> log4rs::Config {
pub fn get_log_path() -> PathBuf {
let base_dir = dirs::cache_dir().unwrap_or_else(env::temp_dir);
let log_dir = base_dir.join("gman");
let log_dir = base_dir.join(env!("CARGO_CRATE_NAME"));
let dir = if let Err(e) = fs::create_dir_all(&log_dir) {
eprintln!(
@@ -60,6 +62,28 @@ pub fn get_log_path() -> PathBuf {
dir.join("gman.log")
}
/// Write `config` back to disk at the location the application loads it from.
///
/// Paths ending in a YAML extension (`.yml`/`.yaml`, case-insensitive) are
/// serialized with serde_yaml and written directly, creating parent
/// directories as needed; any other path is delegated to confy's managed
/// store for this crate.
///
/// # Errors
/// Returns an error if the config path cannot be determined, serialization
/// fails, or the file/store write fails.
pub fn persist_config_file(config: &Config) -> Result<()> {
    let config_path =
        get_config_file_path().with_context(|| "unable to determine config file path")?;
    let is_yaml = config_path
        .extension()
        .and_then(|s| s.to_str())
        .is_some_and(|e| e.eq_ignore_ascii_case("yml") || e.eq_ignore_ascii_case("yaml"));
    if !is_yaml {
        // Non-YAML path: let confy handle its default config location.
        return confy::store(env!("CARGO_CRATE_NAME"), "config", config)
            .with_context(|| "failed to save updated config via confy");
    }
    if let Some(parent) = config_path.parent() {
        fs::create_dir_all(parent)?;
    }
    let rendered = serde_yaml::to_string(config)?;
    fs::write(&config_path, rendered)
        .with_context(|| format!("failed to write {}", config_path.display()))
}
#[cfg(test)]
mod tests {
use crate::utils::get_log_path;
+157 -22
View File
@@ -11,6 +11,7 @@
//!
//! let rc = RunConfig{
//! name: Some("echo".into()),
//! provider: None,
//! secrets: Some(vec!["api_key".into()]),
//! files: None,
//! flag: None,
@@ -19,16 +20,20 @@
//! };
//! rc.validate().unwrap();
//! ```
use crate::calling_app_name;
use crate::providers::local::LocalProvider;
use crate::providers::{SecretProvider, SupportedProvider};
use anyhow::{Context, Result};
use collections::HashSet;
use log::debug;
use regex::Regex;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use serde_with::skip_serializing_none;
use std::borrow::Cow;
use std::path::PathBuf;
use std::{env, fs};
use std::{collections, env, fs};
use validator::{Validate, ValidationError};
#[skip_serializing_none]
@@ -43,6 +48,7 @@ use validator::{Validate, ValidationError};
pub struct RunConfig {
#[validate(required)]
pub name: Option<String>,
pub provider: Option<String>,
#[validate(required)]
pub secrets: Option<Vec<String>>,
pub files: Option<Vec<PathBuf>>,
@@ -144,6 +150,7 @@ impl ProviderConfig {
match &mut self.provider_type {
SupportedProvider::Local { provider_def } => {
debug!("Using local secret provider");
provider_def.runtime_provider_name = self.name.clone();
provider_def
}
SupportedProvider::AwsSecretsManager { provider_def } => {
@@ -158,6 +165,14 @@ impl ProviderConfig {
debug!("Using Azure Key Vault provider");
provider_def
}
SupportedProvider::Gopass { provider_def } => {
debug!("Using Gopass provider");
provider_def
}
SupportedProvider::OnePassword { provider_def } => {
debug!("Using 1Password provider");
provider_def
}
}
}
}
@@ -181,6 +196,7 @@ impl ProviderConfig {
/// ```
#[derive(Debug, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)]
#[validate(schema(function = "default_provider_exists"))]
#[validate(schema(function = "providers_names_are_unique"))]
pub struct Config {
pub default_provider: Option<String>,
#[validate(length(min = 1))]
@@ -210,6 +226,22 @@ fn default_provider_exists(config: &Config) -> Result<(), ValidationError> {
}
}
/// Schema-level validator: all named providers in `config.providers` must
/// carry distinct names. Wired into `Config` via the
/// `#[validate(schema(function = "providers_names_are_unique"))]` attribute.
fn providers_names_are_unique(config: &Config) -> Result<(), ValidationError> {
    let mut seen = HashSet::new();
    // `insert` returns false on a repeat, so `any` stops at the first duplicate.
    let has_duplicate = config
        .providers
        .iter()
        .filter_map(|provider| provider.name.as_ref())
        .any(|name| !seen.insert(name));
    if has_duplicate {
        let mut err = ValidationError::new("duplicate_provider_name");
        err.message = Some(Cow::Borrowed(
            "Provider names must be unique; duplicate found",
        ));
        return Err(err);
    }
    Ok(())
}
impl Default for Config {
fn default() -> Self {
Self {
@@ -241,48 +273,49 @@ impl Config {
/// Discover the default password file for the local provider.
///
/// On most systems this resolves to `~/.gman_password` when the file
/// exists, otherwise `None`.
pub fn local_provider_password_file() -> Option<PathBuf> {
let candidate = dirs::home_dir().map(|p| p.join(".gman_password"));
match candidate {
Some(p) if p.exists() => Some(p),
_ => None,
}
/// On most systems this resolves to `~/.<executable_name>_password`
pub fn local_provider_password_file() -> PathBuf {
dirs::home_dir()
.map(|p| p.join(format!(".{}_password", calling_app_name())))
.expect("unable to determine home directory for local provider password file")
}
}
/// Load and validate the application configuration.
///
/// This uses the `confy` crate to load the configuration from a file
/// (e.g. `~/.config/gman/config.yaml`). If the file does
/// (e.g. `~/.config/<executable_name>/config.yaml`). If the file does
/// not exist, a default configuration is created and saved.
///
/// ```no_run
/// # use gman::config::load_config;
/// let config = load_config().unwrap();
/// // Load config with environment variable interpolation enabled
/// let config = load_config(true).unwrap();
/// println!("loaded config: {:?}", config);
/// ```
pub fn load_config() -> Result<Config> {
pub fn load_config(interpolate: bool) -> Result<Config> {
let xdg_path = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from);
let mut config: Config = if let Some(base) = xdg_path.as_ref() {
let app_dir = base.join("gman");
let app_dir = base.join(calling_app_name());
let yml = app_dir.join("config.yml");
let yaml = app_dir.join("config.yaml");
if yml.exists() || yaml.exists() {
let load_path = if yml.exists() { &yml } else { &yaml };
let content = fs::read_to_string(load_path)
let mut content = fs::read_to_string(load_path)
.with_context(|| format!("failed to read config file '{}'", load_path.display()))?;
if interpolate {
content = interpolate_env_vars(&content);
}
let cfg: Config = serde_yaml::from_str(&content).with_context(|| {
format!("failed to parse YAML config at '{}'", load_path.display())
})?;
cfg
} else {
confy::load("gman", "config")?
load_confy_config(interpolate)?
}
} else {
confy::load("gman", "config")?
load_confy_config(interpolate)?
};
config.validate()?;
@@ -296,26 +329,128 @@ pub fn load_config() -> Result<Config> {
ref mut provider_def,
} = p.provider_type
&& provider_def.password_file.is_none()
&& let Some(local_password_file) = Config::local_provider_password_file()
&& Config::local_provider_password_file().exists()
{
provider_def.password_file = Some(local_password_file);
provider_def.password_file = Some(Config::local_provider_password_file());
}
});
Ok(config)
}
/// Returns the configuration file path that `confy` will use for this app.
/// Load the application config from confy's default location for this
/// executable, optionally running environment-variable interpolation over the
/// raw file contents before YAML parsing.
fn load_confy_config(interpolate: bool) -> Result<Config> {
    let load_path = confy::get_configuration_file_path(&calling_app_name(), "config")?;
    let raw = fs::read_to_string(&load_path)
        .with_context(|| format!("failed to read config file '{}'", load_path.display()))?;
    // Interpolation happens on the raw text so `${VAR}` works anywhere in the
    // YAML document, not just in string fields.
    let content = if interpolate {
        interpolate_env_vars(&raw)
    } else {
        raw
    };
    let cfg: Config = serde_yaml::from_str(&content)
        .with_context(|| format!("failed to parse YAML config at '{}'", load_path.display()))?;
    Ok(cfg)
}
/// Returns the configuration file path that `confy` will use
/// Returns the configuration file path in use.
///
/// When `XDG_CONFIG_HOME` is set, both the `.yml` and `.yaml` spellings are
/// probed under `<XDG_CONFIG_HOME>/<executable_name>/`; otherwise confy's
/// default location is returned. This block previously contained stale
/// pre-refactor lines (a shadowed `let dir` and a duplicate return) left over
/// from a merge — they are removed here.
pub fn get_config_file_path() -> Result<PathBuf> {
    if let Some(base) = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from) {
        let dir = base.join(calling_app_name());
        let yml = dir.join("config.yml");
        let yaml = dir.join("config.yaml");
        // Check each spelling once instead of calling `exists()` twice.
        if yml.exists() {
            return Ok(yml);
        }
        if yaml.exists() {
            return Ok(yaml);
        }
        // Prefer .yml if creating anew
        return Ok(dir.join("config.yml"));
    }
    Ok(confy::get_configuration_file_path(
        &calling_app_name(),
        "config",
    )?)
}
/// Expand `${VAR}` and `${VAR:-default}` placeholders in `s` from the process
/// environment.
///
/// Every substituted value (environment value or default) is first run
/// through a scrubbing regex that strips whitespace, braces, brackets,
/// parentheses, carets, pipes, backslashes, backticks and quote characters.
/// Input containing no placeholder is returned unchanged.
pub fn interpolate_env_vars(s: &str) -> String {
// Owned copy of the *entire* input; only used by the fallback branch below.
let result = s.to_string();
// Characters that must never leak from an env var into the parsed YAML.
// NOTE(review): both regexes are recompiled on every call — consider
// hoisting into a `LazyLock<Regex>` if this ends up on a hot path.
let scrubbing_regex = Regex::new(r#"[\s{}^()\[\]\\|`'"]+"#).unwrap();
// Group 1 = variable name, optional group 2 = ":-default" suffix.
let var_regex = Regex::new(r"\$\{(.*?)(:-.+)?}").unwrap();
var_regex
.replace_all(s, |caps: &regex::Captures<'_>| {
if let Some(mat) = caps.get(1) {
if let Ok(value) = env::var(mat.as_str()) {
// Variable is set: substitute its scrubbed value.
return scrubbing_regex.replace_all(&value, "").to_string();
} else if let Some(default_value) = caps.get(2) {
// Variable unset but a ":-default" was given: scrub the default.
return scrubbing_regex
.replace_all(
default_value
.as_str()
.strip_prefix(":-")
.expect("unable to strip ':-' prefix from default value"),
"",
)
.to_string();
}
}
// NOTE(review): when the variable is unset and no default is given, the
// placeholder is replaced by the scrubbed *whole input string* (`result`),
// not by the matched placeholder text — confirm this is intentional.
scrubbing_regex.replace_all(&result, "").to_string()
})
.to_string()
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_str_eq;
use serial_test::serial;

// Input that does not match the `${...}` placeholder syntax must pass
// through completely untouched.
#[test]
fn test_interpolate_env_vars_defaults_to_original_string_if_not_in_yaml_interpolation_format() {
let var = interpolate_env_vars("TEST_VAR_INTERPOLATION_NON_YAML");
assert_str_eq!(var, "TEST_VAR_INTERPOLATION_NON_YAML");
}

// Serialized (#[serial]) because it mutates the process environment, which
// is shared across the test binary's threads.
#[test]
#[serial]
fn test_interpolate_env_vars_scrubs_all_unnecessary_characters() {
// SAFETY justification for the unsafe blocks: env mutation is unsafe on
// this toolchain; #[serial] keeps env-mutating tests mutually exclusive.
unsafe {
env::set_var(
"TEST_VAR_INTERPOLATION_UNNECESSARY_CHARACTERS",
r#"""
`"'https://dontdo:this@testing.com/query?test=%20query#results'"` {([\|])}
"""#,
)
};
let var = interpolate_env_vars("${TEST_VAR_INTERPOLATION_UNNECESSARY_CHARACTERS}");
// Whitespace, quotes, backticks, braces, brackets, pipes and backslashes
// are stripped; the URL itself survives intact.
assert_str_eq!(
var,
"https://dontdo:this@testing.com/query?test=%20query#results"
);
unsafe { env::remove_var("TEST_VAR_INTERPOLATION_UNNECESSARY_CHARACTERS") };
}

// The `${VAR:-default}` default value is scrubbed exactly like a real
// environment value would be.
#[test]
#[serial]
fn test_interpolate_env_vars_scrubs_all_unnecessary_characters_for_default_values() {
let var = interpolate_env_vars(
r#"${UNSET:-`"'https://dontdo:this@testing.com/query?test=%20query#results'"` {([\|])}}"#,
);
assert_str_eq!(
var,
"https://dontdo:this@testing.com/query?test=%20query#results"
);
}

// Despite the test name, the assertion shows scrubbing does NOT apply to
// plain input: without a `${...}` placeholder the string is untouched.
#[test]
fn test_interpolate_env_vars_scrubs_all_unnecessary_characters_from_non_environment_variable() {
let var =
interpolate_env_vars("https://dontdo:this@testing.com/query?test=%20query#results");
assert_str_eq!(
var,
"https://dontdo:this@testing.com/query?test=%20query#results"
);
}
}
+77 -35
View File
@@ -20,17 +20,16 @@
//! The `config` and `providers` modules power the CLI. They can be embedded
//! in other programs, but many functions interact with the user or the
//! filesystem. Prefer `no_run` doctests for those.
use anyhow::{Context, Result, anyhow, bail};
use argon2::{
Algorithm, Argon2, Params, Version,
password_hash::{SaltString, rand_core::RngCore},
};
use argon2::{Algorithm, Argon2, Params, Version, password_hash::rand_core::RngCore};
use base64::{Engine as _, engine::general_purpose::STANDARD as B64};
use chacha20poly1305::{
Key, XChaCha20Poly1305, XNonce,
aead::{Aead, KeyInit, OsRng},
};
use secrecy::{ExposeSecret, SecretString};
use std::path::PathBuf;
use zeroize::Zeroize;
/// Configuration structures and helpers used by the CLI and library.
pub mod config;
@@ -41,8 +40,8 @@ pub(crate) const HEADER: &str = "$VAULT";
pub(crate) const VERSION: &str = "v1";
pub(crate) const KDF: &str = "argon2id";
pub(crate) const ARGON_M_COST_KIB: u32 = 19_456;
pub(crate) const ARGON_T_COST: u32 = 2;
pub(crate) const ARGON_M_COST_KIB: u32 = 65_536;
pub(crate) const ARGON_T_COST: u32 = 3;
pub(crate) const ARGON_P: u32 = 1;
pub(crate) const SALT_LEN: usize = 16;
@@ -59,7 +58,7 @@ fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
.hash_password_into(password.expose_secret().as_bytes(), salt, &mut key_bytes)
.map_err(|e| anyhow!("argon2 into error: {:?}", e))?;
let key = *Key::from_slice(&key_bytes);
let key: Key = key_bytes.into();
key_bytes.zeroize();
Ok(key)
}
@@ -82,20 +81,28 @@ fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Result<String> {
let password = password.into();
let salt = SaltString::generate(&mut OsRng);
if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let mut salt = [0u8; SALT_LEN];
OsRng.fill_bytes(&mut salt);
let mut nonce_bytes = [0u8; NONCE_LEN];
OsRng.fill_bytes(&mut nonce_bytes);
let key = derive_key(&password, salt.as_str().as_bytes())?;
let mut key = derive_key(&password, &salt)?;
let cipher = XChaCha20Poly1305::new(&key);
let aad = format!("{};{}", HEADER, VERSION);
let aad = format!(
"{};{};{};m={},t={},p={}",
HEADER, VERSION, KDF, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P
);
let nonce = XNonce::from_slice(&nonce_bytes);
let nonce: XNonce = nonce_bytes.into();
let mut pt = plaintext.as_bytes().to_vec();
let ct = cipher
.encrypt(
nonce,
&nonce,
chacha20poly1305::aead::Payload {
msg: &pt,
aad: aad.as_bytes(),
@@ -113,13 +120,14 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
m = ARGON_M_COST_KIB,
t = ARGON_T_COST,
p = ARGON_P,
salt = B64.encode(salt.as_str().as_bytes()),
salt = B64.encode(salt),
nonce = B64.encode(nonce_bytes),
ct = B64.encode(&ct),
);
drop(cipher);
let _ = key;
key.zeroize();
salt.zeroize();
nonce_bytes.zeroize();
Ok(env)
@@ -130,6 +138,9 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
/// Returns the original plaintext on success or an error if the password is
/// wrong, the envelope was tampered with, or the input is malformed.
///
/// This function supports both the current format (with KDF params in AAD) and
/// the legacy format (without KDF params in AAD) for backwards compatibility.
///
/// Example
/// ```
/// use gman::{encrypt_string, decrypt_string};
@@ -143,6 +154,10 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
pub fn decrypt_string(password: impl Into<SecretString>, envelope: &str) -> Result<String> {
let password = password.into();
if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let parts: Vec<&str> = envelope.split(';').collect();
if parts.len() < 7 {
bail!("invalid envelope format");
@@ -176,37 +191,66 @@ pub fn decrypt_string(password: impl Into<SecretString>, envelope: &str) -> Resu
let nonce_b64 = parts[5].strip_prefix("nonce=").context("missing nonce")?;
let ct_b64 = parts[6].strip_prefix("ct=").context("missing ct")?;
let salt_bytes = B64.decode(salt_b64).context("bad salt b64")?;
let mut nonce_bytes = B64.decode(nonce_b64).context("bad nonce b64")?;
let mut salt_bytes = B64.decode(salt_b64).context("bad salt b64")?;
let nonce_bytes = B64.decode(nonce_b64).context("bad nonce b64")?;
let mut ct = B64.decode(ct_b64).context("bad ct b64")?;
if nonce_bytes.len() != NONCE_LEN {
bail!("nonce length mismatch");
}
let key = derive_key(&password, &salt_bytes)?;
let mut key = derive_key(&password, &salt_bytes)?;
let cipher = XChaCha20Poly1305::new(&key);
let aad = format!("{};{}", HEADER, VERSION);
let nonce = XNonce::from_slice(&nonce_bytes);
let pt = cipher
.decrypt(
nonce,
chacha20poly1305::aead::Payload {
msg: &ct,
aad: aad.as_bytes(),
},
)
.map_err(|_| anyhow!("decryption failed (wrong password or corrupted data)"))?;
let aad_new = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
let aad_legacy = format!("{};{}", HEADER, VERSION);
nonce_bytes.zeroize();
let mut nonce_arr: [u8; NONCE_LEN] = nonce_bytes
.try_into()
.map_err(|_| anyhow!("invalid nonce length"))?;
let nonce: XNonce = nonce_arr.into();
let decrypt_result = cipher.decrypt(
&nonce,
chacha20poly1305::aead::Payload {
msg: &ct,
aad: aad_new.as_bytes(),
},
);
let mut pt = match decrypt_result {
Ok(pt) => pt,
Err(_) => cipher
.decrypt(
&nonce,
chacha20poly1305::aead::Payload {
msg: &ct,
aad: aad_legacy.as_bytes(),
},
)
.map_err(|_| anyhow!("decryption failed (wrong password or corrupted data)"))?,
};
let s = String::from_utf8(pt.clone()).context("plaintext not valid UTF-8")?;
key.zeroize();
salt_bytes.zeroize();
nonce_arr.zeroize();
ct.zeroize();
pt.zeroize();
let s = String::from_utf8(pt).context("plaintext not valid UTF-8")?;
Ok(s)
}
/// Name of the currently running executable (file stem, no extension).
///
/// Used to namespace config paths and password files so renamed or forked
/// binaries do not collide with each other.
///
/// # Panics
/// Panics when the executable path cannot be resolved or its name is not
/// valid UTF-8.
pub(crate) fn calling_app_name() -> String {
    let exe: PathBuf = std::env::current_exe().expect("unable to get current exe path");
    let stem = exe.file_stem().and_then(|s| s.to_str());
    stem.map(str::to_owned)
        .expect("executable name not valid UTF-8")
}
#[cfg(test)]
mod tests {
use super::*;
@@ -237,12 +281,10 @@ mod tests {
}
#[test]
fn empty_password() {
fn empty_password_rejected() {
let pw = SecretString::new("".into());
let msg = "hello";
let env = encrypt_string(pw.clone(), msg).unwrap();
let out = decrypt_string(pw, &env).unwrap();
assert_eq!(msg, out);
assert!(encrypt_string(pw.clone(), msg).is_err());
}
#[test]
@@ -264,7 +306,7 @@ mod tests {
let mut ct = base64::engine::general_purpose::STANDARD
.decode(ct_b64)
.unwrap();
ct[0] ^= 0x01; // Flip a bit
ct[0] ^= 0x01;
let new_ct_b64 = base64::engine::general_purpose::STANDARD.encode(&ct);
let new_ct_part = format!("ct={}", new_ct_b64);
parts[6] = &new_ct_part;
+8 -14
View File
@@ -1,11 +1,13 @@
use crate::providers::SecretProvider;
use anyhow::{Context, Result};
use azure_identity::DefaultAzureCredential;
use azure_core::credentials::TokenCredential;
use azure_identity::DeveloperToolsCredential;
use azure_security_keyvault_secrets::models::SetSecretParameters;
use azure_security_keyvault_secrets::{ResourceExt, SecretClient};
use futures::TryStreamExt;
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
use std::sync::Arc;
use validator::Validate;
#[skip_serializing_none]
@@ -40,12 +42,8 @@ impl SecretProvider for AzureKeyVaultProvider {
}
async fn get_secret(&self, key: &str) -> Result<String> {
let body = self
.get_client()?
.get_secret(key, "", None)
.await?
.into_body()
.await?;
let response = self.get_client()?.get_secret(key, None).await?;
let body = response.into_model()?;
body.value
.with_context(|| format!("Secret '{}' not found", key))
@@ -60,8 +58,7 @@ impl SecretProvider for AzureKeyVaultProvider {
self.get_client()?
.set_secret(key, params.try_into()?, None)
.await?
.into_body()
.await?;
.into_model()?;
Ok(())
}
@@ -77,10 +74,7 @@ impl SecretProvider for AzureKeyVaultProvider {
}
async fn list_secrets(&self) -> Result<Vec<String>> {
let mut pager = self
.get_client()?
.list_secret_properties(None)?
.into_stream();
let mut pager = self.get_client()?.list_secret_properties(None)?;
let mut secrets = Vec::new();
while let Some(props) = pager.try_next().await? {
let name = props.resource_id()?.name;
@@ -93,7 +87,7 @@ impl SecretProvider for AzureKeyVaultProvider {
impl AzureKeyVaultProvider {
fn get_client(&self) -> Result<SecretClient> {
let credential = DefaultAzureCredential::new()?;
let credential: Arc<dyn TokenCredential> = DeveloperToolsCredential::new(None)?;
let client = SecretClient::new(
format!(
"https://{}.vault.azure.net",
+19 -8
View File
@@ -1,3 +1,4 @@
use crate::calling_app_name;
use anyhow::{Context, Result, anyhow};
use chrono::Utc;
use dialoguer::Confirm;
@@ -25,7 +26,7 @@ pub fn sync_and_push(opts: &SyncOpts<'_>) -> Result<()> {
opts.validate()
.with_context(|| "invalid git sync options")?;
let commit_message = format!("chore: sync @ {}", Utc::now().to_rfc3339());
let config_dir = confy::get_configuration_file_path("gman", "vault")
let config_dir = confy::get_configuration_file_path(&calling_app_name(), "vault")
.with_context(|| "get config dir")?
.parent()
.map(Path::to_path_buf)
@@ -37,7 +38,7 @@ pub fn sync_and_push(opts: &SyncOpts<'_>) -> Result<()> {
fs::create_dir_all(&repo_dir).with_context(|| format!("create {}", repo_dir.display()))?;
// Move the default vault into the repo dir on first sync so only vault.yml is tracked.
let default_vault = confy::get_configuration_file_path("gman", "vault")
let default_vault = confy::get_configuration_file_path(&calling_app_name(), "vault")
.with_context(|| "get default vault path")?;
let repo_vault = repo_dir.join("vault.yml");
if default_vault.exists() && !repo_vault.exists() {
@@ -116,8 +117,7 @@ fn resolve_git_username(git: &Path, name: Option<&String>) -> Result<String> {
return Ok(name.to_string());
}
run_git_config_capture(git, &["config", "user.name"])
.with_context(|| "unable to determine git username")
default_git_username(git)
}
fn resolve_git_email(git: &Path, email: Option<&String>) -> Result<String> {
@@ -126,11 +126,10 @@ fn resolve_git_email(git: &Path, email: Option<&String>) -> Result<String> {
return Ok(email.to_string());
}
run_git_config_capture(git, &["config", "user.email"])
.with_context(|| "unable to determine git user email")
default_git_email(git)
}
fn resolve_git(override_path: Option<&PathBuf>) -> Result<PathBuf> {
pub(in crate::providers) fn resolve_git(override_path: Option<&PathBuf>) -> Result<PathBuf> {
debug!("Resolving git executable");
if let Some(p) = override_path {
return Ok(p.to_path_buf());
@@ -141,7 +140,19 @@ fn resolve_git(override_path: Option<&PathBuf>) -> Result<PathBuf> {
Ok(PathBuf::from("git"))
}
fn ensure_git_available(git: &Path) -> Result<()> {
/// Look up the git-configured default user name via `git config user.name`.
pub(in crate::providers) fn default_git_username(git: &Path) -> Result<String> {
    debug!("Checking for default git username");
    let lookup = run_git_config_capture(git, &["config", "user.name"]);
    lookup.with_context(|| "unable to determine git user name")
}
/// Look up the git-configured default user email via `git config user.email`.
pub(in crate::providers) fn default_git_email(git: &Path) -> Result<String> {
    // Bug fix: this debug line previously said "username" (copy/paste from
    // default_git_username) while the function actually checks the email.
    debug!("Checking for default git email");
    run_git_config_capture(git, &["config", "user.email"])
        .with_context(|| "unable to determine git user email")
}
pub(in crate::providers) fn ensure_git_available(git: &Path) -> Result<()> {
let ok = Command::new(git)
.arg("--version")
.stdout(Stdio::null())
+190
View File
@@ -0,0 +1,190 @@
use crate::providers::{ENV_PATH, SecretProvider};
use anyhow::{Context, Result, anyhow};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
use std::io::{Read, Write};
use std::process::{Command, Stdio};
use validator::Validate;
#[skip_serializing_none]
/// Gopass-based secret provider
/// See [Gopass](https://gopass.pw/) for more information.
///
/// You must already have gopass installed and configured on your system.
///
/// This provider stores secrets in a gopass store. It requires
/// an optional store name to be specified. If no store name is
/// specified, the default store will be used.
///
/// Example
/// ```no_run
/// use gman::providers::gopass::GopassProvider;
/// use gman::providers::{SecretProvider, SupportedProvider};
/// use gman::config::Config;
///
/// let provider = GopassProvider::default();
/// let _ = provider.set_secret("MY_SECRET", "value");
/// ```
#[derive(Debug, Default, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct GopassProvider {
// Optional gopass store (mount) name; passed as `gopass sync -s <store>`.
// When `None`, gopass's default store is used.
pub store: Option<String>,
}
#[async_trait::async_trait]
impl SecretProvider for GopassProvider {
fn name(&self) -> &'static str {
"GopassProvider"
}
async fn get_secret(&self, key: &str) -> Result<String> {
ensure_gopass_installed()?;
let mut child = Command::new("gopass")
.args(["show", "-yfon", key])
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
.stdin(Stdio::inherit())
.stdout(Stdio::piped())
.stderr(Stdio::inherit())
.spawn()
.context("Failed to spawn gopass command")?;
let mut output = String::new();
child
.stdout
.as_mut()
.expect("Failed to open gopass stdout")
.read_to_string(&mut output)
.context("Failed to read gopass output")?;
let status = child.wait().context("Failed to wait on gopass process")?;
if !status.success() {
return Err(anyhow!("gopass command failed with status: {}", status));
}
Ok(output.trim_end_matches(&['\r', '\n'][..]).to_string())
}
async fn set_secret(&self, key: &str, value: &str) -> Result<()> {
ensure_gopass_installed()?;
let mut child = Command::new("gopass")
.args(["insert", "-f", key])
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
.stdin(Stdio::piped())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.spawn()
.context("Failed to spawn gopass command")?;
{
let stdin = child.stdin.as_mut().expect("Failed to open gopass stdin");
stdin
.write_all(value.as_bytes())
.context("Failed to write to gopass stdin")?;
}
let status = child.wait().context("Failed to wait on gopass process")?;
if !status.success() {
return Err(anyhow!("gopass command failed with status: {}", status));
}
Ok(())
}
async fn update_secret(&self, key: &str, value: &str) -> Result<()> {
ensure_gopass_installed()?;
self.set_secret(key, value).await
}
async fn delete_secret(&self, key: &str) -> Result<()> {
ensure_gopass_installed()?;
let mut child = Command::new("gopass")
.args(["rm", "-f", key])
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.spawn()
.context("Failed to spawn gopass command")?;
let status = child.wait().context("Failed to wait on gopass process")?;
if !status.success() {
return Err(anyhow!("gopass command failed with status: {}", status));
}
Ok(())
}
async fn list_secrets(&self) -> Result<Vec<String>> {
ensure_gopass_installed()?;
let mut child = Command::new("gopass")
.args(["ls", "-f"])
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
.stdin(Stdio::inherit())
.stdout(Stdio::piped())
.stderr(Stdio::inherit())
.spawn()
.context("Failed to spawn gopass command")?;
let mut output = String::new();
child
.stdout
.as_mut()
.expect("Failed to open gopass stdout")
.read_to_string(&mut output)
.context("Failed to read gopass output")?;
let status = child.wait().context("Failed to wait on gopass process")?;
if !status.success() {
return Err(anyhow!("gopass command failed with status: {}", status));
}
let secrets: Vec<String> = output
.lines()
.map(|line| line.trim().to_string())
.filter(|line| !line.is_empty())
.collect();
Ok(secrets)
}
async fn sync(&mut self) -> Result<()> {
ensure_gopass_installed()?;
let mut child = Command::new("gopass");
child.arg("sync");
if let Some(store) = &self.store {
child.args(["-s", store]);
}
let status = child
.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"))
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.spawn()
.context("Failed to spawn gopass command")?
.wait()
.context("Failed to wait on gopass process")?;
if !status.success() {
return Err(anyhow!("gopass command failed with status: {}", status));
}
Ok(())
}
}
/// Verifies that a `gopass` binary is reachable via PATH before any gopass
/// subprocess is spawned.
fn ensure_gopass_installed() -> Result<()> {
    match which::which("gopass") {
        Ok(_) => Ok(()),
        Err(_) => Err(anyhow!(
            "Gopass is not installed or not found in PATH. Please install Gopass from https://gopass.pw/"
        )),
    }
}
+485 -46
View File
@@ -5,11 +5,15 @@ use std::path::{Path, PathBuf};
use std::{env, fs};
use zeroize::Zeroize;
use crate::config::Config;
use crate::providers::SecretProvider;
use crate::providers::git_sync::{SyncOpts, repo_name_from_url, sync_and_push};
use crate::config::{Config, get_config_file_path, load_config};
use crate::providers::git_sync::{
SyncOpts, default_git_email, default_git_username, ensure_git_available, repo_name_from_url,
resolve_git, sync_and_push,
};
use crate::providers::{SecretProvider, SupportedProvider};
use crate::{
ARGON_M_COST_KIB, ARGON_P, ARGON_T_COST, HEADER, KDF, KEY_LEN, NONCE_LEN, SALT_LEN, VERSION,
calling_app_name,
};
use anyhow::Result;
use argon2::{Algorithm, Argon2, Params, Version};
@@ -54,17 +58,25 @@ pub struct LocalProvider {
#[validate(email)]
pub git_user_email: Option<String>,
pub git_executable: Option<PathBuf>,
#[serde(skip)]
pub runtime_provider_name: Option<String>,
}
impl Default for LocalProvider {
    /// Default local provider: password file pre-populated only when it
    /// already exists on disk, git branch "main", everything else unset.
    ///
    /// Fix: this block previously carried a stale duplicate `password_file:`
    /// initializer left over from a merge, which made the struct literal
    /// invalid — removed here.
    fn default() -> Self {
        // Only record the password file path when it actually exists;
        // otherwise leave `password_file` unset.
        let password_file = match Config::local_provider_password_file() {
            p if p.exists() => Some(p),
            _ => None,
        };
        Self {
            password_file,
            git_branch: Some("main".into()),
            git_remote_url: None,
            git_user_name: None,
            git_user_email: None,
            git_executable: None,
            runtime_provider_name: None,
        }
    }
}
@@ -146,13 +158,16 @@ impl SecretProvider for LocalProvider {
async fn list_secrets(&self) -> Result<Vec<String>> {
let vault_path = self.active_vault_path()?;
let vault: HashMap<String, String> = load_vault(&vault_path).unwrap_or_default();
let keys: Vec<String> = vault.keys().cloned().collect();
let mut keys: Vec<String> = vault.keys().cloned().collect();
keys.sort();
Ok(keys)
}
async fn sync(&mut self) -> Result<()> {
let mut config_changed = false;
let git = resolve_git(self.git_executable.as_ref())?;
ensure_git_available(&git)?;
if self.git_branch.is_none() {
config_changed = true;
@@ -169,7 +184,9 @@ impl SecretProvider for LocalProvider {
config_changed = true;
debug!("Prompting user to set git_remote in config for sync");
let remote: String = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Enter remote git URL to sync with")
.with_prompt(
"Enter remote git URL to sync with (e.g. 'git@github.com:user/repo.git')",
)
.validate_with(|s: &String| {
LocalProvider {
git_remote_url: Some(s.clone()),
@@ -184,10 +201,41 @@ impl SecretProvider for LocalProvider {
self.git_remote_url = Some(remote);
}
if self.git_user_name.is_none() {
config_changed = true;
debug!("Prompting user git user name");
let default_user_name = default_git_username(&git)?.trim().to_string();
let branch: String = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Enter git user name")
.default(default_user_name)
.interact_text()?;
self.git_user_name = Some(branch);
}
if self.git_user_email.is_none() {
config_changed = true;
debug!("Prompting user git email");
let default_user_name = default_git_email(&git)?.trim().to_string();
let branch: String = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Enter git user email")
.validate_with({
|s: &String| {
if s.contains('@') {
Ok(())
} else {
Err("not a valid email address".to_string())
}
}
})
.default(default_user_name)
.interact_text()?;
self.git_user_email = Some(branch);
}
if config_changed {
debug!("Saving updated config");
confy::store("gman", "config", &self)
.with_context(|| "failed to save updated config")?;
self.persist_git_settings_to_config()?;
}
let sync_opts = SyncOpts {
@@ -203,6 +251,55 @@ impl SecretProvider for LocalProvider {
}
impl LocalProvider {
/// Writes this provider's git settings back into the on-disk config,
/// touching only local provider entries.
///
/// Matching rules (see the `matches_name` expression below):
/// - when `runtime_provider_name` is Some, only the provider whose `name`
///   equals it is updated, and the loop stops at the first match;
/// - when it is None, every local provider receives the settings (there is
///   no early break in that branch).
///
/// The config is written as YAML directly when the resolved path has a
/// yml/yaml extension; otherwise it is stored via confy.
///
/// # Errors
/// Fails when the existing config cannot be loaded, when no matching local
/// provider is found, or when the write itself fails.
fn persist_git_settings_to_config(&self) -> Result<()> {
debug!("Saving updated config (only current local provider)");
// Reload from disk (with interpolation) rather than mutating in-memory
// state, so settings from other providers are preserved.
let mut cfg = load_config(true).with_context(|| "failed to load existing config")?;
let target_name = self.runtime_provider_name.clone();
let mut updated = false;
for pc in cfg.providers.iter_mut() {
if let SupportedProvider::Local { provider_def } = &mut pc.provider_type {
// True only when both this entry and the target carry a name and
// they agree; all other combinations are false.
let matches_name = match (&pc.name, &target_name) {
(Some(n), Some(t)) => n == t,
(Some(_), None) => false,
_ => false,
};
if matches_name || target_name.is_none() {
provider_def.git_branch = self.git_branch.clone();
provider_def.git_remote_url = self.git_remote_url.clone();
provider_def.git_user_name = self.git_user_name.clone();
provider_def.git_user_email = self.git_user_email.clone();
provider_def.git_executable = self.git_executable.clone();
updated = true;
// Stop after an exact name match; with no target name the loop
// intentionally continues and updates every local provider.
if matches_name {
break;
}
}
}
}
if !updated {
bail!("unable to find matching local provider in config to update");
}
let path = get_config_file_path()?;
let ext = path.extension().and_then(|s| s.to_str()).unwrap_or("");
if ext.eq_ignore_ascii_case("yml") || ext.eq_ignore_ascii_case("yaml") {
// XDG-style YAML config: write the file directly.
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
let s = serde_yaml::to_string(&cfg)?;
fs::write(&path, s).with_context(|| format!("failed to write {}", path.display()))?;
} else {
// Fall back to confy's own storage location/format.
confy::store(&calling_app_name(), "config", &cfg)
.with_context(|| "failed to save updated config via confy")?;
}
Ok(())
}
fn repo_dir_for_config(&self) -> Result<Option<PathBuf>> {
if let Some(remote) = &self.git_remote_url {
let name = repo_name_from_url(remote);
@@ -225,6 +322,22 @@ impl LocalProvider {
fn get_password(&self) -> Result<SecretString> {
if let Some(password_file) = &self.password_file {
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let metadata = fs::metadata(password_file).with_context(|| {
format!("failed to read password file metadata {:?}", password_file)
})?;
let mode = metadata.permissions().mode();
if mode & 0o077 != 0 {
bail!(
"password file {:?} has insecure permissions {:o} (should be 0600 or 0400)",
password_file,
mode & 0o777
);
}
}
let password = SecretString::new(
fs::read_to_string(password_file)
.with_context(|| format!("failed to read password file {:?}", password_file))?
@@ -245,10 +358,11 @@ fn default_vault_path() -> Result<PathBuf> {
let xdg_path = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from);
if let Some(xdg) = xdg_path {
return Ok(xdg.join("gman").join("vault.yml"));
return Ok(xdg.join(calling_app_name()).join("vault.yml"));
}
confy::get_configuration_file_path("gman", "vault").with_context(|| "get config dir")
confy::get_configuration_file_path(&calling_app_name(), "vault")
.with_context(|| "get config dir")
}
fn base_config_dir() -> Result<PathBuf> {
@@ -272,24 +386,41 @@ fn store_vault(path: &Path, map: &HashMap<String, String>) -> Result<()> {
fs::create_dir_all(parent).with_context(|| format!("create {}", parent.display()))?;
}
let s = serde_yaml::to_string(map).with_context(|| "serialize vault")?;
fs::write(path, s).with_context(|| format!("write {}", path.display()))
fs::write(path, &s).with_context(|| format!("write {}", path.display()))?;
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
fs::set_permissions(path, fs::Permissions::from_mode(0o600))
.with_context(|| format!("set permissions on {}", path.display()))?;
}
Ok(())
}
fn encrypt_string(password: &SecretString, plaintext: &str) -> Result<String> {
if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let mut salt = [0u8; SALT_LEN];
OsRng.fill_bytes(&mut salt);
let mut nonce_bytes = [0u8; NONCE_LEN];
OsRng.fill_bytes(&mut nonce_bytes);
let key = derive_key(password, &salt)?;
let mut key = derive_key(password, &salt)?;
let cipher = XChaCha20Poly1305::new(&key);
let aad = format!("{};{}", HEADER, VERSION);
let nonce = XNonce::from_slice(&nonce_bytes);
let aad = format!(
"{};{};{};m={},t={},p={}",
HEADER, VERSION, KDF, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P
);
let nonce: XNonce = nonce_bytes.into();
let mut pt = plaintext.as_bytes().to_vec();
let ct = cipher
.encrypt(
nonce,
&nonce,
chacha20poly1305::aead::Payload {
msg: &pt,
aad: aad.as_bytes(),
@@ -312,6 +443,7 @@ fn encrypt_string(password: &SecretString, plaintext: &str) -> Result<String> {
);
drop(cipher);
key.zeroize();
salt.zeroize();
nonce_bytes.zeroize();
@@ -332,16 +464,30 @@ fn derive_key_with_params(
argon
.hash_password_into(password.expose_secret().as_bytes(), salt, &mut key_bytes)
.map_err(|e| anyhow!("argon2 derive error: {:?}", e))?;
let key: Key = key_bytes.into();
key_bytes.zeroize();
let key = Key::from_slice(&key_bytes);
Ok(*key)
Ok(key)
}
/// Derives the cipher key from `password` and `salt` using the crate-default
/// Argon2id parameters (ARGON_M_COST_KIB / ARGON_T_COST / ARGON_P).
fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
derive_key_with_params(password, salt, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P)
}
fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> {
/// Performs one AEAD decryption attempt with the supplied AAD, surfacing the
/// raw AEAD error so callers can fall back to a different AAD format.
fn try_decrypt(
    cipher: &XChaCha20Poly1305,
    nonce: &XNonce,
    ct: &[u8],
    aad: &[u8],
) -> std::result::Result<Vec<u8>, chacha20poly1305::aead::Error> {
    let payload = chacha20poly1305::aead::Payload { msg: ct, aad };
    cipher.decrypt(nonce, payload)
}
type EnvelopeComponents = (u32, u32, u32, Vec<u8>, [u8; NONCE_LEN], Vec<u8>);
/// Parse an envelope string and extract its components.
/// Returns (m, t, p, salt, nonce_arr, ct) on success.
fn parse_envelope(envelope: &str) -> Result<EnvelopeComponents> {
let parts: Vec<&str> = envelope.trim().split(';').collect();
if parts.len() < 7 {
debug!("Invalid envelope format: {:?}", parts);
@@ -383,40 +529,202 @@ fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> {
.with_context(|| "missing nonce")?;
let ct_b64 = parts[6].strip_prefix("ct=").with_context(|| "missing ct")?;
let mut salt = B64.decode(salt_b64).with_context(|| "bad salt b64")?;
let mut nonce_bytes = B64.decode(nonce_b64).with_context(|| "bad nonce b64")?;
let mut ct = B64.decode(ct_b64).with_context(|| "bad ct b64")?;
let salt = B64.decode(salt_b64).with_context(|| "bad salt b64")?;
let nonce_bytes = B64.decode(nonce_b64).with_context(|| "bad nonce b64")?;
let ct = B64.decode(ct_b64).with_context(|| "bad ct b64")?;
if salt.len() != SALT_LEN || nonce_bytes.len() != NONCE_LEN {
debug!(
"Salt/nonce length mismatch: salt {}, nonce {}",
salt.len(),
nonce_bytes.len()
);
bail!("salt/nonce length mismatch");
if nonce_bytes.len() != NONCE_LEN {
debug!("Nonce length mismatch: {}", nonce_bytes.len());
bail!("nonce length mismatch");
}
let key = derive_key_with_params(password, &salt, m, t, p)?;
let nonce_arr: [u8; NONCE_LEN] = nonce_bytes
.try_into()
.map_err(|_| anyhow!("invalid nonce length"))?;
Ok((m, t, p, salt, nonce_arr, ct))
}
/// Decrypts an envelope produced by `encrypt_string`.
///
/// Tries the current AAD format (which authenticates the Argon2 parameters)
/// first, then falls back to the legacy AAD format for vaults written before
/// the KDF parameters were bound into the AAD. Key material, salt, nonce and
/// buffers are zeroized before returning.
///
/// Fix: this block previously contained stale pre-refactor lines (the old
/// single-AAD decrypt path and an old `nonce_bytes.zeroize()` / `Ok(s)`
/// tail) interleaved with the new logic — removed here. The plaintext buffer
/// is now also zeroized on the success path, matching the lib.rs variant.
fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> {
    if password.expose_secret().is_empty() {
        bail!("password cannot be empty");
    }
    let (m, t, p, mut salt, mut nonce_arr, mut ct) = parse_envelope(envelope)?;
    let nonce: XNonce = nonce_arr.into();
    // The current format binds the Argon2 parameters into the AAD so they
    // cannot be silently downgraded by an attacker.
    let aad_current = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
    let mut key = derive_key_with_params(password, &salt, m, t, p)?;
    let cipher = XChaCha20Poly1305::new(&key);
    if let Ok(mut pt) = try_decrypt(&cipher, &nonce, &ct, aad_current.as_bytes()) {
        let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
        pt.zeroize();
        key.zeroize();
        salt.zeroize();
        nonce_arr.zeroize();
        ct.zeroize();
        return Ok(s);
    }
    key.zeroize();
    salt.zeroize();
    nonce_arr.zeroize();
    ct.zeroize();
    // TODO: Remove once all users have migrated their local vaults
    if let Ok(plaintext) = legacy::decrypt_string_legacy(password, envelope) {
        return Ok(plaintext);
    }
    bail!("decryption failed (wrong password or corrupted data)")
}
// TODO: Remove this entire module once all users have migrated their vaults.
/// Backward-compatibility decryption for envelopes written before the AAD
/// included the Argon2 KDF parameters. Kept only so existing vaults can be
/// read and re-encrypted into the current format.
mod legacy {
    use super::*;

    /// AAD used by the old envelope format: header and version only
    /// (KDF parameters were not bound into the authenticated data).
    fn legacy_aad() -> String {
        format!("{};{}", HEADER, VERSION)
    }

    /// Attempt to decrypt `envelope` under the legacy AAD.
    ///
    /// Tries the password-derived key first; on failure, retries with an
    /// all-zeros key (the debug log below indicates some old envelopes were
    /// produced that way — presumably an earlier defect; TODO confirm).
    /// Key, salt, nonce and ciphertext buffers are zeroized on the success
    /// and failure paths of both attempts.
    ///
    /// NOTE(review): if `String::from_utf8` fails, the `?` returns before the
    /// zeroize calls run — pre-existing behavior, left unchanged here.
    pub fn decrypt_string_legacy(password: &SecretString, envelope: &str) -> Result<String> {
        if password.expose_secret().is_empty() {
            bail!("password cannot be empty");
        }
        let (m, t, p, mut salt, mut nonce_arr, mut ct) = parse_envelope(envelope)?;
        let nonce: XNonce = nonce_arr.into();
        let aad = legacy_aad();
        let mut key = derive_key_with_params(password, &salt, m, t, p)?;
        let cipher = XChaCha20Poly1305::new(&key);
        // First attempt: the properly derived key.
        if let Ok(pt) = try_decrypt(&cipher, &nonce, &ct, aad.as_bytes()) {
            let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
            key.zeroize();
            salt.zeroize();
            nonce_arr.zeroize();
            ct.zeroize();
            return Ok(s);
        }
        key.zeroize();
        // Second attempt: legacy all-zeros key fallback.
        let mut zeros_key: Key = [0u8; KEY_LEN].into();
        let zeros_cipher = XChaCha20Poly1305::new(&zeros_key);
        if let Ok(pt) = try_decrypt(&zeros_cipher, &nonce, &ct, aad.as_bytes()) {
            debug!("Decrypted using legacy all-zeros key - secret needs migration");
            let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
            zeros_key.zeroize();
            salt.zeroize();
            nonce_arr.zeroize();
            ct.zeroize();
            return Ok(s);
        }
        zeros_key.zeroize();
        salt.zeroize();
        nonce_arr.zeroize();
        ct.zeroize();
        bail!("legacy decryption failed")
    }

    /// Report whether `envelope` authenticates under the CURRENT AAD (which
    /// binds the Argon2 parameters m/t/p). Used during migration to decide
    /// whether a stored secret still needs re-encryption.
    pub fn is_current_format(password: &SecretString, envelope: &str) -> Result<bool> {
        if password.expose_secret().is_empty() {
            bail!("password cannot be empty");
        }
        let (m, t, p, salt, nonce_arr, ct) = parse_envelope(envelope)?;
        let nonce: XNonce = nonce_arr.into();
        let aad_current = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
        let key = derive_key_with_params(password, &salt, m, t, p)?;
        let cipher = XChaCha20Poly1305::new(&key);
        Ok(try_decrypt(&cipher, &nonce, &ct, aad_current.as_bytes()).is_ok())
    }
}
// TODO: Remove once all users have migrated their local vaults
/// Whether a stored secret's envelope is already in the current format or
/// still needs re-encryption during vault migration.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SecretStatus {
    /// Envelope authenticates under the current AAD.
    Current,
    /// Envelope only decrypts via the legacy path and should be re-encrypted.
    NeedsMigration,
}
// TODO: Remove once all users have migrated their local vaults
/// Summary of one vault-migration pass (see `LocalProvider::migrate_vault`).
#[derive(Debug)]
pub struct MigrationResult {
    /// Total number of entries examined.
    pub total: usize,
    /// Entries successfully re-encrypted into the current format.
    pub migrated: usize,
    /// Entries that were already current and left untouched.
    pub already_current: usize,
    /// `(key, reason)` pairs for entries that could not be migrated.
    pub failed: Vec<(String, String)>,
}
impl LocalProvider {
    // TODO: Remove once all users have migrated their local vaults
    /// Re-encrypt every entry of the active vault into the current envelope
    /// format. Entries already in the current format are kept as-is; entries
    /// that fail any step keep their original envelope and are reported in
    /// `MigrationResult::failed` instead of aborting the pass.
    pub async fn migrate_vault(&self) -> Result<MigrationResult> {
        let vault_path = self.active_vault_path()?;
        // A missing/unreadable vault is treated as empty rather than an error.
        let vault: HashMap<String, String> = load_vault(&vault_path).unwrap_or_default();
        if vault.is_empty() {
            return Ok(MigrationResult {
                total: 0,
                migrated: 0,
                already_current: 0,
                failed: vec![],
            });
        }
        let password = self.get_password()?;
        let mut migrated_vault = HashMap::new();
        let mut migrated_count = 0;
        let mut already_current_count = 0;
        let mut failed = vec![];
        for (key, envelope) in &vault {
            match legacy::is_current_format(&password, envelope) {
                // Already current: copy through unchanged.
                Ok(true) => {
                    migrated_vault.insert(key.clone(), envelope.clone());
                    already_current_count += 1;
                }
                // Needs migration: decrypt (current or legacy path), then
                // re-encrypt with the current format.
                Ok(false) => match decrypt_string(&password, envelope) {
                    Ok(plaintext) => match encrypt_string(&password, &plaintext) {
                        Ok(new_envelope) => {
                            migrated_vault.insert(key.clone(), new_envelope);
                            migrated_count += 1;
                        }
                        Err(e) => {
                            // Keep the old envelope so no data is lost.
                            failed.push((key.clone(), format!("re-encryption failed: {}", e)));
                            migrated_vault.insert(key.clone(), envelope.clone());
                        }
                    },
                    Err(e) => {
                        failed.push((key.clone(), format!("decryption failed: {}", e)));
                        migrated_vault.insert(key.clone(), envelope.clone());
                    }
                },
                Err(e) => {
                    failed.push((key.clone(), format!("status check failed: {}", e)));
                    migrated_vault.insert(key.clone(), envelope.clone());
                }
            }
        }
        // Only rewrite the vault file if something actually changed.
        if migrated_count > 0 {
            store_vault(&vault_path, &migrated_vault)?;
        }
        Ok(MigrationResult {
            total: vault.len(),
            migrated: migrated_count,
            already_current: already_current_count,
            failed,
        })
    }
}
#[cfg(test)]
@@ -424,6 +732,7 @@ mod tests {
use super::*;
use pretty_assertions::assert_eq;
use secrecy::{ExposeSecret, SecretString};
use std::env as std_env;
use tempfile::tempdir;
#[test]
@@ -431,7 +740,7 @@ mod tests {
let password = SecretString::new("test_password".to_string().into());
let salt = [0u8; 16];
let key = derive_key(&password, &salt).unwrap();
assert_eq!(key.as_slice().len(), 32);
assert_eq!(key.len(), 32);
}
#[test]
@@ -439,7 +748,7 @@ mod tests {
let password = SecretString::new("test_password".to_string().into());
let salt = [0u8; 16];
let key = derive_key_with_params(&password, &salt, 10, 1, 1).unwrap();
assert_eq!(key.as_slice().len(), 32);
assert_eq!(key.len(), 32);
}
#[test]
@@ -452,15 +761,145 @@ mod tests {
}
#[test]
#[cfg(unix)]
fn get_password_reads_password_file() {
    use std::os::unix::fs::PermissionsExt;
    // An owner-only (0600) password file must be accepted, and the trailing
    // newline stripped from its contents.
    let tmp = tempdir().unwrap();
    let pw_path = tmp.path().join("pw.txt");
    fs::write(&pw_path, "secretpw\n").unwrap();
    fs::set_permissions(&pw_path, fs::Permissions::from_mode(0o600)).unwrap();
    let provider = LocalProvider {
        password_file: Some(pw_path),
        runtime_provider_name: None,
        ..LocalProvider::default()
    };
    assert_eq!(provider.get_password().unwrap().expose_secret(), "secretpw");
}
#[test]
#[cfg(unix)]
fn get_password_rejects_insecure_file() {
    use std::os::unix::fs::PermissionsExt;
    // A group/world-readable password file (0644) must be rejected.
    let tmp = tempdir().unwrap();
    let pw_path = tmp.path().join("pw.txt");
    fs::write(&pw_path, "secretpw\n").unwrap();
    fs::set_permissions(&pw_path, fs::Permissions::from_mode(0o644)).unwrap();
    let provider = LocalProvider {
        password_file: Some(pw_path),
        runtime_provider_name: None,
        ..LocalProvider::default()
    };
    let result = provider.get_password();
    assert!(result.is_err());
}
#[test]
#[cfg(not(unix))]
fn get_password_reads_password_file() {
    // Non-Unix platforms have no permission check; the file is simply read
    // and the trailing newline stripped.
    let tmp = tempdir().unwrap();
    let pw_path = tmp.path().join("pw.txt");
    fs::write(&pw_path, "secretpw\n").unwrap();
    let provider = LocalProvider {
        password_file: Some(pw_path),
        runtime_provider_name: None,
        ..LocalProvider::default()
    };
    assert_eq!(provider.get_password().unwrap().expose_secret(), "secretpw");
}
#[test]
fn persist_only_target_local_provider_git_settings() {
let td = tempdir().unwrap();
let xdg = td.path().join("xdg");
let app_dir = xdg.join(calling_app_name());
fs::create_dir_all(&app_dir).unwrap();
unsafe {
std_env::set_var("XDG_CONFIG_HOME", &xdg);
}
let initial_yaml = indoc::indoc! {
"---
default_provider: local
providers:
- name: local
type: local
password_file: /tmp/.gman_pass
git_branch: main
git_remote_url: null
git_user_name: null
git_user_email: null
git_executable: null
- name: other
type: local
git_branch: main
git_remote_url: git@github.com:someone/else.git
run_configs:
- name: echo
secrets: [API_KEY]
"
};
let cfg_path = app_dir.join("config.yml");
fs::write(&cfg_path, initial_yaml).unwrap();
let provider = LocalProvider {
password_file: None,
git_branch: Some("dev".into()),
git_remote_url: Some("git@github.com:user/repo.git".into()),
git_user_name: Some("Test User".into()),
git_user_email: Some("test@example.com".into()),
git_executable: Some(PathBuf::from("/usr/bin/git")),
runtime_provider_name: Some("local".into()),
};
provider
.persist_git_settings_to_config()
.expect("persist ok");
let content = fs::read_to_string(&cfg_path).unwrap();
let cfg: Config = serde_yaml::from_str(&content).unwrap();
assert_eq!(cfg.default_provider.as_deref(), Some("local"));
assert!(cfg.run_configs.is_some());
assert_eq!(cfg.run_configs.as_ref().unwrap().len(), 1);
let p0 = &cfg.providers[0];
assert_eq!(p0.name.as_deref(), Some("local"));
match &p0.provider_type {
SupportedProvider::Local { provider_def } => {
assert_eq!(provider_def.git_branch.as_deref(), Some("dev"));
assert_eq!(
provider_def.git_remote_url.as_deref(),
Some("git@github.com:user/repo.git")
);
assert_eq!(provider_def.git_user_name.as_deref(), Some("Test User"));
assert_eq!(
provider_def.git_user_email.as_deref(),
Some("test@example.com")
);
assert_eq!(
provider_def.git_executable.as_ref(),
Some(&PathBuf::from("/usr/bin/git"))
);
}
_ => panic!("expected local provider"),
}
let p1 = &cfg.providers[1];
assert_eq!(p1.name.as_deref(), Some("other"));
match &p1.provider_type {
SupportedProvider::Local { provider_def } => {
assert_eq!(provider_def.git_branch.as_deref(), Some("main"));
assert_eq!(
provider_def.git_remote_url.as_deref(),
Some("git@github.com:someone/else.git")
);
}
_ => panic!("expected local provider"),
}
unsafe {
std_env::remove_var("XDG_CONFIG_HOME");
}
}
}
+24 -3
View File
@@ -6,17 +6,26 @@ pub mod aws_secrets_manager;
pub mod azure_key_vault;
pub mod gcp_secret_manager;
mod git_sync;
pub mod gopass;
pub mod local;
pub mod one_password;
use crate::providers::gopass::GopassProvider;
use crate::providers::local::LocalProvider;
use anyhow::{Result, anyhow};
use crate::providers::one_password::OnePasswordProvider;
use anyhow::{Context, Result, anyhow};
use aws_secrets_manager::AwsSecretsManagerProvider;
use azure_key_vault::AzureKeyVaultProvider;
use gcp_secret_manager::GcpSecretManagerProvider;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::fmt::{Display, Formatter};
use std::{env, fmt};
use validator::{Validate, ValidationErrors};
pub(in crate::providers) static ENV_PATH: Lazy<Result<String>> =
Lazy::new(|| env::var("PATH").context("No PATH environment variable"));
/// A secret storage backend capable of CRUD, with optional
/// update, listing, and sync support.
#[async_trait::async_trait]
@@ -63,7 +72,15 @@ pub enum SupportedProvider {
},
AzureKeyVault {
#[serde(flatten)]
provider_def: azure_key_vault::AzureKeyVaultProvider,
provider_def: AzureKeyVaultProvider,
},
Gopass {
#[serde(flatten)]
provider_def: GopassProvider,
},
OnePassword {
#[serde(flatten)]
provider_def: OnePasswordProvider,
},
}
@@ -74,6 +91,8 @@ impl Validate for SupportedProvider {
SupportedProvider::AwsSecretsManager { provider_def } => provider_def.validate(),
SupportedProvider::GcpSecretManager { provider_def } => provider_def.validate(),
SupportedProvider::AzureKeyVault { provider_def } => provider_def.validate(),
SupportedProvider::Gopass { provider_def } => provider_def.validate(),
SupportedProvider::OnePassword { provider_def } => provider_def.validate(),
}
}
}
@@ -93,6 +112,8 @@ impl Display for SupportedProvider {
SupportedProvider::AwsSecretsManager { .. } => write!(f, "aws_secrets_manager"),
SupportedProvider::GcpSecretManager { .. } => write!(f, "gcp_secret_manager"),
SupportedProvider::AzureKeyVault { .. } => write!(f, "azure_key_vault"),
SupportedProvider::Gopass { .. } => write!(f, "gopass"),
SupportedProvider::OnePassword { .. } => write!(f, "one_password"),
}
}
}
+199
View File
@@ -0,0 +1,199 @@
use crate::providers::{ENV_PATH, SecretProvider};
use anyhow::{Context, Result, anyhow};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
use std::io::Read;
use std::process::{Command, Stdio};
use validator::Validate;
#[skip_serializing_none]
/// 1Password-based secret provider.
/// See [1Password CLI](https://developer.1password.com/docs/cli/) for more
/// information.
///
/// You must already have the 1Password CLI (`op`) installed and configured
/// on your system.
///
/// This provider stores secrets as 1Password Password items. An optional
/// vault name and an optional account identifier may be specified.
/// If no vault is specified, the user's default vault is used. If no account
/// is specified, the default signed-in account is used.
///
/// Example
/// ```no_run
/// use gman::providers::one_password::OnePasswordProvider;
/// use gman::providers::{SecretProvider, SupportedProvider};
/// use gman::config::Config;
///
/// let provider = OnePasswordProvider::default();
/// let _ = provider.set_secret("MY_SECRET", "value");
/// ```
#[derive(Debug, Default, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct OnePasswordProvider {
    /// Vault to operate on (`--vault`); `None` uses the account's default vault.
    pub vault: Option<String>,
    /// Account identifier (`--account`); `None` uses the default signed-in account.
    pub account: Option<String>,
}
impl OnePasswordProvider {
    /// Build the base `op` invocation: PATH pinned to the value captured at
    /// startup, plus `--account` when one is configured.
    fn base_command(&self) -> Command {
        let mut cmd = Command::new("op");
        cmd.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"));
        match &self.account {
            Some(account) => {
                cmd.args(["--account", account]);
            }
            None => {}
        }
        cmd
    }

    /// `--vault <name>` arguments when a vault is configured, empty otherwise.
    fn vault_args(&self) -> Vec<&str> {
        if let Some(vault) = &self.vault {
            vec!["--vault", vault]
        } else {
            vec![]
        }
    }
}
#[async_trait::async_trait]
impl SecretProvider for OnePasswordProvider {
    fn name(&self) -> &'static str {
        "OnePasswordProvider"
    }

    /// Fetch the `password` field of the item titled `key` via `op item get`.
    async fn get_secret(&self, key: &str) -> Result<String> {
        ensure_op_installed()?;
        let mut cmd = self.base_command();
        cmd.args(["item", "get", key, "--fields", "password", "--reveal"]);
        cmd.args(self.vault_args());
        // stdin/stderr are inherited so `op` can prompt for authentication.
        cmd.stdin(Stdio::inherit())
            .stdout(Stdio::piped())
            .stderr(Stdio::inherit());
        let mut child = cmd.spawn().context("Failed to spawn op command")?;
        let mut output = String::new();
        child
            .stdout
            .as_mut()
            .expect("Failed to open op stdout")
            .read_to_string(&mut output)
            .context("Failed to read op output")?;
        let status = child.wait().context("Failed to wait on op process")?;
        if !status.success() {
            return Err(anyhow!("op command failed with status: {}", status));
        }
        // `op` terminates the value with a newline; strip any trailing CR/LF.
        Ok(output.trim_end_matches(&['\r', '\n'][..]).to_string())
    }

    /// Create a new Password-category item titled `key` holding `value`.
    async fn set_secret(&self, key: &str, value: &str) -> Result<()> {
        ensure_op_installed()?;
        let mut cmd = self.base_command();
        cmd.args(["item", "create", "--category", "password", "--title", key]);
        cmd.args(self.vault_args());
        // NOTE(review): the secret value is passed on the command line and is
        // briefly visible in the process list; consider feeding it via stdin
        // if the `op` CLI supports that for item creation.
        cmd.arg(format!("password={}", value));
        // Discard stdout rather than piping it: the previous Stdio::piped()
        // was never read, which could deadlock wait() if `op` printed more
        // than the OS pipe buffer holds.
        cmd.stdin(Stdio::inherit())
            .stdout(Stdio::null())
            .stderr(Stdio::inherit());
        let mut child = cmd.spawn().context("Failed to spawn op command")?;
        let status = child.wait().context("Failed to wait on op process")?;
        if !status.success() {
            return Err(anyhow!("op command failed with status: {}", status));
        }
        Ok(())
    }

    /// Update the `password` field of the existing item titled `key`.
    async fn update_secret(&self, key: &str, value: &str) -> Result<()> {
        ensure_op_installed()?;
        let mut cmd = self.base_command();
        cmd.args(["item", "edit", key]);
        cmd.args(self.vault_args());
        // NOTE(review): secret visible in argv, same caveat as set_secret.
        cmd.arg(format!("password={}", value));
        // Discard stdout (see set_secret): previously piped but never read.
        cmd.stdin(Stdio::inherit())
            .stdout(Stdio::null())
            .stderr(Stdio::inherit());
        let mut child = cmd.spawn().context("Failed to spawn op command")?;
        let status = child.wait().context("Failed to wait on op process")?;
        if !status.success() {
            return Err(anyhow!("op command failed with status: {}", status));
        }
        Ok(())
    }

    /// Delete the item titled `key`; `op` output goes straight to the terminal.
    async fn delete_secret(&self, key: &str) -> Result<()> {
        ensure_op_installed()?;
        let mut cmd = self.base_command();
        cmd.args(["item", "delete", key]);
        cmd.args(self.vault_args());
        cmd.stdin(Stdio::inherit())
            .stdout(Stdio::inherit())
            .stderr(Stdio::inherit());
        let mut child = cmd.spawn().context("Failed to spawn op command")?;
        let status = child.wait().context("Failed to wait on op process")?;
        if !status.success() {
            return Err(anyhow!("op command failed with status: {}", status));
        }
        Ok(())
    }

    /// List item titles via `op item list --format json`.
    async fn list_secrets(&self) -> Result<Vec<String>> {
        ensure_op_installed()?;
        let mut cmd = self.base_command();
        cmd.args(["item", "list", "--format", "json"]);
        cmd.args(self.vault_args());
        cmd.stdin(Stdio::inherit())
            .stdout(Stdio::piped())
            .stderr(Stdio::inherit());
        let mut child = cmd.spawn().context("Failed to spawn op command")?;
        let mut output = String::new();
        child
            .stdout
            .as_mut()
            .expect("Failed to open op stdout")
            .read_to_string(&mut output)
            .context("Failed to read op output")?;
        let status = child.wait().context("Failed to wait on op process")?;
        if !status.success() {
            return Err(anyhow!("op command failed with status: {}", status));
        }
        let items: Vec<serde_json::Value> =
            serde_json::from_str(&output).context("Failed to parse op item list JSON output")?;
        // Each listed item's "title" is the secret key.
        let secrets: Vec<String> = items
            .iter()
            .filter_map(|item| item.get("title").and_then(|t| t.as_str()))
            .map(|s| s.to_string())
            .collect();
        Ok(secrets)
    }
}
/// Verify that the 1Password CLI (`op`) can be located on the PATH.
fn ensure_op_installed() -> Result<()> {
    match which::which("op") {
        Ok(_) => Ok(()),
        Err(_) => Err(anyhow!(
            "1Password CLI (op) is not installed or not found in PATH. \
            Please install it from https://developer.1password.com/docs/cli/get-started/"
        )),
    }
}
+135 -36
View File
@@ -1,10 +1,31 @@
//! CLI integration tests that execute the gman binary.
//!
//! These tests are skipped when cross-compiling because the compiled binary
//! cannot be executed on a different architecture (e.g., ARM64 binary on x86_64 host).
use assert_cmd::prelude::*;
use predicates::prelude::*;
use std::fs;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use tempfile::TempDir;
/// Absolute path to the compiled `gman` binary under test
/// (provided by Cargo at build time).
fn gman_bin() -> PathBuf {
    env!("CARGO_BIN_EXE_gman").into()
}
/// Check if the gman binary can be executed on this system.
/// Returns false when cross-compiling (e.g., ARM64 binary on x86_64 host).
fn can_execute_binary() -> bool {
    let probe = Command::new(gman_bin()).arg("--version").output();
    matches!(probe, Ok(out) if out.status.success())
}
fn setup_env() -> (TempDir, PathBuf, PathBuf) {
let td = tempfile::tempdir().expect("tempdir");
let cfg_home = td.path().join("config");
@@ -44,15 +65,105 @@ providers:
password_file.display()
)
};
// Confy with yaml feature typically uses .yml; write both to be safe.
fs::write(app_dir.join("config.yml"), &cfg).unwrap();
fs::write(app_dir.join("config.yaml"), &cfg).unwrap();
}
/// Write `content` to `path` and, on Unix, restrict it to owner-only (0600)
/// so the provider's password-file permission check accepts it.
fn create_password_file(path: &Path, content: &[u8]) {
    fs::write(path, content).unwrap();
    #[cfg(unix)]
    fs::set_permissions(path, fs::Permissions::from_mode(0o600)).unwrap();
}
#[test]
#[cfg(unix)]
fn cli_config_no_changes() {
    // `gman config` launched with an editor that exits without touching the
    // file should report that nothing changed.
    if !can_execute_binary() {
        eprintln!("Skipping test: cannot execute cross-compiled binary");
        return;
    }
    let (td, xdg_cfg, xdg_cache) = setup_env();
    let pw_file = td.path().join("pw.txt");
    create_password_file(&pw_file, b"pw\n");
    write_yaml_config(&xdg_cfg, &pw_file, None);
    // An "editor" that immediately exits 0, leaving the config untouched.
    let editor = td.path().join("noop-editor.sh");
    fs::write(&editor, b"#!/bin/sh\nexit 0\n").unwrap();
    let mut perms = fs::metadata(&editor).unwrap().permissions();
    perms.set_mode(0o755);
    fs::set_permissions(&editor, perms).unwrap();
    let mut cmd = Command::new(gman_bin());
    cmd.env("XDG_CONFIG_HOME", &xdg_cfg)
        .env("XDG_CACHE_HOME", &xdg_cache)
        .env("EDITOR", &editor)
        .arg("config");
    cmd.assert()
        .success()
        .stdout(predicate::str::contains("No changes made to configuration"));
}
#[test]
#[cfg(unix)]
fn cli_config_updates_and_persists() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (td, xdg_cfg, xdg_cache) = setup_env();
let pw_file = td.path().join("pw.txt");
create_password_file(&pw_file, b"pw\n");
write_yaml_config(&xdg_cfg, &pw_file, None);
let editor = td.path().join("append-run-config.sh");
// Note: We need a small sleep to ensure the file modification timestamp changes.
// The dialoguer Editor uses file modification time to detect changes, and on fast
// systems the edit can complete within the same timestamp granularity.
let script = r#"#!/bin/sh
FILE="$1"
sleep 0.1
cat >> "$FILE" <<'EOF'
run_configs:
- name: echo
secrets: ["api_key"]
EOF
exit 0
"#;
fs::write(&editor, script.as_bytes()).unwrap();
let mut perms = fs::metadata(&editor).unwrap().permissions();
perms.set_mode(0o755);
fs::set_permissions(&editor, perms).unwrap();
let mut cmd = Command::new(gman_bin());
cmd.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
.env("EDITOR", &editor)
.arg("config");
cmd.assert().success().stdout(predicate::str::contains(
"Configuration updated successfully",
));
let cfg_path = xdg_cfg.join("gman").join("config.yml");
let written = fs::read_to_string(&cfg_path).expect("config file readable");
assert!(written.contains("run_configs:"));
assert!(written.contains("name: echo"));
}
#[test]
fn cli_shows_help() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (_td, cfg, cache) = setup_env();
let mut cmd = Command::cargo_bin("gman").unwrap();
let mut cmd = Command::new(gman_bin());
cmd.env("XDG_CACHE_HOME", &cache)
.env("XDG_CONFIG_HOME", &cfg)
.arg("--help");
@@ -61,27 +172,19 @@ fn cli_shows_help() {
.stdout(predicate::str::contains("Usage").or(predicate::str::contains("Add")));
}
#[test]
fn cli_completions_bash() {
let (_td, cfg, cache) = setup_env();
let mut cmd = Command::cargo_bin("gman").unwrap();
cmd.env("XDG_CACHE_HOME", &cache)
.env("XDG_CONFIG_HOME", &cfg)
.args(["completions", "bash"]);
cmd.assert()
.success()
.stdout(predicate::str::contains("_gman").or(predicate::str::contains("complete -F")));
}
#[test]
fn cli_add_get_list_update_delete_roundtrip() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (td, xdg_cfg, xdg_cache) = setup_env();
let pw_file = td.path().join("pw.txt");
fs::write(&pw_file, b"testpw\n").unwrap();
create_password_file(&pw_file, b"testpw\n");
write_yaml_config(&xdg_cfg, &pw_file, None);
// add
let mut add = Command::cargo_bin("gman").unwrap();
let mut add = Command::new(gman_bin());
add.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
.stdin(Stdio::piped())
@@ -97,8 +200,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
let add_out = child.wait_with_output().unwrap();
assert!(add_out.status.success());
// get (text)
let mut get = Command::cargo_bin("gman").unwrap();
let mut get = Command::new(gman_bin());
get.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
.args(["get", "my_api_key"]);
@@ -106,8 +208,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
.success()
.stdout(predicate::str::contains("super_secret"));
// get as JSON
let mut get_json = Command::cargo_bin("gman").unwrap();
let mut get_json = Command::new(gman_bin());
get_json
.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
@@ -116,8 +217,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
predicate::str::contains("my_api_key").and(predicate::str::contains("super_secret")),
);
// list
let mut list = Command::cargo_bin("gman").unwrap();
let mut list = Command::new(gman_bin());
list.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
.arg("list");
@@ -125,8 +225,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
.success()
.stdout(predicate::str::contains("my_api_key"));
// update
let mut update = Command::cargo_bin("gman").unwrap();
let mut update = Command::new(gman_bin());
update
.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
@@ -142,8 +241,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
let upd_out = child.wait_with_output().unwrap();
assert!(upd_out.status.success());
// get again
let mut get2 = Command::cargo_bin("gman").unwrap();
let mut get2 = Command::new(gman_bin());
get2.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
.args(["get", "my_api_key"]);
@@ -151,15 +249,13 @@ fn cli_add_get_list_update_delete_roundtrip() {
.success()
.stdout(predicate::str::contains("new_val"));
// delete
let mut del = Command::cargo_bin("gman").unwrap();
let mut del = Command::new(gman_bin());
del.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
.args(["delete", "my_api_key"]);
del.assert().success();
// get should now fail
let mut get_missing = Command::cargo_bin("gman").unwrap();
let mut get_missing = Command::new(gman_bin());
get_missing
.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
@@ -169,13 +265,17 @@ fn cli_add_get_list_update_delete_roundtrip() {
#[test]
fn cli_wrap_dry_run_env_injection() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (td, xdg_cfg, xdg_cache) = setup_env();
let pw_file = td.path().join("pw.txt");
fs::write(&pw_file, b"pw\n").unwrap();
create_password_file(&pw_file, b"pw\n");
write_yaml_config(&xdg_cfg, &pw_file, Some("echo"));
// Add the secret so the profile can read it
let mut add = Command::cargo_bin("gman").unwrap();
let mut add = Command::new(gman_bin());
add.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
.stdin(Stdio::piped())
@@ -186,8 +286,7 @@ fn cli_wrap_dry_run_env_injection() {
let add_out = child.wait_with_output().unwrap();
assert!(add_out.status.success());
// Dry-run wrapping: prints preview command
let mut wrap = Command::cargo_bin("gman").unwrap();
let mut wrap = Command::new(gman_bin());
wrap.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache)
.arg("--dry-run")
+40 -10
View File
@@ -9,6 +9,7 @@ mod tests {
fn test_run_config_valid() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: None,
flag_position: None,
@@ -23,6 +24,7 @@ mod tests {
fn test_run_config_missing_name() {
let run_config = RunConfig {
name: None,
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: None,
flag_position: None,
@@ -37,6 +39,7 @@ mod tests {
fn test_run_config_missing_secrets() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: None,
flag: None,
flag_position: None,
@@ -51,6 +54,7 @@ mod tests {
fn test_run_config_invalid_flag_position() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: Some("--test-flag".to_string()),
flag_position: Some(0),
@@ -65,6 +69,7 @@ mod tests {
fn test_run_config_flags_or_none_all_some() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: Some("--test-flag".to_string()),
flag_position: Some(1),
@@ -79,6 +84,7 @@ mod tests {
fn test_run_config_flags_or_none_all_none() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: None,
flag_position: None,
@@ -93,6 +99,7 @@ mod tests {
fn test_run_config_flags_or_none_partial_some() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: Some("--test-flag".to_string()),
flag_position: None,
@@ -107,6 +114,7 @@ mod tests {
fn test_run_config_flags_or_none_missing_placeholder() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: Some("--test-flag".to_string()),
flag_position: Some(1),
@@ -121,6 +129,7 @@ mod tests {
fn test_run_config_flags_or_files_all_none() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: None,
flag_position: None,
@@ -135,6 +144,7 @@ mod tests {
fn test_run_config_flags_or_files_files_is_some() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: None,
flag_position: None,
@@ -149,6 +159,7 @@ mod tests {
fn test_run_config_flags_or_files_all_some() {
let run_config = RunConfig {
name: Some("test".to_string()),
provider: None,
secrets: Some(vec!["secret1".to_string()]),
flag: Some("--test-flag".to_string()),
flag_position: Some(1),
@@ -241,15 +252,34 @@ mod tests {
#[test]
fn test_config_local_provider_password_file() {
let path = Config::local_provider_password_file();
let expected_path = dirs::home_dir().map(|p| p.join(".gman_password"));
if let Some(p) = &expected_path {
if !p.exists() {
assert_eq!(path, None);
} else {
assert_eq!(path, expected_path);
}
} else {
assert_eq!(path, None);
}
// Derive expected filename based on current test executable name
let exe = std::env::current_exe().expect("current_exe");
let stem = exe
.file_stem()
.and_then(|s| s.to_str())
.expect("utf-8 file stem");
let expected = dirs::home_dir().map(|p| p.join(format!(".{}_password", stem)));
assert_eq!(Some(path), expected);
}
#[test]
fn test_config_duplicate_provider_names_is_invalid() {
    // Two providers sharing one name must fail config validation.
    let shared_name = Some("dup".into());
    let first = ProviderConfig {
        name: shared_name.clone(),
        ..Default::default()
    };
    let second = ProviderConfig {
        name: shared_name,
        ..Default::default()
    };
    let cfg = Config {
        default_provider: Some("dup".into()),
        providers: vec![first, second],
        run_configs: None,
    };
    assert!(cfg.validate().is_err());
}
}
+8
View File
@@ -0,0 +1,8 @@
# Seeds for failure cases proptest has generated in the past. It is
# automatically read and these particular cases re-run before any
# novel cases are generated.
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc 155469a45d7311cd4003e23a3bcdaa8e55879e6222c1b6313a2b1f0b563bb195 # shrinks to password = "", msg = " "
cc 0bc9f608677234c082d10ff51b15dc39b4c194cdf920b4d87e553467c93824ed # shrinks to password = "", msg = ""
+6 -7
View File
@@ -1,15 +1,15 @@
use base64::Engine;
use gman::{decrypt_string, encrypt_string};
use proptest::prelude::*;
proptest! {
#![proptest_config(ProptestConfig::with_cases(64))]
}
use secrecy::SecretString;
proptest! {
// Reduced case count because Argon2 key derivation is intentionally slow
// (65 MiB memory, 3 iterations per encryption/decryption)
#![proptest_config(ProptestConfig::with_cases(4))]
#[test]
fn prop_encrypt_decrypt_roundtrip(password in ".{0,64}", msg in ".{0,512}") {
fn prop_encrypt_decrypt_roundtrip(password in ".{1,64}", msg in ".{0,512}") {
let pw = SecretString::new(password.into());
let env = encrypt_string(pw.clone(), &msg).unwrap();
let out = decrypt_string(pw, &env).unwrap();
@@ -18,10 +18,9 @@ proptest! {
}
#[test]
fn prop_tamper_ciphertext_detected(password in ".{0,32}", msg in ".{1,128}") {
fn prop_tamper_ciphertext_detected(password in ".{1,32}", msg in ".{1,128}") {
let pw = SecretString::new(password.into());
let env = encrypt_string(pw.clone(), &msg).unwrap();
// Flip a bit in the ct payload segment
let mut parts: Vec<&str> = env.split(';').collect();
let ct_b64 = parts[6].strip_prefix("ct=").unwrap();
let mut ct = base64::engine::general_purpose::STANDARD.decode(ct_b64).unwrap();
+53
View File
@@ -0,0 +1,53 @@
use gman::config::{Config, ProviderConfig};
use gman::providers::{SecretProvider, SupportedProvider};
use pretty_assertions::{assert_eq, assert_str_eq};
use validator::Validate;
#[test]
fn test_gopass_supported_provider_display_and_validate_from_yaml() {
    // Deserialize the enum variant from YAML rather than importing the
    // concrete gopass type directly.
    let yaml = r#"---
type: gopass
store: personal
"#;
    let provider: SupportedProvider =
        serde_yaml::from_str(yaml).expect("valid supported provider yaml");
    // Display renders the variant's snake_case tag.
    assert_eq!(provider.to_string(), "gopass");
    // Validation delegates to the inner provider, which has no required fields.
    assert!(provider.validate().is_ok());
}
#[test]
fn test_provider_config_with_gopass_deserialize_and_extract() {
    // Minimal ProviderConfig YAML using the gopass variant.
    let yaml = r#"---
name: gopass
type: gopass
"#;
    // Bind mutably up front: extract_provider only needs `&mut self`, so
    // the intermediate `pc.clone()` the previous version made was a
    // redundant allocation (clippy: redundant_clone).
    let mut pc: ProviderConfig = serde_yaml::from_str(yaml).expect("valid provider config yaml");
    // Gopass has no required fields, so validation should pass.
    assert!(pc.validate().is_ok());
    // Extract the provider and inspect its name via the trait object.
    let provider: &mut dyn SecretProvider = pc.extract_provider();
    assert_str_eq!(provider.name(), "GopassProvider");

    // Round-trip through a full Config with a matching default_provider.
    let cfg_yaml = r#"---
default_provider: gopass
providers:
  - name: gopass
    type: gopass
    store: personal
"#;
    let cfg: Config = serde_yaml::from_str(cfg_yaml).expect("valid config yaml");
    assert!(cfg.validate().is_ok());
    // Passing None resolves the configured default provider.
    let extracted = cfg
        .extract_provider_config(None)
        .expect("should find default provider");
    assert_eq!(extracted.name.as_deref(), Some("gopass"));
}
+7 -4
View File
@@ -34,6 +34,7 @@ fn test_local_provider_valid() {
git_user_name: None,
git_user_email: Some("test@example.com".to_string()),
git_executable: None,
runtime_provider_name: None,
};
assert!(provider.validate().is_ok());
@@ -48,6 +49,7 @@ fn test_local_provider_invalid_email() {
git_user_name: None,
git_user_email: Some("test".to_string()),
git_executable: None,
runtime_provider_name: None,
};
assert!(config.validate().is_err());
@@ -56,10 +58,11 @@ fn test_local_provider_invalid_email() {
#[test]
fn test_local_provider_default() {
let provider = LocalProvider::default();
assert_eq!(
provider.password_file,
Config::local_provider_password_file()
);
let expected_pw = {
let p = Config::local_provider_password_file();
if p.exists() { Some(p) } else { None }
};
assert_eq!(provider.password_file, expected_pw);
assert_eq!(provider.git_branch, Some("main".into()));
assert_eq!(provider.git_remote_url, None);
assert_eq!(provider.git_user_name, None);
+2
View File
@@ -1,5 +1,7 @@
// Provider test suites — one module per supported secret backend,
// plus shared provider-level tests.
mod aws_secrets_manager_tests;
mod azure_key_vault_tests;
mod gcp_secret_manager_tests;
mod gopass_tests;
mod local_tests;
mod one_password_tests;
mod provider_tests;
+113
View File
@@ -0,0 +1,113 @@
use gman::config::{Config, ProviderConfig};
use gman::providers::{SecretProvider, SupportedProvider};
use pretty_assertions::{assert_eq, assert_str_eq};
use validator::Validate;
#[test]
fn test_one_password_supported_provider_display_and_validate_from_yaml() {
    // Fully-specified one_password provider: vault and account both set.
    let source = r#"---
type: one_password
vault: Production
account: my.1password.com
"#;
    let provider: SupportedProvider =
        serde_yaml::from_str(source).expect("valid supported provider yaml");

    // Both validation and the Display rendering of the variant must hold.
    assert!(provider.validate().is_ok());
    assert_eq!(provider.to_string(), "one_password");
}
#[test]
fn test_one_password_supported_provider_minimal_yaml() {
    // Only the type tag — every other one_password field is optional.
    let source = r#"---
type: one_password
"#;
    let provider: SupportedProvider =
        serde_yaml::from_str(source).expect("valid supported provider yaml");
    assert!(provider.validate().is_ok());
    assert_eq!(provider.to_string(), "one_password");
}
#[test]
fn test_one_password_supported_provider_vault_only() {
    // A vault without an account is still a valid configuration.
    let source = r#"---
type: one_password
vault: Personal
"#;
    let provider: SupportedProvider =
        serde_yaml::from_str(source).expect("valid supported provider yaml");
    assert!(provider.validate().is_ok());
}
#[test]
fn test_one_password_supported_provider_account_only() {
    // An account without a vault is also accepted.
    let source = r#"---
type: one_password
account: team.1password.com
"#;
    let provider: SupportedProvider =
        serde_yaml::from_str(source).expect("valid supported provider yaml");
    assert!(provider.validate().is_ok());
}
#[test]
fn test_one_password_supported_provider_rejects_unknown_fields() {
    // An unexpected key must make deserialization fail rather than be
    // silently ignored.
    let source = r#"---
type: one_password
vault: Production
unknown_field: bad
"#;
    let parsed: Result<SupportedProvider, _> = serde_yaml::from_str(source);
    assert!(parsed.is_err());
}
#[test]
fn test_provider_config_with_one_password_deserialize_and_extract() {
    // Minimal ProviderConfig YAML using the one_password variant.
    let yaml = r#"---
name: op
type: one_password
"#;
    // Bind mutably instead of cloning: extract_provider only needs
    // `&mut self`, so the intermediate `pc.clone()` the previous version
    // made was a redundant allocation (clippy: redundant_clone).
    let mut pc: ProviderConfig = serde_yaml::from_str(yaml).expect("valid provider config yaml");
    assert!(pc.validate().is_ok());
    // Extract the provider and inspect its name via the trait object.
    let provider: &mut dyn SecretProvider = pc.extract_provider();
    assert_str_eq!(provider.name(), "OnePasswordProvider");

    // Round-trip through a full Config and resolve the default provider.
    let cfg_yaml = r#"---
default_provider: op
providers:
  - name: op
    type: one_password
    vault: Production
    account: my.1password.com
"#;
    let cfg: Config = serde_yaml::from_str(cfg_yaml).expect("valid config yaml");
    assert!(cfg.validate().is_ok());
    // Passing None resolves the configured default provider.
    let extracted = cfg
        .extract_provider_config(None)
        .expect("should find default provider");
    assert_eq!(extracted.name.as_deref(), Some("op"));
}
#[test]
fn test_one_password_config_with_multiple_providers() {
    // Two providers configured; "local" is the default, but "op" is
    // requested explicitly by name.
    let cfg_yaml = r#"---
default_provider: local
providers:
  - name: local
    type: local
  - name: op
    type: one_password
    vault: Production
"#;
    let cfg: Config = serde_yaml::from_str(cfg_yaml).expect("valid config yaml");
    assert!(cfg.validate().is_ok());

    let op = cfg
        .extract_provider_config(Some("op".into()))
        .expect("should find op provider");
    assert_eq!(op.name.as_deref(), Some("op"));
}