30 Commits

Author SHA1 Message Date
github-actions[bot]
9f63ee8265 bump: version 0.4.0 → 0.4.1 [skip ci] 2026-03-20 22:04:37 +00:00
6cba3d6d0b feat: Upgraded aws-lc-sys version to address high severity CWE-295
Check / stable / fmt (push) Failing after 25s
Check / beta / clippy (push) Failing after 39s
Check / stable / clippy (push) Failing after 40s
Check / nightly / doc (push) Failing after 41s
Check / 1.89.0 / check (push) Failing after 41s
Test Suite / ubuntu / beta (push) Failing after 43s
Test Suite / ubuntu / stable (push) Failing after 42s
Test Suite / ubuntu / stable / coverage (push) Failing after 59s
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
2026-03-20 16:03:49 -06:00
b2a51dc1b1 docs: Cleaned up README formatting a tad (80 character column length)
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2026-03-09 17:35:22 -06:00
github-actions[bot]
cc44fca54e bump: version 0.3.0 → 0.4.0 [skip ci] 2026-03-09 23:06:23 +00:00
9a678ae67d build: Updated dependencies to address security issues
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2026-03-09 16:57:29 -06:00
66b950991c feat: Added 1password support
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2026-03-09 16:33:57 -06:00
e8e0bd02e9 docs: created an authorship policy and pull request template to require disclosure of AI coding assistance for all contributions 2026-02-24 17:48:28 -07:00
ed5a7308be build: Migrated from Makefile to justfile
Check / stable / fmt (push) Successful in 9m56s
Check / beta / clippy (push) Failing after 40s
Check / stable / clippy (push) Failing after 39s
Check / nightly / doc (push) Failing after 36s
Check / 1.89.0 / check (push) Failing after 39s
Test Suite / ubuntu / beta (push) Failing after 39s
Test Suite / ubuntu / stable (push) Failing after 38s
Test Suite / ubuntu / stable / coverage (push) Failing after 1m4s
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
2026-02-02 10:40:52 -07:00
044d5960eb feat: sort local keys alphabetically when listing them
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2026-02-02 10:36:56 -07:00
github-actions[bot]
c0aa379b20 bump: version 0.2.3 → 0.3.0 [skip ci] 2026-02-02 01:08:03 +00:00
f9fd9692aa build: Modified integration tests so they don't run when cross-compiling to non-x86 systems
Check / stable / fmt (push) Successful in 9m54s
Check / beta / clippy (push) Failing after 39s
Check / stable / clippy (push) Failing after 40s
Check / nightly / doc (push) Failing after 37s
Check / 1.89.0 / check (push) Failing after 38s
Test Suite / ubuntu / beta (push) Failing after 38s
Test Suite / ubuntu / stable (push) Failing after 39s
Test Suite / ubuntu / stable / coverage (push) Failing after 1m3s
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
2026-02-01 18:03:51 -07:00
2615b23d6e test: Removed deprecated function calls from cli_tests module and sped up proptests
Check / stable / fmt (push) Successful in 9m55s
Check / beta / clippy (push) Failing after 38s
Check / stable / clippy (push) Failing after 39s
Check / nightly / doc (push) Failing after 37s
Check / 1.89.0 / check (push) Failing after 38s
Test Suite / ubuntu / beta (push) Failing after 38s
Test Suite / ubuntu / stable (push) Failing after 39s
Test Suite / ubuntu / stable / coverage (push) Failing after 1m28s
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
2026-02-01 17:14:24 -07:00
628a13011e build: upgraded to the most recent Azure SDK version 2026-02-01 16:44:28 -07:00
cff4420ee0 fix: Upgraded AWS dependencies to address CWE-20 2026-02-01 16:15:41 -07:00
9944e29ef0 fix: A critical security flaw was discovered that essentially had all local secrets be encrypted with an all-zero key 2026-02-01 16:15:13 -07:00
c95bae1761 fix: Addressed XNonce::from_slice deprecation warning 2026-02-01 14:48:37 -07:00
21da7b782e fix: Secrets are now stored exactly as passed without newlines stripped 2026-02-01 14:47:43 -07:00
d038930ce5 docs: fixed a typo in the mac/linux install script command
Check / stable / fmt (push) Has been cancelled
Check / beta / clippy (push) Has been cancelled
Check / stable / clippy (push) Has been cancelled
Check / nightly / doc (push) Has been cancelled
Check / 1.89.0 / check (push) Has been cancelled
Test Suite / ubuntu / beta (push) Has been cancelled
Test Suite / ubuntu / stable (push) Has been cancelled
Test Suite / macos-latest / stable (push) Has been cancelled
Test Suite / windows-latest / stable (push) Has been cancelled
Test Suite / ubuntu / stable / coverage (push) Has been cancelled
2025-11-07 11:39:04 -07:00
github-actions[bot]
f0fc829a73 chore: bump Cargo.toml to 0.2.3 2025-10-14 23:32:36 +00:00
github-actions[bot]
ba0f108aa8 bump: version 0.2.2 → 0.2.3 [skip ci] 2025-10-14 23:32:32 +00:00
6daa6fd2f2 refactor: Refactored the library for gman so that it dynamically names config and password files to be used across any application 2025-10-14 17:12:43 -06:00
5fa4dbfe89 Merge remote-tracking branch 'origin/main' 2025-10-07 10:59:00 -06:00
bdcd496046 docs: fixed typo in code of conduct 2025-10-07 10:58:52 -06:00
github-actions[bot]
e37b80a262 bump: version 0.2.1 → 0.2.2 [skip ci] 2025-09-30 22:03:17 +00:00
3ce62c272e build: Updated changelog format 2025-09-30 15:42:41 -06:00
21b771507c Merge remote-tracking branch 'origin/main' 2025-09-30 15:40:36 -06:00
508c8b7feb style: Reformatted code 2025-09-30 15:40:27 -06:00
github-actions[bot]
33a889fa67 chore: bump Cargo.toml to 0.2.2 2025-09-30 21:37:14 +00:00
github-actions[bot]
7ddb7812fc bump: version 0.2.1 → 0.2.2 [skip ci] 2025-09-30 21:37:04 +00:00
9e11648a7c refactor: Environment variable interpolation in config file works globally, not based on type 2025-09-30 15:35:48 -06:00
29 changed files with 2056 additions and 1375 deletions
@@ -0,0 +1,11 @@
### AI assistance (if any):
- List tools here and files touched by them
### Authorship & Understanding
- [ ] I wrote or heavily modified this code myself
- [ ] I understand how it works end-to-end
- [ ] I can maintain this code in the future
- [ ] No undisclosed AI-generated code was used
- [ ] If AI assistance was used, it is documented below
+34
View File
@@ -5,6 +5,40 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## v0.4.1 (2026-03-20)
### Feat
- Upgraded aws-lc-sys version to address high severity CWE-295
## v0.4.0 (2026-03-09)
### Feat
- Added 1password support
- sort local keys alphabetically when listing them
## v0.3.0 (2026-02-02)
### Fix
- Upgraded AWS dependencies to address CWE-20
- A critical security flaw was discovered that essentially had all local secrets be encrypted with an all-zero key
- Addressed XNonce::from_slice deprecation warning
- Secrets are now stored exactly as passed without newlines stripped
## v0.2.3 (2025-10-14)
### Refactor
- Refactored the library for gman so that it dynamically names config and password files to be used across any application
## v0.2.2 (2025-09-30)
### Refactor
- Environment variable interpolation in config file works globally, not based on type
## v0.2.1 (2025-09-30) ## v0.2.1 (2025-09-30)
### Feat ### Feat
+1 -1
View File
@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at reported to the community leaders responsible for enforcement at
d4udts@gmail.com. alex.j.tusa@gmail.com.
All complaints will be reviewed and investigated promptly and fairly. All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the All community leaders are obligated to respect the privacy and security of the
+9 -1
View File
@@ -48,7 +48,8 @@ cz commit
1. Clone this repo 1. Clone this repo
2. Run `cargo test` to set up hooks 2. Run `cargo test` to set up hooks
3. Make changes 3. Make changes
4. Run the application using `make run` or `cargo run` 4. Run the application using `just run` or `cargo run`
- Install `just` (`cargo install just`) if you haven't already to use the [justfile](./justfile) in this project.
5. Commit changes. This will trigger pre-commit hooks that will run format, test and lint. If there are errors or 5. Commit changes. This will trigger pre-commit hooks that will run format, test and lint. If there are errors or
warnings from Clippy, please fix them. warnings from Clippy, please fix them.
6. Push your code to a new branch named after the feature/bug/etc. you're adding. This will trigger pre-push hooks that 6. Push your code to a new branch named after the feature/bug/etc. you're adding. This will trigger pre-push hooks that
@@ -75,6 +76,13 @@ Then, you can run workflows locally without having to commit and see if the GitH
act -W .github/workflows/release.yml --input_type bump=minor act -W .github/workflows/release.yml --input_type bump=minor
``` ```
## Authorship Policy
All code in this repository is written and reviewed by humans. AI-generated code (e.g., Copilot, ChatGPT,
Claude, etc.) is not permitted unless explicitly disclosed and approved.
Submissions must certify that the contributor understands and can maintain the code they submit.
## Questions? Reach out to me! ## Questions? Reach out to me!
If you encounter any questions while developing G-Man, please don't hesitate to reach out to me at If you encounter any questions while developing G-Man, please don't hesitate to reach out to me at
alex.j.tusa@gmail.com. I'm happy to help contributors in any way I can, regardless of if they're new or experienced! alex.j.tusa@gmail.com. I'm happy to help contributors in any way I can, regardless of if they're new or experienced!
Generated
+1000 -789
View File
File diff suppressed because it is too large Load Diff
+9 -8
View File
@@ -1,6 +1,6 @@
[package] [package]
name = "gman" name = "gman"
version = "0.2.1" version = "0.4.1"
edition = "2024" edition = "2024"
authors = ["Alex Clarke <alex.j.tusa@gmail.com>"] authors = ["Alex Clarke <alex.j.tusa@gmail.com>"]
description = "Universal command line secret management and injection tool" description = "Universal command line secret management and injection tool"
@@ -32,7 +32,7 @@ clap = { version = "4.5.47", features = [
"wrap_help", "wrap_help",
] } ] }
clap_complete = { version = "4.5.57", features = ["unstable-dynamic"] } clap_complete = { version = "4.5.57", features = ["unstable-dynamic"] }
confy = { version = "1.0.0", default-features = false, features = [ confy = { version = "2.0.0", default-features = false, features = [
"yaml_conf", "yaml_conf",
] } ] }
crossterm = "0.29.0" crossterm = "0.29.0"
@@ -53,18 +53,19 @@ indoc = "2.0.6"
regex = "1.11.2" regex = "1.11.2"
serde_yaml = "0.9.34" serde_yaml = "0.9.34"
tempfile = "3.22.0" tempfile = "3.22.0"
aws-sdk-secretsmanager = "1.88.0" aws-sdk-secretsmanager = "1.98.0"
tokio = { version = "1.47.1", features = ["full"] } tokio = { version = "1.47.1", features = ["full"] }
aws-config = { version = "1.8.6", features = ["behavior-version-latest"] } aws-config = { version = "1.8.12", features = ["behavior-version-latest"] }
async-trait = "0.1.89" async-trait = "0.1.89"
futures = "0.3.31" futures = "0.3.31"
gcloud-sdk = { version = "0.28.1", features = [ gcloud-sdk = { version = "0.28.5", features = [
"google-cloud-secretmanager-v1", "google-cloud-secretmanager-v1",
] } ] }
crc32c = "0.6.8" crc32c = "0.6.8"
azure_identity = "0.27.0" azure_core = "0.31.0"
azure_security_keyvault_secrets = "0.6.0" azure_identity = "0.31.0"
aws-lc-sys = { version = "0.31.0", features = ["bindgen"] } azure_security_keyvault_secrets = "0.10.0"
aws-lc-sys = { version = "0.39.0", features = ["bindgen"] }
which = "8.0.0" which = "8.0.0"
once_cell = "1.21.3" once_cell = "1.21.3"
-40
View File
@@ -1,40 +0,0 @@
#!make
default: run
.PHONY: test test-cov build run lint lint-fix fmt minimal-versions analyze release delete-tag
test:
@cargo test --all
## Run all tests with coverage - `cargo install cargo-tarpaulin`
test-cov:
@cargo tarpaulin
build: test
@cargo build --release
run:
@CARGO_INCREMENTAL=1 cargo fmt && make lint && cargo run
lint:
@find . | grep '\.\/src\/.*\.rs$$' | xargs touch && CARGO_INCREMENTAL=0 cargo clippy --all-targets --workspace
lint-fix:
@cargo fix
fmt:
@cargo fmt
minimal-versions:
@cargo +nightly update -Zdirect-minimal-versions
## Analyze for unsafe usage - `cargo install cargo-geiger`
analyze:
@cargo geiger
release:
@git tag -a ${V} -m "Release ${V}" && git push origin ${V}
delete-tag:
@git tag -d ${V} && git push --delete origin ${V}
+43 -8
View File
@@ -14,8 +14,8 @@ files or sprinkling environment variables everywhere.
## Overview ## Overview
`gman` acts as a universal wrapper for any command that needs credentials. Store your secrets—API tokens, passwords, `gman` acts as a universal wrapper for any command that needs credentials. Store your secrets (e.g. API tokens, passwords,
certs—with a provider, then either fetch them directly or run your command through `gman` to inject what it needs as certs, etc.) with a provider, then either fetch them directly or run your command through `gman` to inject what it needs as
environment variables, flags, or file content. environment variables, flags, or file content.
## Quick Examples: Before vs After ## Quick Examples: Before vs After
@@ -89,12 +89,14 @@ gman aws sts get-caller-identity
- [Features](#features) - [Features](#features)
- [Installation](#installation) - [Installation](#installation)
- [Configuration](#configuration) - [Configuration](#configuration)
- [Environment Variable Interpolation](#environment-variable-interpolation)
- [Providers](#providers) - [Providers](#providers)
- [Local](#provider-local) - [Local](#provider-local)
- [AWS Secrets Manager](#provider-aws_secrets_manager) - [AWS Secrets Manager](#provider-aws_secrets_manager)
- [GCP Secret Manager](#provider-gcp_secret_manager) - [GCP Secret Manager](#provider-gcp_secret_manager)
- [Azure Key Vault](#provider-azure_key_vault) - [Azure Key Vault](#provider-azure_key_vault)
- [Gopass](#provider-gopass) - [Gopass](#provider-gopass)
- [1Password](#provider-one_password)
- [Run Configurations](#run-configurations) - [Run Configurations](#run-configurations)
- [Specifying a Default Provider per Run Config](#specifying-a-default-provider-per-run-config) - [Specifying a Default Provider per Run Config](#specifying-a-default-provider-per-run-config)
- [Environment Variable Secret Injection](#environment-variable-secret-injection) - [Environment Variable Secret Injection](#environment-variable-secret-injection)
@@ -141,7 +143,7 @@ You can use the following command to run a bash script that downloads and instal
OS (Linux/MacOS) and architecture (x86_64/arm64): OS (Linux/MacOS) and architecture (x86_64/arm64):
```shell ```shell
curl -fsSL https://raw.githubusercontent.com/Dark-Alex-17/gman/main/install.sh | bash curl -fsSL https://raw.githubusercontent.com/Dark-Alex-17/gman/main/install_gman.sh | bash
``` ```
#### Windows/Linux/MacOS (`PowerShell`) #### Windows/Linux/MacOS (`PowerShell`)
@@ -264,9 +266,6 @@ providers:
aws_region: us-east-1 aws_region: us-east-1
``` ```
**Important Note:** Environment variable interpolation is only supported in string or numeric fields. It is not
supported in lists or maps.
## Providers ## Providers
`gman` supports multiple providers for secret storage. The default provider is `local`, which stores secrets in an `gman` supports multiple providers for secret storage. The default provider is `local`, which stores secrets in an
encrypted file on your filesystem. The CLI and config format are designed to be extensible so new providers can be encrypted file on your filesystem. The CLI and config format are designed to be extensible so new providers can be
@@ -289,7 +288,7 @@ documented and added without breaking existing setups. The following table shows
| [`azure_key_vault`](https://azure.microsoft.com/en-us/products/key-vault/) | ✅ | [Azure Key Vault](#provider-azure_key_vault) | | | [`azure_key_vault`](https://azure.microsoft.com/en-us/products/key-vault/) | ✅ | [Azure Key Vault](#provider-azure_key_vault) | |
| [`gcp_secret_manager`](https://cloud.google.com/security/products/secret-manager?hl=en) | ✅ | [GCP Secret Manager](#provider-gcp_secret_manager) | | | [`gcp_secret_manager`](https://cloud.google.com/security/products/secret-manager?hl=en) | ✅ | [GCP Secret Manager](#provider-gcp_secret_manager) | |
| [`gopass`](https://www.gopass.pw/) | ✅ | | | | [`gopass`](https://www.gopass.pw/) | ✅ | | |
| [`1password`](https://1password.com/) | 🕒 | | | | [`1password`](https://1password.com/) | ✅ | [1Password](#provider-one_password) | |
| [`bitwarden`](https://bitwarden.com/) | 🕒 | | | | [`bitwarden`](https://bitwarden.com/) | 🕒 | | |
| [`dashlane`](https://www.dashlane.com/) | 🕒 | | Waiting for CLI support for adding secrets | | [`dashlane`](https://www.dashlane.com/) | 🕒 | | Waiting for CLI support for adding secrets |
| [`lastpass`](https://www.lastpass.com/) | 🕒 | | | | [`lastpass`](https://www.lastpass.com/) | 🕒 | | |
@@ -452,6 +451,42 @@ Important notes:
- Secrets are managed using gopass's native commands; `gman` acts as a wrapper to interface with gopass. - Secrets are managed using gopass's native commands; `gman` acts as a wrapper to interface with gopass.
- Updates overwrite existing secrets - Updates overwrite existing secrets
- If no store is specified, the default gopass store is used and `gman sync` will sync with all configured stores. - If no store is specified, the default gopass store is used and `gman sync` will sync with all configured stores.
### Provider: `one_password`
The `one_password` provider uses the [1Password CLI (`op`)](https://developer.1password.com/docs/cli/) as the backing
storage location for secrets.
- Optional: `vault` (string) to specify which 1Password vault to use. If omitted, the default vault is used.
- Optional: `account` (string) to specify which 1Password account to use. Useful if you have multiple accounts. If
omitted, the default signed-in account is used.
Configuration example:
```yaml
default_provider: op
providers:
- name: op
type: one_password
vault: Production # Optional; if omitted, uses the default vault
account: my.1password.com # Optional; if omitted, uses the default account
```
Authentication:
- **Interactive**: Run `op signin` to sign in interactively.
- **Service Account**: Set the `OP_SERVICE_ACCOUNT_TOKEN` environment variable for non-interactive/CI usage.
- **Desktop App Integration**: If the 1Password desktop app is installed and configured, the CLI can use biometric
authentication (Touch ID, Windows Hello, etc.).
Important notes:
- Ensure the 1Password CLI (`op`) is installed on your system. Install instructions are at
https://developer.1password.com/docs/cli/get-started/.
- Secrets are stored as 1Password Password items. The item title is the secret name and the `password` field holds the
secret value.
- **Deletions are permanent. Deleted items are not archived.**
- `add` creates a new Password item. If an item with the same title already exists in the vault, `op` will create a
duplicate. Use `update` to change an existing secret value.
- `list` returns the titles of all items in the configured vault.
## Run Configurations ## Run Configurations
Run configurations (or "profiles") tell `gman` how to inject secrets into a command. Three modes of secret injection are Run configurations (or "profiles") tell `gman` how to inject secrets into a command. Three modes of secret injection are
@@ -659,7 +694,7 @@ gman managarr
### Multiple Providers and Switching ### Multiple Providers and Switching
You can define multiple providers—even multiple of the same type—and switch between them per command. You can define multiple providers (even multiple of the same type) and switch between them per command.
Example: two AWS Secrets Manager providers named `lab` and `prod`. Example: two AWS Secrets Manager providers named `lab` and `prod`.
+35
View File
@@ -0,0 +1,35 @@
# List all recipes
default:
@just --list
# Format all files
[group: 'style']
fmt:
@cargo fmt --all
alias clippy := lint
# Run Clippy to inspect all files
[group: 'style']
lint:
@cargo clippy --all
alias clippy-fix := lint-fix
# Automatically fix clippy issues where possible
[group: 'style']
lint-fix:
@cargo fix
# Run all tests
[group: 'test']
test:
@cargo test --all
# Build and run the binary for the current system
run:
@cargo run
# Build the project for the current system architecture
[group: 'build']
[arg('build_type', pattern="debug|release")]
build build_type='debug':
@cargo build {{ if build_type == "release" { "--release" } else { "" } }}
+13 -9
View File
@@ -257,7 +257,7 @@ pub fn parse_args(
pub fn run_config_completer(current: &OsStr) -> Vec<CompletionCandidate> { pub fn run_config_completer(current: &OsStr) -> Vec<CompletionCandidate> {
let cur = current.to_string_lossy(); let cur = current.to_string_lossy();
match load_config() { match load_config(true) {
Ok(config) => { Ok(config) => {
if let Some(run_configs) = config.run_configs { if let Some(run_configs) = config.run_configs {
run_configs run_configs
@@ -282,7 +282,7 @@ pub fn run_config_completer(current: &OsStr) -> Vec<CompletionCandidate> {
pub fn provider_completer(current: &OsStr) -> Vec<CompletionCandidate> { pub fn provider_completer(current: &OsStr) -> Vec<CompletionCandidate> {
let cur = current.to_string_lossy(); let cur = current.to_string_lossy();
match load_config() { match load_config(true) {
Ok(config) => config Ok(config) => config
.providers .providers
.iter() .iter()
@@ -300,7 +300,7 @@ pub fn provider_completer(current: &OsStr) -> Vec<CompletionCandidate> {
pub fn secrets_completer(current: &OsStr) -> Vec<CompletionCandidate> { pub fn secrets_completer(current: &OsStr) -> Vec<CompletionCandidate> {
let cur = current.to_string_lossy(); let cur = current.to_string_lossy();
match load_config() { match load_config(true) {
Ok(config) => { Ok(config) => {
let mut provider_config = match config.extract_provider_config(None) { let mut provider_config = match config.extract_provider_config(None) {
Ok(pc) => pc, Ok(pc) => pc,
@@ -323,6 +323,7 @@ pub fn secrets_completer(current: &OsStr) -> Vec<CompletionCandidate> {
mod tests { mod tests {
use super::*; use super::*;
use crate::cli::generate_files_secret_injections; use crate::cli::generate_files_secret_injections;
use gman::config::get_config_file_path;
use gman::config::{Config, RunConfig}; use gman::config::{Config, RunConfig};
use pretty_assertions::{assert_eq, assert_str_eq}; use pretty_assertions::{assert_eq, assert_str_eq};
use serial_test::serial; use serial_test::serial;
@@ -436,9 +437,10 @@ mod tests {
fn test_run_config_completer_filters_by_prefix() { fn test_run_config_completer_filters_by_prefix() {
let td = tempdir().unwrap(); let td = tempdir().unwrap();
let xdg = td.path().join("xdg"); let xdg = td.path().join("xdg");
let app_dir = xdg.join("gman");
fs::create_dir_all(&app_dir).unwrap();
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) }; unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
let cfg_path = get_config_file_path().unwrap();
let app_dir = cfg_path.parent().unwrap().to_path_buf();
fs::create_dir_all(&app_dir).unwrap();
let yaml = indoc::indoc! { let yaml = indoc::indoc! {
"--- "---
@@ -471,9 +473,10 @@ mod tests {
fn test_provider_completer_lists_matching_providers() { fn test_provider_completer_lists_matching_providers() {
let td = tempdir().unwrap(); let td = tempdir().unwrap();
let xdg = td.path().join("xdg"); let xdg = td.path().join("xdg");
let app_dir = xdg.join("gman");
fs::create_dir_all(&app_dir).unwrap();
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) }; unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
let cfg_path = get_config_file_path().unwrap();
let app_dir = cfg_path.parent().unwrap().to_path_buf();
fs::create_dir_all(&app_dir).unwrap();
let yaml = indoc::indoc! { let yaml = indoc::indoc! {
"--- "---
@@ -508,9 +511,10 @@ mod tests {
async fn test_secrets_completer_filters_keys_by_prefix() { async fn test_secrets_completer_filters_keys_by_prefix() {
let td = tempdir().unwrap(); let td = tempdir().unwrap();
let xdg = td.path().join("xdg"); let xdg = td.path().join("xdg");
let app_dir = xdg.join("gman");
fs::create_dir_all(&app_dir).unwrap();
unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) }; unsafe { std_env::set_var("XDG_CONFIG_HOME", &xdg) };
let cfg_path = get_config_file_path().unwrap();
let app_dir = cfg_path.parent().unwrap().to_path_buf();
fs::create_dir_all(&app_dir).unwrap();
let yaml = indoc::indoc! { let yaml = indoc::indoc! {
"--- "---
+54 -16
View File
@@ -116,6 +116,12 @@ enum Commands {
/// Sync secrets with remote storage (if supported by the provider) /// Sync secrets with remote storage (if supported by the provider)
Sync {}, Sync {},
// TODO: Remove once all users have migrated their local vaults
/// Migrate local vault secrets to the current secure encryption format.
/// This is only needed if you have secrets encrypted with older versions of gman.
/// Only works with the local provider.
Migrate {},
/// Open and edit the config file in the default text editor /// Open and edit the config file in the default text editor
Config {}, Config {},
@@ -123,13 +129,6 @@ enum Commands {
/// configured in a corresponding run profile /// configured in a corresponding run profile
#[command(external_subcommand)] #[command(external_subcommand)]
External(Vec<OsString>), External(Vec<OsString>),
/// Generate shell completion scripts
Completions {
/// The shell to generate the script for
#[arg(value_enum)]
shell: clap_complete::Shell,
},
} }
#[tokio::main] #[tokio::main]
@@ -157,7 +156,7 @@ async fn main() -> Result<()> {
exit(1); exit(1);
} }
let config = load_config()?; let config = load_config(true)?;
let mut provider_config = config.extract_provider_config(cli.provider.clone())?; let mut provider_config = config.extract_provider_config(cli.provider.clone())?;
let secrets_provider = provider_config.extract_provider(); let secrets_provider = provider_config.extract_provider();
@@ -166,7 +165,7 @@ async fn main() -> Result<()> {
let plaintext = let plaintext =
read_all_stdin().with_context(|| "unable to read plaintext from stdin")?; read_all_stdin().with_context(|| "unable to read plaintext from stdin")?;
secrets_provider secrets_provider
.set_secret(&name, plaintext.trim_end()) .set_secret(&name, &plaintext)
.await .await
.map(|_| match cli.output { .map(|_| match cli.output {
Some(_) => (), Some(_) => (),
@@ -197,7 +196,7 @@ async fn main() -> Result<()> {
let plaintext = let plaintext =
read_all_stdin().with_context(|| "unable to read plaintext from stdin")?; read_all_stdin().with_context(|| "unable to read plaintext from stdin")?;
secrets_provider secrets_provider
.update_secret(&name, plaintext.trim_end()) .update_secret(&name, &plaintext)
.await .await
.map(|_| match cli.output { .map(|_| match cli.output {
Some(_) => (), Some(_) => (),
@@ -238,7 +237,8 @@ async fn main() -> Result<()> {
} }
} }
Commands::Config {} => { Commands::Config {} => {
let config_yaml = serde_yaml::to_string(&config) let uninterpolated_config = load_config(false)?;
let config_yaml = serde_yaml::to_string(&uninterpolated_config)
.with_context(|| "failed to serialize existing configuration")?; .with_context(|| "failed to serialize existing configuration")?;
let new_config = Editor::new() let new_config = Editor::new()
.edit(&config_yaml) .edit(&config_yaml)
@@ -264,14 +264,52 @@ async fn main() -> Result<()> {
} }
})?; })?;
} }
// TODO: Remove once all users have migrated their local vaults
Commands::Migrate {} => {
use gman::providers::SupportedProvider;
use gman::providers::local::LocalProvider;
let provider_config_for_migrate =
config.extract_provider_config(cli.provider.clone())?;
let local_provider: LocalProvider = match provider_config_for_migrate.provider_type {
SupportedProvider::Local { provider_def } => provider_def,
_ => {
anyhow::bail!("The migrate command only works with the local provider.");
}
};
println!("Migrating vault secrets to current secure format...");
let result = local_provider.migrate_vault().await?;
if result.total == 0 {
println!("Vault is empty, nothing to migrate.");
} else {
println!(
"Migration complete: {} total, {} migrated, {} already current",
result.total, result.migrated, result.already_current
);
if !result.failed.is_empty() {
eprintln!("\n⚠ Failed to migrate {} secret(s):", result.failed.len());
for (key, error) in &result.failed {
eprintln!(" - {}: {}", key, error);
}
}
if result.migrated > 0 {
println!(
"\n✓ Successfully migrated {} secret(s) to the secure format.",
result.migrated
);
} else if result.failed.is_empty() {
println!("\n✓ All secrets are already using the current secure format.");
}
}
}
Commands::External(tokens) => { Commands::External(tokens) => {
wrap_and_run_command(cli.provider, &config, tokens, cli.profile, cli.dry_run).await?; wrap_and_run_command(cli.provider, &config, tokens, cli.profile, cli.dry_run).await?;
} }
Commands::Completions { shell } => {
let mut cmd = Cli::command();
let bin_name = cmd.get_name().to_string();
clap_complete::generate(shell, &mut cmd, bin_name, &mut io::stdout());
}
} }
Ok(()) Ok(())
+2 -2
View File
@@ -46,7 +46,7 @@ pub fn init_logging_config() -> log4rs::Config {
pub fn get_log_path() -> PathBuf { pub fn get_log_path() -> PathBuf {
let base_dir = dirs::cache_dir().unwrap_or_else(env::temp_dir); let base_dir = dirs::cache_dir().unwrap_or_else(env::temp_dir);
let log_dir = base_dir.join("gman"); let log_dir = base_dir.join(env!("CARGO_CRATE_NAME"));
let dir = if let Err(e) = fs::create_dir_all(&log_dir) { let dir = if let Err(e) = fs::create_dir_all(&log_dir) {
eprintln!( eprintln!(
@@ -77,7 +77,7 @@ pub fn persist_config_file(config: &Config) -> Result<()> {
fs::write(&config_path, s) fs::write(&config_path, s)
.with_context(|| format!("failed to write {}", config_path.display()))?; .with_context(|| format!("failed to write {}", config_path.display()))?;
} else { } else {
confy::store("gman", "config", config) confy::store(env!("CARGO_CRATE_NAME"), "config", config)
.with_context(|| "failed to save updated config via confy")?; .with_context(|| "failed to save updated config via confy")?;
} }
+42 -326
View File
@@ -21,6 +21,7 @@
//! rc.validate().unwrap(); //! rc.validate().unwrap();
//! ``` //! ```
use crate::calling_app_name;
use crate::providers::local::LocalProvider; use crate::providers::local::LocalProvider;
use crate::providers::{SecretProvider, SupportedProvider}; use crate::providers::{SecretProvider, SupportedProvider};
use anyhow::{Context, Result}; use anyhow::{Context, Result};
@@ -46,19 +47,14 @@ use validator::{Validate, ValidationError};
#[validate(schema(function = "flags_or_files"))] #[validate(schema(function = "flags_or_files"))]
pub struct RunConfig { pub struct RunConfig {
#[validate(required)] #[validate(required)]
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub name: Option<String>, pub name: Option<String>,
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub provider: Option<String>, pub provider: Option<String>,
#[validate(required)] #[validate(required)]
pub secrets: Option<Vec<String>>, pub secrets: Option<Vec<String>>,
pub files: Option<Vec<PathBuf>>, pub files: Option<Vec<PathBuf>>,
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub flag: Option<String>, pub flag: Option<String>,
#[validate(range(min = 1))] #[validate(range(min = 1))]
#[serde(default, deserialize_with = "deserialize_optional_usize_env_var")]
pub flag_position: Option<usize>, pub flag_position: Option<usize>,
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub arg_format: Option<String>, pub arg_format: Option<String>,
} }
@@ -173,6 +169,10 @@ impl ProviderConfig {
debug!("Using Gopass provider"); debug!("Using Gopass provider");
provider_def provider_def
} }
SupportedProvider::OnePassword { provider_def } => {
debug!("Using 1Password provider");
provider_def
}
} }
} }
} }
@@ -198,7 +198,6 @@ impl ProviderConfig {
#[validate(schema(function = "default_provider_exists"))] #[validate(schema(function = "default_provider_exists"))]
#[validate(schema(function = "providers_names_are_unique"))] #[validate(schema(function = "providers_names_are_unique"))]
pub struct Config { pub struct Config {
#[serde(deserialize_with = "deserialize_optional_env_var")]
pub default_provider: Option<String>, pub default_provider: Option<String>,
#[validate(length(min = 1))] #[validate(length(min = 1))]
#[validate(nested)] #[validate(nested)]
@@ -274,48 +273,49 @@ impl Config {
/// Discover the default password file for the local provider. /// Discover the default password file for the local provider.
/// ///
/// On most systems this resolves to `~/.gman_password` when the file /// On most systems this resolves to `~/.<executable_name>_password`
/// exists, otherwise `None`. pub fn local_provider_password_file() -> PathBuf {
pub fn local_provider_password_file() -> Option<PathBuf> { dirs::home_dir()
let candidate = dirs::home_dir().map(|p| p.join(".gman_password")); .map(|p| p.join(format!(".{}_password", calling_app_name())))
match candidate { .expect("unable to determine home directory for local provider password file")
Some(p) if p.exists() => Some(p),
_ => None,
}
} }
} }
/// Load and validate the application configuration. /// Load and validate the application configuration.
/// ///
/// This uses the `confy` crate to load the configuration from a file /// This uses the `confy` crate to load the configuration from a file
/// (e.g. `~/.config/gman/config.yaml`). If the file does /// (e.g. `~/.config/<executable_name>/config.yaml`). If the file does
/// not exist, a default configuration is created and saved. /// not exist, a default configuration is created and saved.
/// ///
/// ```no_run /// ```no_run
/// # use gman::config::load_config; /// # use gman::config::load_config;
/// let config = load_config().unwrap(); /// // Load config with environment variable interpolation enabled
/// let config = load_config(true).unwrap();
/// println!("loaded config: {:?}", config); /// println!("loaded config: {:?}", config);
/// ``` /// ```
pub fn load_config() -> Result<Config> { pub fn load_config(interpolate: bool) -> Result<Config> {
let xdg_path = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from); let xdg_path = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from);
let mut config: Config = if let Some(base) = xdg_path.as_ref() { let mut config: Config = if let Some(base) = xdg_path.as_ref() {
let app_dir = base.join("gman"); let app_dir = base.join(calling_app_name());
let yml = app_dir.join("config.yml"); let yml = app_dir.join("config.yml");
let yaml = app_dir.join("config.yaml"); let yaml = app_dir.join("config.yaml");
if yml.exists() || yaml.exists() { if yml.exists() || yaml.exists() {
let load_path = if yml.exists() { &yml } else { &yaml }; let load_path = if yml.exists() { &yml } else { &yaml };
let content = fs::read_to_string(load_path) let mut content = fs::read_to_string(load_path)
.with_context(|| format!("failed to read config file '{}'", load_path.display()))?; .with_context(|| format!("failed to read config file '{}'", load_path.display()))?;
if interpolate {
content = interpolate_env_vars(&content);
}
let cfg: Config = serde_yaml::from_str(&content).with_context(|| { let cfg: Config = serde_yaml::from_str(&content).with_context(|| {
format!("failed to parse YAML config at '{}'", load_path.display()) format!("failed to parse YAML config at '{}'", load_path.display())
})?; })?;
cfg cfg
} else { } else {
confy::load("gman", "config")? load_confy_config(interpolate)?
} }
} else { } else {
confy::load("gman", "config")? load_confy_config(interpolate)?
}; };
config.validate()?; config.validate()?;
@@ -329,19 +329,32 @@ pub fn load_config() -> Result<Config> {
ref mut provider_def, ref mut provider_def,
} = p.provider_type } = p.provider_type
&& provider_def.password_file.is_none() && provider_def.password_file.is_none()
&& let Some(local_password_file) = Config::local_provider_password_file() && Config::local_provider_password_file().exists()
{ {
provider_def.password_file = Some(local_password_file); provider_def.password_file = Some(Config::local_provider_password_file());
} }
}); });
Ok(config) Ok(config)
} }
fn load_confy_config(interpolate: bool) -> Result<Config> {
let load_path = confy::get_configuration_file_path(&calling_app_name(), "config")?;
let mut content = fs::read_to_string(&load_path)
.with_context(|| format!("failed to read config file '{}'", load_path.display()))?;
if interpolate {
content = interpolate_env_vars(&content);
}
let cfg: Config = serde_yaml::from_str(&content)
.with_context(|| format!("failed to parse YAML config at '{}'", load_path.display()))?;
Ok(cfg)
}
/// Returns the configuration file path that `confy` will use /// Returns the configuration file path that `confy` will use
pub fn get_config_file_path() -> Result<PathBuf> { pub fn get_config_file_path() -> Result<PathBuf> {
if let Some(base) = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from) { if let Some(base) = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from) {
let dir = base.join("gman"); let dir = base.join(calling_app_name());
let yml = dir.join("config.yml"); let yml = dir.join("config.yml");
let yaml = dir.join("config.yaml"); let yaml = dir.join("config.yaml");
if yml.exists() || yaml.exists() { if yml.exists() || yaml.exists() {
@@ -349,54 +362,10 @@ pub fn get_config_file_path() -> Result<PathBuf> {
} }
return Ok(dir.join("config.yml")); return Ok(dir.join("config.yml"));
} }
Ok(confy::get_configuration_file_path("gman", "config")?) Ok(confy::get_configuration_file_path(
} &calling_app_name(),
"config",
pub fn deserialize_optional_env_var<'de, D>(deserializer: D) -> Result<Option<String>, D::Error> )?)
where
D: serde::Deserializer<'de>,
{
let s: Option<String> = Option::deserialize(deserializer)?;
match s {
Some(value) => {
let interpolated = interpolate_env_vars(&value);
Ok(Some(interpolated))
}
None => Ok(None),
}
}
pub fn deserialize_optional_pathbuf_env_var<'de, D>(
deserializer: D,
) -> Result<Option<PathBuf>, D::Error>
where
D: serde::Deserializer<'de>,
{
let s: Option<String> = Option::deserialize(deserializer)?;
match s {
Some(value) => {
let interpolated = interpolate_env_vars(&value);
Ok(Some(interpolated.parse().unwrap()))
}
None => Ok(None),
}
}
fn deserialize_optional_usize_env_var<'de, D>(deserializer: D) -> Result<Option<usize>, D::Error>
where
D: serde::Deserializer<'de>,
{
let s: Option<String> = Option::deserialize(deserializer)?;
match s {
Some(value) => {
let interpolated = interpolate_env_vars(&value);
interpolated
.parse::<usize>()
.map(Some)
.map_err(serde::de::Error::custom)
}
None => Ok(None),
}
} }
pub fn interpolate_env_vars(s: &str) -> String { pub fn interpolate_env_vars(s: &str) -> String {
@@ -430,261 +399,8 @@ pub fn interpolate_env_vars(s: &str) -> String {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use indoc::indoc; use pretty_assertions::assert_str_eq;
use pretty_assertions::{assert_eq, assert_str_eq};
use serde::Deserialize;
use serial_test::serial; use serial_test::serial;
use std::path::PathBuf;
#[derive(Default, Deserialize, PartialEq, Eq, Debug)]
struct TestConfig {
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
string_var: Option<String>,
#[serde(default, deserialize_with = "deserialize_optional_pathbuf_env_var")]
path_var: Option<PathBuf>,
#[serde(default, deserialize_with = "deserialize_optional_usize_env_var")]
usize_var: Option<usize>,
}
#[test]
#[serial]
fn test_deserialize_optional_env_var_is_present() {
unsafe { env::set_var("TEST_VAR_DESERIALIZE_OPTION", "localhost") };
let yaml_data = indoc!(
r#"
string_var: ${TEST_VAR_DESERIALIZE_OPTION}
path_var: /some/path
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.string_var, Some("localhost".to_string()));
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
assert_eq!(config.usize_var, Some(123));
unsafe { env::remove_var("TEST_VAR_DESERIALIZE_OPTION") };
}
#[test]
fn test_deserialize_optional_env_var_empty_env_var_uses_default_value_if_provided() {
let yaml_data = indoc!(
r#"
string_var: ${TEST_VAR_DESERIALIZE_OPTION_UNDEFINED:-localhost}
path_var: /some/path
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.string_var, Some("localhost".to_string()));
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
assert_eq!(config.usize_var, Some(123));
}
#[test]
#[serial]
fn test_deserialize_optional_env_var_does_not_overwrite_non_env_value() {
unsafe { env::set_var("TEST_VAR_DESERIALIZE_OPTION_NO_OVERWRITE", "localhost") };
let yaml_data = indoc!(
r#"
string_var: www.example.com
path_var: /some/path
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.string_var, Some("www.example.com".to_string()));
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
assert_eq!(config.usize_var, Some(123));
unsafe { env::remove_var("TEST_VAR_DESERIALIZE_OPTION_NO_OVERWRITE") };
}
#[test]
fn test_deserialize_optional_env_var_empty() {
let yaml_data = indoc!(
r#"
path_var: /some/path
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.string_var, None);
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
assert_eq!(config.usize_var, Some(123));
}
#[test]
#[serial]
fn test_deserialize_optional_pathbuf_env_var_is_present() {
unsafe { env::set_var("TEST_VAR_DESERIALIZE_OPTION_PATHBUF", "/some/path") };
let yaml_data = indoc!(
r#"
string_var: hithere
path_var: ${TEST_VAR_DESERIALIZE_OPTION_PATHBUF}
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
assert_eq!(config.string_var, Some("hithere".to_string()));
assert_eq!(config.usize_var, Some(123));
unsafe { env::remove_var("TEST_VAR_DESERIALIZE_OPTION_PATHBUF") };
}
#[test]
fn test_deserialize_optional_pathbuf_env_var_empty_env_var_uses_default_value_if_provided() {
let yaml_data = indoc!(
r#"
string_var: hithere
path_var: ${TEST_VAR_DESERIALIZE_OPTION_PATHBUF_UNDEFINED:-/some/path}
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
assert_eq!(config.string_var, Some("hithere".to_string()));
assert_eq!(config.usize_var, Some(123));
}
#[test]
#[serial]
fn test_deserialize_optional_pathbuf_env_var_does_not_overwrite_non_env_value() {
unsafe {
env::set_var(
"TEST_VAR_DESERIALIZE_OPTION_PATHBUF_NO_OVERWRITE",
"/something/else",
)
};
let yaml_data = indoc!(
r#"
string_var: hithere
path_var: /some/path
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
assert_eq!(config.string_var, Some("hithere".to_string()));
assert_eq!(config.usize_var, Some(123));
unsafe { env::remove_var("TEST_VAR_DESERIALIZE_OPTION_PATHBUF_NO_OVERWRITE") };
}
#[test]
fn test_deserialize_optional_pathbuf_env_var_empty() {
let yaml_data = indoc!(
r#"
string_var: hithere
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.string_var, Some("hithere".to_string()));
assert_eq!(config.path_var, None);
assert_eq!(config.usize_var, Some(123));
}
#[test]
#[serial]
fn test_deserialize_optional_usize_env_var_is_present() {
unsafe { env::set_var("TEST_VAR_DESERIALIZE_OPTION_USIZE", "123") };
let yaml_data = indoc!(
r#"
string_var: hithere
path_var: /some/path
usize_var: ${TEST_VAR_DESERIALIZE_OPTION_USIZE}
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.usize_var, Some(123));
assert_eq!(config.string_var, Some("hithere".to_string()));
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
unsafe { env::remove_var("TEST_VAR_DESERIALIZE_OPTION_USIZE") };
}
#[test]
fn test_deserialize_optional_usize_env_var_uses_default_value_if_provided() {
let yaml_data = indoc!(
r#"
string_var: hithere
path_var: /some/path
usize_var: ${TEST_VAR_DESERIALIZE_OPTION_USIZE_UNDEFINED:-123}
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.usize_var, Some(123));
assert_eq!(config.string_var, Some("hithere".to_string()));
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
}
#[test]
#[serial]
fn test_deserialize_optional_usize_env_var_does_not_overwrite_non_env_value() {
unsafe { env::set_var("TEST_VAR_DESERIALIZE_OPTION_NO_OVERWRITE_USIZE", "456") };
let yaml_data = indoc!(
r#"
string_var: hithere
path_var: /some/path
usize_var: 123
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.usize_var, Some(123));
assert_eq!(config.string_var, Some("hithere".to_string()));
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
unsafe { env::remove_var("TEST_VAR_DESERIALIZE_OPTION_NO_OVERWRITE_USIZE") };
}
#[test]
fn test_deserialize_optional_usize_env_var_invalid_number() {
let yaml_data = indoc!(
r#"
string_var: hithere
path_var: /some/path
usize_var: "holo"
"#
);
let result: Result<TestConfig, _> = serde_yaml::from_str(yaml_data);
assert!(result.is_err());
let err = result.unwrap_err().to_string();
assert!(err.contains("invalid digit found in string"));
}
#[test]
fn test_deserialize_optional_usize_env_var_empty() {
let yaml_data = indoc!(
r#"
string_var: hithere
path_var: /some/path
"#
);
let config: TestConfig = serde_yaml::from_str(yaml_data).unwrap();
assert_eq!(config.usize_var, None);
assert_eq!(config.string_var, Some("hithere".to_string()));
assert_eq!(config.path_var, Some(PathBuf::from("/some/path")));
}
#[test] #[test]
fn test_interpolate_env_vars_defaults_to_original_string_if_not_in_yaml_interpolation_format() { fn test_interpolate_env_vars_defaults_to_original_string_if_not_in_yaml_interpolation_format() {
+77 -35
View File
@@ -20,17 +20,16 @@
//! The `config` and `providers` modules power the CLI. They can be embedded //! The `config` and `providers` modules power the CLI. They can be embedded
//! in other programs, but many functions interact with the user or the //! in other programs, but many functions interact with the user or the
//! filesystem. Prefer `no_run` doctests for those. //! filesystem. Prefer `no_run` doctests for those.
use anyhow::{Context, Result, anyhow, bail}; use anyhow::{Context, Result, anyhow, bail};
use argon2::{ use argon2::{Algorithm, Argon2, Params, Version, password_hash::rand_core::RngCore};
Algorithm, Argon2, Params, Version,
password_hash::{SaltString, rand_core::RngCore},
};
use base64::{Engine as _, engine::general_purpose::STANDARD as B64}; use base64::{Engine as _, engine::general_purpose::STANDARD as B64};
use chacha20poly1305::{ use chacha20poly1305::{
Key, XChaCha20Poly1305, XNonce, Key, XChaCha20Poly1305, XNonce,
aead::{Aead, KeyInit, OsRng}, aead::{Aead, KeyInit, OsRng},
}; };
use secrecy::{ExposeSecret, SecretString}; use secrecy::{ExposeSecret, SecretString};
use std::path::PathBuf;
use zeroize::Zeroize; use zeroize::Zeroize;
/// Configuration structures and helpers used by the CLI and library. /// Configuration structures and helpers used by the CLI and library.
pub mod config; pub mod config;
@@ -41,8 +40,8 @@ pub(crate) const HEADER: &str = "$VAULT";
pub(crate) const VERSION: &str = "v1"; pub(crate) const VERSION: &str = "v1";
pub(crate) const KDF: &str = "argon2id"; pub(crate) const KDF: &str = "argon2id";
pub(crate) const ARGON_M_COST_KIB: u32 = 19_456; pub(crate) const ARGON_M_COST_KIB: u32 = 65_536;
pub(crate) const ARGON_T_COST: u32 = 2; pub(crate) const ARGON_T_COST: u32 = 3;
pub(crate) const ARGON_P: u32 = 1; pub(crate) const ARGON_P: u32 = 1;
pub(crate) const SALT_LEN: usize = 16; pub(crate) const SALT_LEN: usize = 16;
@@ -59,7 +58,7 @@ fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
.hash_password_into(password.expose_secret().as_bytes(), salt, &mut key_bytes) .hash_password_into(password.expose_secret().as_bytes(), salt, &mut key_bytes)
.map_err(|e| anyhow!("argon2 into error: {:?}", e))?; .map_err(|e| anyhow!("argon2 into error: {:?}", e))?;
let key = *Key::from_slice(&key_bytes); let key: Key = key_bytes.into();
key_bytes.zeroize(); key_bytes.zeroize();
Ok(key) Ok(key)
} }
@@ -82,20 +81,28 @@ fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Result<String> { pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Result<String> {
let password = password.into(); let password = password.into();
let salt = SaltString::generate(&mut OsRng); if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let mut salt = [0u8; SALT_LEN];
OsRng.fill_bytes(&mut salt);
let mut nonce_bytes = [0u8; NONCE_LEN]; let mut nonce_bytes = [0u8; NONCE_LEN];
OsRng.fill_bytes(&mut nonce_bytes); OsRng.fill_bytes(&mut nonce_bytes);
let key = derive_key(&password, salt.as_str().as_bytes())?; let mut key = derive_key(&password, &salt)?;
let cipher = XChaCha20Poly1305::new(&key); let cipher = XChaCha20Poly1305::new(&key);
let aad = format!("{};{}", HEADER, VERSION); let aad = format!(
"{};{};{};m={},t={},p={}",
HEADER, VERSION, KDF, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P
);
let nonce = XNonce::from_slice(&nonce_bytes); let nonce: XNonce = nonce_bytes.into();
let mut pt = plaintext.as_bytes().to_vec(); let mut pt = plaintext.as_bytes().to_vec();
let ct = cipher let ct = cipher
.encrypt( .encrypt(
nonce, &nonce,
chacha20poly1305::aead::Payload { chacha20poly1305::aead::Payload {
msg: &pt, msg: &pt,
aad: aad.as_bytes(), aad: aad.as_bytes(),
@@ -113,13 +120,14 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
m = ARGON_M_COST_KIB, m = ARGON_M_COST_KIB,
t = ARGON_T_COST, t = ARGON_T_COST,
p = ARGON_P, p = ARGON_P,
salt = B64.encode(salt.as_str().as_bytes()), salt = B64.encode(salt),
nonce = B64.encode(nonce_bytes), nonce = B64.encode(nonce_bytes),
ct = B64.encode(&ct), ct = B64.encode(&ct),
); );
drop(cipher); drop(cipher);
let _ = key; key.zeroize();
salt.zeroize();
nonce_bytes.zeroize(); nonce_bytes.zeroize();
Ok(env) Ok(env)
@@ -130,6 +138,9 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
/// Returns the original plaintext on success or an error if the password is /// Returns the original plaintext on success or an error if the password is
/// wrong, the envelope was tampered with, or the input is malformed. /// wrong, the envelope was tampered with, or the input is malformed.
/// ///
/// This function supports both the current format (with KDF params in AAD) and
/// the legacy format (without KDF params in AAD) for backwards compatibility.
///
/// Example /// Example
/// ``` /// ```
/// use gman::{encrypt_string, decrypt_string}; /// use gman::{encrypt_string, decrypt_string};
@@ -143,6 +154,10 @@ pub fn encrypt_string(password: impl Into<SecretString>, plaintext: &str) -> Res
pub fn decrypt_string(password: impl Into<SecretString>, envelope: &str) -> Result<String> { pub fn decrypt_string(password: impl Into<SecretString>, envelope: &str) -> Result<String> {
let password = password.into(); let password = password.into();
if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let parts: Vec<&str> = envelope.split(';').collect(); let parts: Vec<&str> = envelope.split(';').collect();
if parts.len() < 7 { if parts.len() < 7 {
bail!("invalid envelope format"); bail!("invalid envelope format");
@@ -176,37 +191,66 @@ pub fn decrypt_string(password: impl Into<SecretString>, envelope: &str) -> Resu
let nonce_b64 = parts[5].strip_prefix("nonce=").context("missing nonce")?; let nonce_b64 = parts[5].strip_prefix("nonce=").context("missing nonce")?;
let ct_b64 = parts[6].strip_prefix("ct=").context("missing ct")?; let ct_b64 = parts[6].strip_prefix("ct=").context("missing ct")?;
let salt_bytes = B64.decode(salt_b64).context("bad salt b64")?; let mut salt_bytes = B64.decode(salt_b64).context("bad salt b64")?;
let mut nonce_bytes = B64.decode(nonce_b64).context("bad nonce b64")?; let nonce_bytes = B64.decode(nonce_b64).context("bad nonce b64")?;
let mut ct = B64.decode(ct_b64).context("bad ct b64")?; let mut ct = B64.decode(ct_b64).context("bad ct b64")?;
if nonce_bytes.len() != NONCE_LEN { if nonce_bytes.len() != NONCE_LEN {
bail!("nonce length mismatch"); bail!("nonce length mismatch");
} }
let key = derive_key(&password, &salt_bytes)?; let mut key = derive_key(&password, &salt_bytes)?;
let cipher = XChaCha20Poly1305::new(&key); let cipher = XChaCha20Poly1305::new(&key);
let aad = format!("{};{}", HEADER, VERSION); let aad_new = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
let nonce = XNonce::from_slice(&nonce_bytes); let aad_legacy = format!("{};{}", HEADER, VERSION);
let pt = cipher
.decrypt(
nonce,
chacha20poly1305::aead::Payload {
msg: &ct,
aad: aad.as_bytes(),
},
)
.map_err(|_| anyhow!("decryption failed (wrong password or corrupted data)"))?;
nonce_bytes.zeroize(); let mut nonce_arr: [u8; NONCE_LEN] = nonce_bytes
.try_into()
.map_err(|_| anyhow!("invalid nonce length"))?;
let nonce: XNonce = nonce_arr.into();
let decrypt_result = cipher.decrypt(
&nonce,
chacha20poly1305::aead::Payload {
msg: &ct,
aad: aad_new.as_bytes(),
},
);
let mut pt = match decrypt_result {
Ok(pt) => pt,
Err(_) => cipher
.decrypt(
&nonce,
chacha20poly1305::aead::Payload {
msg: &ct,
aad: aad_legacy.as_bytes(),
},
)
.map_err(|_| anyhow!("decryption failed (wrong password or corrupted data)"))?,
};
let s = String::from_utf8(pt.clone()).context("plaintext not valid UTF-8")?;
key.zeroize();
salt_bytes.zeroize();
nonce_arr.zeroize();
ct.zeroize(); ct.zeroize();
pt.zeroize();
let s = String::from_utf8(pt).context("plaintext not valid UTF-8")?;
Ok(s) Ok(s)
} }
pub(crate) fn calling_app_name() -> String {
let exe: PathBuf = std::env::current_exe().expect("unable to get current exe path");
exe.file_stem()
.and_then(|s| s.to_str())
.map(|s| s.to_owned())
.expect("executable name not valid UTF-8")
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@@ -237,12 +281,10 @@ mod tests {
} }
#[test] #[test]
fn empty_password() { fn empty_password_rejected() {
let pw = SecretString::new("".into()); let pw = SecretString::new("".into());
let msg = "hello"; let msg = "hello";
let env = encrypt_string(pw.clone(), msg).unwrap(); assert!(encrypt_string(pw.clone(), msg).is_err());
let out = decrypt_string(pw, &env).unwrap();
assert_eq!(msg, out);
} }
#[test] #[test]
@@ -264,7 +306,7 @@ mod tests {
let mut ct = base64::engine::general_purpose::STANDARD let mut ct = base64::engine::general_purpose::STANDARD
.decode(ct_b64) .decode(ct_b64)
.unwrap(); .unwrap();
ct[0] ^= 0x01; // Flip a bit ct[0] ^= 0x01;
let new_ct_b64 = base64::engine::general_purpose::STANDARD.encode(&ct); let new_ct_b64 = base64::engine::general_purpose::STANDARD.encode(&ct);
let new_ct_part = format!("ct={}", new_ct_b64); let new_ct_part = format!("ct={}", new_ct_b64);
parts[6] = &new_ct_part; parts[6] = &new_ct_part;
-3
View File
@@ -1,4 +1,3 @@
use crate::config::deserialize_optional_env_var;
use crate::providers::SecretProvider; use crate::providers::SecretProvider;
use anyhow::Context; use anyhow::Context;
use anyhow::Result; use anyhow::Result;
@@ -33,10 +32,8 @@ use validator::Validate;
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
pub struct AwsSecretsManagerProvider { pub struct AwsSecretsManagerProvider {
#[validate(required)] #[validate(required)]
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub aws_profile: Option<String>, pub aws_profile: Option<String>,
#[validate(required)] #[validate(required)]
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub aws_region: Option<String>, pub aws_region: Option<String>,
} }
+8 -16
View File
@@ -1,12 +1,13 @@
use crate::config::deserialize_optional_env_var;
use crate::providers::SecretProvider; use crate::providers::SecretProvider;
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use azure_identity::DefaultAzureCredential; use azure_core::credentials::TokenCredential;
use azure_identity::DeveloperToolsCredential;
use azure_security_keyvault_secrets::models::SetSecretParameters; use azure_security_keyvault_secrets::models::SetSecretParameters;
use azure_security_keyvault_secrets::{ResourceExt, SecretClient}; use azure_security_keyvault_secrets::{ResourceExt, SecretClient};
use futures::TryStreamExt; use futures::TryStreamExt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none; use serde_with::skip_serializing_none;
use std::sync::Arc;
use validator::Validate; use validator::Validate;
#[skip_serializing_none] #[skip_serializing_none]
@@ -31,7 +32,6 @@ use validator::Validate;
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
pub struct AzureKeyVaultProvider { pub struct AzureKeyVaultProvider {
#[validate(required)] #[validate(required)]
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub vault_name: Option<String>, pub vault_name: Option<String>,
} }
@@ -42,12 +42,8 @@ impl SecretProvider for AzureKeyVaultProvider {
} }
async fn get_secret(&self, key: &str) -> Result<String> { async fn get_secret(&self, key: &str) -> Result<String> {
let body = self let response = self.get_client()?.get_secret(key, None).await?;
.get_client()? let body = response.into_model()?;
.get_secret(key, "", None)
.await?
.into_body()
.await?;
body.value body.value
.with_context(|| format!("Secret '{}' not found", key)) .with_context(|| format!("Secret '{}' not found", key))
@@ -62,8 +58,7 @@ impl SecretProvider for AzureKeyVaultProvider {
self.get_client()? self.get_client()?
.set_secret(key, params.try_into()?, None) .set_secret(key, params.try_into()?, None)
.await? .await?
.into_body() .into_model()?;
.await?;
Ok(()) Ok(())
} }
@@ -79,10 +74,7 @@ impl SecretProvider for AzureKeyVaultProvider {
} }
async fn list_secrets(&self) -> Result<Vec<String>> { async fn list_secrets(&self) -> Result<Vec<String>> {
let mut pager = self let mut pager = self.get_client()?.list_secret_properties(None)?;
.get_client()?
.list_secret_properties(None)?
.into_stream();
let mut secrets = Vec::new(); let mut secrets = Vec::new();
while let Some(props) = pager.try_next().await? { while let Some(props) = pager.try_next().await? {
let name = props.resource_id()?.name; let name = props.resource_id()?.name;
@@ -95,7 +87,7 @@ impl SecretProvider for AzureKeyVaultProvider {
impl AzureKeyVaultProvider { impl AzureKeyVaultProvider {
fn get_client(&self) -> Result<SecretClient> { fn get_client(&self) -> Result<SecretClient> {
let credential = DefaultAzureCredential::new()?; let credential: Arc<dyn TokenCredential> = DeveloperToolsCredential::new(None)?;
let client = SecretClient::new( let client = SecretClient::new(
format!( format!(
"https://{}.vault.azure.net", "https://{}.vault.azure.net",
-2
View File
@@ -1,4 +1,3 @@
use crate::config::deserialize_optional_env_var;
use crate::providers::SecretProvider; use crate::providers::SecretProvider;
use anyhow::{Context, Result, anyhow}; use anyhow::{Context, Result, anyhow};
use gcloud_sdk::google::cloud::secretmanager::v1; use gcloud_sdk::google::cloud::secretmanager::v1;
@@ -40,7 +39,6 @@ type SecretsManagerClient = GoogleApi<SecretManagerServiceClient<GoogleAuthMiddl
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
pub struct GcpSecretManagerProvider { pub struct GcpSecretManagerProvider {
#[validate(required)] #[validate(required)]
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub gcp_project_id: Option<String>, pub gcp_project_id: Option<String>,
} }
+3 -2
View File
@@ -1,3 +1,4 @@
use crate::calling_app_name;
use anyhow::{Context, Result, anyhow}; use anyhow::{Context, Result, anyhow};
use chrono::Utc; use chrono::Utc;
use dialoguer::Confirm; use dialoguer::Confirm;
@@ -25,7 +26,7 @@ pub fn sync_and_push(opts: &SyncOpts<'_>) -> Result<()> {
opts.validate() opts.validate()
.with_context(|| "invalid git sync options")?; .with_context(|| "invalid git sync options")?;
let commit_message = format!("chore: sync @ {}", Utc::now().to_rfc3339()); let commit_message = format!("chore: sync @ {}", Utc::now().to_rfc3339());
let config_dir = confy::get_configuration_file_path("gman", "vault") let config_dir = confy::get_configuration_file_path(&calling_app_name(), "vault")
.with_context(|| "get config dir")? .with_context(|| "get config dir")?
.parent() .parent()
.map(Path::to_path_buf) .map(Path::to_path_buf)
@@ -37,7 +38,7 @@ pub fn sync_and_push(opts: &SyncOpts<'_>) -> Result<()> {
fs::create_dir_all(&repo_dir).with_context(|| format!("create {}", repo_dir.display()))?; fs::create_dir_all(&repo_dir).with_context(|| format!("create {}", repo_dir.display()))?;
// Move the default vault into the repo dir on first sync so only vault.yml is tracked. // Move the default vault into the repo dir on first sync so only vault.yml is tracked.
let default_vault = confy::get_configuration_file_path("gman", "vault") let default_vault = confy::get_configuration_file_path(&calling_app_name(), "vault")
.with_context(|| "get default vault path")?; .with_context(|| "get default vault path")?;
let repo_vault = repo_dir.join("vault.yml"); let repo_vault = repo_dir.join("vault.yml");
if default_vault.exists() && !repo_vault.exists() { if default_vault.exists() && !repo_vault.exists() {
-2
View File
@@ -1,4 +1,3 @@
use crate::config::deserialize_optional_env_var;
use crate::providers::{ENV_PATH, SecretProvider}; use crate::providers::{ENV_PATH, SecretProvider};
use anyhow::{Context, Result, anyhow}; use anyhow::{Context, Result, anyhow};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@@ -29,7 +28,6 @@ use validator::Validate;
#[derive(Debug, Default, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Default, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
pub struct GopassProvider { pub struct GopassProvider {
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub store: Option<String>, pub store: Option<String>,
} }
+294 -51
View File
@@ -1,5 +1,3 @@
use crate::config::deserialize_optional_env_var;
use crate::config::deserialize_optional_pathbuf_env_var;
use anyhow::{Context, anyhow, bail}; use anyhow::{Context, anyhow, bail};
use secrecy::{ExposeSecret, SecretString}; use secrecy::{ExposeSecret, SecretString};
use std::collections::HashMap; use std::collections::HashMap;
@@ -15,6 +13,7 @@ use crate::providers::git_sync::{
use crate::providers::{SecretProvider, SupportedProvider}; use crate::providers::{SecretProvider, SupportedProvider};
use crate::{ use crate::{
ARGON_M_COST_KIB, ARGON_P, ARGON_T_COST, HEADER, KDF, KEY_LEN, NONCE_LEN, SALT_LEN, VERSION, ARGON_M_COST_KIB, ARGON_P, ARGON_T_COST, HEADER, KDF, KEY_LEN, NONCE_LEN, SALT_LEN, VERSION,
calling_app_name,
}; };
use anyhow::Result; use anyhow::Result;
use argon2::{Algorithm, Argon2, Params, Version}; use argon2::{Algorithm, Argon2, Params, Version};
@@ -52,28 +51,26 @@ use validator::Validate;
#[derive(Debug, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
pub struct LocalProvider { pub struct LocalProvider {
#[serde(default, deserialize_with = "deserialize_optional_pathbuf_env_var")]
pub password_file: Option<PathBuf>, pub password_file: Option<PathBuf>,
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub git_branch: Option<String>, pub git_branch: Option<String>,
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub git_remote_url: Option<String>, pub git_remote_url: Option<String>,
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub git_user_name: Option<String>, pub git_user_name: Option<String>,
#[validate(email)] #[validate(email)]
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub git_user_email: Option<String>, pub git_user_email: Option<String>,
#[serde(default, deserialize_with = "deserialize_optional_pathbuf_env_var")]
pub git_executable: Option<PathBuf>, pub git_executable: Option<PathBuf>,
#[serde(skip)] #[serde(skip)]
#[serde(default, deserialize_with = "deserialize_optional_env_var")]
pub runtime_provider_name: Option<String>, pub runtime_provider_name: Option<String>,
} }
impl Default for LocalProvider { impl Default for LocalProvider {
fn default() -> Self { fn default() -> Self {
let password_file = match Config::local_provider_password_file() {
p if p.exists() => Some(p),
_ => None,
};
Self { Self {
password_file: Config::local_provider_password_file(), password_file,
git_branch: Some("main".into()), git_branch: Some("main".into()),
git_remote_url: None, git_remote_url: None,
git_user_name: None, git_user_name: None,
@@ -161,7 +158,8 @@ impl SecretProvider for LocalProvider {
async fn list_secrets(&self) -> Result<Vec<String>> { async fn list_secrets(&self) -> Result<Vec<String>> {
let vault_path = self.active_vault_path()?; let vault_path = self.active_vault_path()?;
let vault: HashMap<String, String> = load_vault(&vault_path).unwrap_or_default(); let vault: HashMap<String, String> = load_vault(&vault_path).unwrap_or_default();
let keys: Vec<String> = vault.keys().cloned().collect(); let mut keys: Vec<String> = vault.keys().cloned().collect();
keys.sort();
Ok(keys) Ok(keys)
} }
@@ -256,7 +254,7 @@ impl LocalProvider {
fn persist_git_settings_to_config(&self) -> Result<()> { fn persist_git_settings_to_config(&self) -> Result<()> {
debug!("Saving updated config (only current local provider)"); debug!("Saving updated config (only current local provider)");
let mut cfg = load_config().with_context(|| "failed to load existing config")?; let mut cfg = load_config(true).with_context(|| "failed to load existing config")?;
let target_name = self.runtime_provider_name.clone(); let target_name = self.runtime_provider_name.clone();
let mut updated = false; let mut updated = false;
@@ -295,7 +293,7 @@ impl LocalProvider {
let s = serde_yaml::to_string(&cfg)?; let s = serde_yaml::to_string(&cfg)?;
fs::write(&path, s).with_context(|| format!("failed to write {}", path.display()))?; fs::write(&path, s).with_context(|| format!("failed to write {}", path.display()))?;
} else { } else {
confy::store("gman", "config", &cfg) confy::store(&calling_app_name(), "config", &cfg)
.with_context(|| "failed to save updated config via confy")?; .with_context(|| "failed to save updated config via confy")?;
} }
@@ -324,6 +322,22 @@ impl LocalProvider {
fn get_password(&self) -> Result<SecretString> { fn get_password(&self) -> Result<SecretString> {
if let Some(password_file) = &self.password_file { if let Some(password_file) = &self.password_file {
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let metadata = fs::metadata(password_file).with_context(|| {
format!("failed to read password file metadata {:?}", password_file)
})?;
let mode = metadata.permissions().mode();
if mode & 0o077 != 0 {
bail!(
"password file {:?} has insecure permissions {:o} (should be 0600 or 0400)",
password_file,
mode & 0o777
);
}
}
let password = SecretString::new( let password = SecretString::new(
fs::read_to_string(password_file) fs::read_to_string(password_file)
.with_context(|| format!("failed to read password file {:?}", password_file))? .with_context(|| format!("failed to read password file {:?}", password_file))?
@@ -344,10 +358,11 @@ fn default_vault_path() -> Result<PathBuf> {
let xdg_path = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from); let xdg_path = env::var_os("XDG_CONFIG_HOME").map(PathBuf::from);
if let Some(xdg) = xdg_path { if let Some(xdg) = xdg_path {
return Ok(xdg.join("gman").join("vault.yml")); return Ok(xdg.join(calling_app_name()).join("vault.yml"));
} }
confy::get_configuration_file_path("gman", "vault").with_context(|| "get config dir") confy::get_configuration_file_path(&calling_app_name(), "vault")
.with_context(|| "get config dir")
} }
fn base_config_dir() -> Result<PathBuf> { fn base_config_dir() -> Result<PathBuf> {
@@ -371,24 +386,41 @@ fn store_vault(path: &Path, map: &HashMap<String, String>) -> Result<()> {
fs::create_dir_all(parent).with_context(|| format!("create {}", parent.display()))?; fs::create_dir_all(parent).with_context(|| format!("create {}", parent.display()))?;
} }
let s = serde_yaml::to_string(map).with_context(|| "serialize vault")?; let s = serde_yaml::to_string(map).with_context(|| "serialize vault")?;
fs::write(path, s).with_context(|| format!("write {}", path.display())) fs::write(path, &s).with_context(|| format!("write {}", path.display()))?;
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
fs::set_permissions(path, fs::Permissions::from_mode(0o600))
.with_context(|| format!("set permissions on {}", path.display()))?;
}
Ok(())
} }
fn encrypt_string(password: &SecretString, plaintext: &str) -> Result<String> { fn encrypt_string(password: &SecretString, plaintext: &str) -> Result<String> {
if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let mut salt = [0u8; SALT_LEN]; let mut salt = [0u8; SALT_LEN];
OsRng.fill_bytes(&mut salt); OsRng.fill_bytes(&mut salt);
let mut nonce_bytes = [0u8; NONCE_LEN]; let mut nonce_bytes = [0u8; NONCE_LEN];
OsRng.fill_bytes(&mut nonce_bytes); OsRng.fill_bytes(&mut nonce_bytes);
let key = derive_key(password, &salt)?; let mut key = derive_key(password, &salt)?;
let cipher = XChaCha20Poly1305::new(&key); let cipher = XChaCha20Poly1305::new(&key);
let aad = format!("{};{}", HEADER, VERSION);
let nonce = XNonce::from_slice(&nonce_bytes); let aad = format!(
"{};{};{};m={},t={},p={}",
HEADER, VERSION, KDF, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P
);
let nonce: XNonce = nonce_bytes.into();
let mut pt = plaintext.as_bytes().to_vec(); let mut pt = plaintext.as_bytes().to_vec();
let ct = cipher let ct = cipher
.encrypt( .encrypt(
nonce, &nonce,
chacha20poly1305::aead::Payload { chacha20poly1305::aead::Payload {
msg: &pt, msg: &pt,
aad: aad.as_bytes(), aad: aad.as_bytes(),
@@ -411,6 +443,7 @@ fn encrypt_string(password: &SecretString, plaintext: &str) -> Result<String> {
); );
drop(cipher); drop(cipher);
key.zeroize();
salt.zeroize(); salt.zeroize();
nonce_bytes.zeroize(); nonce_bytes.zeroize();
@@ -431,16 +464,30 @@ fn derive_key_with_params(
argon argon
.hash_password_into(password.expose_secret().as_bytes(), salt, &mut key_bytes) .hash_password_into(password.expose_secret().as_bytes(), salt, &mut key_bytes)
.map_err(|e| anyhow!("argon2 derive error: {:?}", e))?; .map_err(|e| anyhow!("argon2 derive error: {:?}", e))?;
let key: Key = key_bytes.into();
key_bytes.zeroize(); key_bytes.zeroize();
let key = Key::from_slice(&key_bytes); Ok(key)
Ok(*key)
} }
fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> { fn derive_key(password: &SecretString, salt: &[u8]) -> Result<Key> {
derive_key_with_params(password, salt, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P) derive_key_with_params(password, salt, ARGON_M_COST_KIB, ARGON_T_COST, ARGON_P)
} }
fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> { /// Attempts to decrypt with the given cipher, nonce, ciphertext, and AAD.
fn try_decrypt(
cipher: &XChaCha20Poly1305,
nonce: &XNonce,
ct: &[u8],
aad: &[u8],
) -> std::result::Result<Vec<u8>, chacha20poly1305::aead::Error> {
cipher.decrypt(nonce, chacha20poly1305::aead::Payload { msg: ct, aad })
}
type EnvelopeComponents = (u32, u32, u32, Vec<u8>, [u8; NONCE_LEN], Vec<u8>);
/// Parse an envelope string and extract its components.
/// Returns (m, t, p, salt, nonce_arr, ct) on success.
fn parse_envelope(envelope: &str) -> Result<EnvelopeComponents> {
let parts: Vec<&str> = envelope.trim().split(';').collect(); let parts: Vec<&str> = envelope.trim().split(';').collect();
if parts.len() < 7 { if parts.len() < 7 {
debug!("Invalid envelope format: {:?}", parts); debug!("Invalid envelope format: {:?}", parts);
@@ -482,40 +529,202 @@ fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> {
.with_context(|| "missing nonce")?; .with_context(|| "missing nonce")?;
let ct_b64 = parts[6].strip_prefix("ct=").with_context(|| "missing ct")?; let ct_b64 = parts[6].strip_prefix("ct=").with_context(|| "missing ct")?;
let mut salt = B64.decode(salt_b64).with_context(|| "bad salt b64")?; let salt = B64.decode(salt_b64).with_context(|| "bad salt b64")?;
let mut nonce_bytes = B64.decode(nonce_b64).with_context(|| "bad nonce b64")?; let nonce_bytes = B64.decode(nonce_b64).with_context(|| "bad nonce b64")?;
let mut ct = B64.decode(ct_b64).with_context(|| "bad ct b64")?; let ct = B64.decode(ct_b64).with_context(|| "bad ct b64")?;
if salt.len() != SALT_LEN || nonce_bytes.len() != NONCE_LEN { if nonce_bytes.len() != NONCE_LEN {
debug!( debug!("Nonce length mismatch: {}", nonce_bytes.len());
"Salt/nonce length mismatch: salt {}, nonce {}", bail!("nonce length mismatch");
salt.len(),
nonce_bytes.len()
);
bail!("salt/nonce length mismatch");
} }
let key = derive_key_with_params(password, &salt, m, t, p)?; let nonce_arr: [u8; NONCE_LEN] = nonce_bytes
.try_into()
.map_err(|_| anyhow!("invalid nonce length"))?;
Ok((m, t, p, salt, nonce_arr, ct))
}
fn decrypt_string(password: &SecretString, envelope: &str) -> Result<String> {
if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let (m, t, p, mut salt, mut nonce_arr, mut ct) = parse_envelope(envelope)?;
let nonce: XNonce = nonce_arr.into();
let aad_current = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
let mut key = derive_key_with_params(password, &salt, m, t, p)?;
let cipher = XChaCha20Poly1305::new(&key); let cipher = XChaCha20Poly1305::new(&key);
let aad = format!("{};{}", HEADER, VERSION);
let nonce = XNonce::from_slice(&nonce_bytes);
let pt = cipher if let Ok(pt) = try_decrypt(&cipher, &nonce, &ct, aad_current.as_bytes()) {
.decrypt( let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
nonce, key.zeroize();
chacha20poly1305::aead::Payload { salt.zeroize();
msg: &ct, nonce_arr.zeroize();
aad: aad.as_bytes(), ct.zeroize();
}, return Ok(s);
) }
.map_err(|_| anyhow!("decryption failed (wrong password or corrupted data)"))?;
key.zeroize();
salt.zeroize(); salt.zeroize();
nonce_bytes.zeroize(); nonce_arr.zeroize();
ct.zeroize(); ct.zeroize();
let s = String::from_utf8(pt).with_context(|| "plaintext not valid UTF-8")?; // TODO: Remove once all users have migrated their local vaults
Ok(s) if let Ok(plaintext) = legacy::decrypt_string_legacy(password, envelope) {
return Ok(plaintext);
}
bail!("decryption failed (wrong password or corrupted data)")
}
// TODO: Remove this entire module once all users have migrated their vaults.
mod legacy {
use super::*;
fn legacy_aad() -> String {
format!("{};{}", HEADER, VERSION)
}
pub fn decrypt_string_legacy(password: &SecretString, envelope: &str) -> Result<String> {
if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let (m, t, p, mut salt, mut nonce_arr, mut ct) = parse_envelope(envelope)?;
let nonce: XNonce = nonce_arr.into();
let aad = legacy_aad();
let mut key = derive_key_with_params(password, &salt, m, t, p)?;
let cipher = XChaCha20Poly1305::new(&key);
if let Ok(pt) = try_decrypt(&cipher, &nonce, &ct, aad.as_bytes()) {
let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
key.zeroize();
salt.zeroize();
nonce_arr.zeroize();
ct.zeroize();
return Ok(s);
}
key.zeroize();
let mut zeros_key: Key = [0u8; KEY_LEN].into();
let zeros_cipher = XChaCha20Poly1305::new(&zeros_key);
if let Ok(pt) = try_decrypt(&zeros_cipher, &nonce, &ct, aad.as_bytes()) {
debug!("Decrypted using legacy all-zeros key - secret needs migration");
let s = String::from_utf8(pt.clone()).with_context(|| "plaintext not valid UTF-8")?;
zeros_key.zeroize();
salt.zeroize();
nonce_arr.zeroize();
ct.zeroize();
return Ok(s);
}
zeros_key.zeroize();
salt.zeroize();
nonce_arr.zeroize();
ct.zeroize();
bail!("legacy decryption failed")
}
pub fn is_current_format(password: &SecretString, envelope: &str) -> Result<bool> {
if password.expose_secret().is_empty() {
bail!("password cannot be empty");
}
let (m, t, p, salt, nonce_arr, ct) = parse_envelope(envelope)?;
let nonce: XNonce = nonce_arr.into();
let aad_current = format!("{};{};{};m={},t={},p={}", HEADER, VERSION, KDF, m, t, p);
let key = derive_key_with_params(password, &salt, m, t, p)?;
let cipher = XChaCha20Poly1305::new(&key);
Ok(try_decrypt(&cipher, &nonce, &ct, aad_current.as_bytes()).is_ok())
}
}
// TODO: Remove once all users have migrated their local vaults
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SecretStatus {
Current,
NeedsMigration,
}
// TODO: Remove once all users have migrated their local vaults
#[derive(Debug)]
pub struct MigrationResult {
pub total: usize,
pub migrated: usize,
pub already_current: usize,
pub failed: Vec<(String, String)>,
}
impl LocalProvider {
// TODO: Remove once all users have migrated their local vaults
pub async fn migrate_vault(&self) -> Result<MigrationResult> {
let vault_path = self.active_vault_path()?;
let vault: HashMap<String, String> = load_vault(&vault_path).unwrap_or_default();
if vault.is_empty() {
return Ok(MigrationResult {
total: 0,
migrated: 0,
already_current: 0,
failed: vec![],
});
}
let password = self.get_password()?;
let mut migrated_vault = HashMap::new();
let mut migrated_count = 0;
let mut already_current_count = 0;
let mut failed = vec![];
for (key, envelope) in &vault {
match legacy::is_current_format(&password, envelope) {
Ok(true) => {
migrated_vault.insert(key.clone(), envelope.clone());
already_current_count += 1;
}
Ok(false) => match decrypt_string(&password, envelope) {
Ok(plaintext) => match encrypt_string(&password, &plaintext) {
Ok(new_envelope) => {
migrated_vault.insert(key.clone(), new_envelope);
migrated_count += 1;
}
Err(e) => {
failed.push((key.clone(), format!("re-encryption failed: {}", e)));
migrated_vault.insert(key.clone(), envelope.clone());
}
},
Err(e) => {
failed.push((key.clone(), format!("decryption failed: {}", e)));
migrated_vault.insert(key.clone(), envelope.clone());
}
},
Err(e) => {
failed.push((key.clone(), format!("status check failed: {}", e)));
migrated_vault.insert(key.clone(), envelope.clone());
}
}
}
if migrated_count > 0 {
store_vault(&vault_path, &migrated_vault)?;
}
Ok(MigrationResult {
total: vault.len(),
migrated: migrated_count,
already_current: already_current_count,
failed,
})
}
} }
#[cfg(test)] #[cfg(test)]
@@ -531,7 +740,7 @@ mod tests {
let password = SecretString::new("test_password".to_string().into()); let password = SecretString::new("test_password".to_string().into());
let salt = [0u8; 16]; let salt = [0u8; 16];
let key = derive_key(&password, &salt).unwrap(); let key = derive_key(&password, &salt).unwrap();
assert_eq!(key.as_slice().len(), 32); assert_eq!(key.len(), 32);
} }
#[test] #[test]
@@ -539,7 +748,7 @@ mod tests {
let password = SecretString::new("test_password".to_string().into()); let password = SecretString::new("test_password".to_string().into());
let salt = [0u8; 16]; let salt = [0u8; 16];
let key = derive_key_with_params(&password, &salt, 10, 1, 1).unwrap(); let key = derive_key_with_params(&password, &salt, 10, 1, 1).unwrap();
assert_eq!(key.as_slice().len(), 32); assert_eq!(key.len(), 32);
} }
#[test] #[test]
@@ -552,6 +761,40 @@ mod tests {
} }
#[test] #[test]
#[cfg(unix)]
fn get_password_reads_password_file() {
use std::os::unix::fs::PermissionsExt;
let dir = tempdir().unwrap();
let file = dir.path().join("pw.txt");
fs::write(&file, "secretpw\n").unwrap();
fs::set_permissions(&file, fs::Permissions::from_mode(0o600)).unwrap();
let provider = LocalProvider {
password_file: Some(file),
runtime_provider_name: None,
..LocalProvider::default()
};
let pw = provider.get_password().unwrap();
assert_eq!(pw.expose_secret(), "secretpw");
}
#[test]
#[cfg(unix)]
fn get_password_rejects_insecure_file() {
use std::os::unix::fs::PermissionsExt;
let dir = tempdir().unwrap();
let file = dir.path().join("pw.txt");
fs::write(&file, "secretpw\n").unwrap();
fs::set_permissions(&file, fs::Permissions::from_mode(0o644)).unwrap();
let provider = LocalProvider {
password_file: Some(file),
runtime_provider_name: None,
..LocalProvider::default()
};
assert!(provider.get_password().is_err());
}
#[test]
#[cfg(not(unix))]
fn get_password_reads_password_file() { fn get_password_reads_password_file() {
let dir = tempdir().unwrap(); let dir = tempdir().unwrap();
let file = dir.path().join("pw.txt"); let file = dir.path().join("pw.txt");
@@ -569,7 +812,7 @@ mod tests {
fn persist_only_target_local_provider_git_settings() { fn persist_only_target_local_provider_git_settings() {
let td = tempdir().unwrap(); let td = tempdir().unwrap();
let xdg = td.path().join("xdg"); let xdg = td.path().join("xdg");
let app_dir = xdg.join("gman"); let app_dir = xdg.join(calling_app_name());
fs::create_dir_all(&app_dir).unwrap(); fs::create_dir_all(&app_dir).unwrap();
unsafe { unsafe {
std_env::set_var("XDG_CONFIG_HOME", &xdg); std_env::set_var("XDG_CONFIG_HOME", &xdg);
+8
View File
@@ -8,9 +8,11 @@ pub mod gcp_secret_manager;
mod git_sync; mod git_sync;
pub mod gopass; pub mod gopass;
pub mod local; pub mod local;
pub mod one_password;
use crate::providers::gopass::GopassProvider; use crate::providers::gopass::GopassProvider;
use crate::providers::local::LocalProvider; use crate::providers::local::LocalProvider;
use crate::providers::one_password::OnePasswordProvider;
use anyhow::{Context, Result, anyhow}; use anyhow::{Context, Result, anyhow};
use aws_secrets_manager::AwsSecretsManagerProvider; use aws_secrets_manager::AwsSecretsManagerProvider;
use azure_key_vault::AzureKeyVaultProvider; use azure_key_vault::AzureKeyVaultProvider;
@@ -76,6 +78,10 @@ pub enum SupportedProvider {
#[serde(flatten)] #[serde(flatten)]
provider_def: GopassProvider, provider_def: GopassProvider,
}, },
OnePassword {
#[serde(flatten)]
provider_def: OnePasswordProvider,
},
} }
impl Validate for SupportedProvider { impl Validate for SupportedProvider {
@@ -86,6 +92,7 @@ impl Validate for SupportedProvider {
SupportedProvider::GcpSecretManager { provider_def } => provider_def.validate(), SupportedProvider::GcpSecretManager { provider_def } => provider_def.validate(),
SupportedProvider::AzureKeyVault { provider_def } => provider_def.validate(), SupportedProvider::AzureKeyVault { provider_def } => provider_def.validate(),
SupportedProvider::Gopass { provider_def } => provider_def.validate(), SupportedProvider::Gopass { provider_def } => provider_def.validate(),
SupportedProvider::OnePassword { provider_def } => provider_def.validate(),
} }
} }
} }
@@ -106,6 +113,7 @@ impl Display for SupportedProvider {
SupportedProvider::GcpSecretManager { .. } => write!(f, "gcp_secret_manager"), SupportedProvider::GcpSecretManager { .. } => write!(f, "gcp_secret_manager"),
SupportedProvider::AzureKeyVault { .. } => write!(f, "azure_key_vault"), SupportedProvider::AzureKeyVault { .. } => write!(f, "azure_key_vault"),
SupportedProvider::Gopass { .. } => write!(f, "gopass"), SupportedProvider::Gopass { .. } => write!(f, "gopass"),
SupportedProvider::OnePassword { .. } => write!(f, "one_password"),
} }
} }
} }
+199
View File
@@ -0,0 +1,199 @@
use crate::providers::{ENV_PATH, SecretProvider};
use anyhow::{Context, Result, anyhow};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
use std::io::Read;
use std::process::{Command, Stdio};
use validator::Validate;
#[skip_serializing_none]
/// 1Password-based secret provider.
/// See [1Password CLI](https://developer.1password.com/docs/cli/) for more
/// information.
///
/// You must already have the 1Password CLI (`op`) installed and configured
/// on your system.
///
/// This provider stores secrets as 1Password Password items. It requires
/// an optional vault name and an optional account identifier to be specified.
/// If no vault is specified, the user's default vault is used. If no account
/// is specified, the default signed-in account is used.
///
/// Example
/// ```no_run
/// use gman::providers::one_password::OnePasswordProvider;
/// use gman::providers::{SecretProvider, SupportedProvider};
/// use gman::config::Config;
///
/// let provider = OnePasswordProvider::default();
/// let _ = provider.set_secret("MY_SECRET", "value");
/// ```
#[derive(Debug, Default, Clone, Validate, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct OnePasswordProvider {
pub vault: Option<String>,
pub account: Option<String>,
}
impl OnePasswordProvider {
fn base_command(&self) -> Command {
let mut cmd = Command::new("op");
cmd.env("PATH", ENV_PATH.as_ref().expect("No ENV_PATH set"));
if let Some(account) = &self.account {
cmd.args(["--account", account]);
}
cmd
}
fn vault_args(&self) -> Vec<&str> {
match &self.vault {
Some(vault) => vec!["--vault", vault],
None => vec![],
}
}
}
#[async_trait::async_trait]
impl SecretProvider for OnePasswordProvider {
fn name(&self) -> &'static str {
"OnePasswordProvider"
}
async fn get_secret(&self, key: &str) -> Result<String> {
ensure_op_installed()?;
let mut cmd = self.base_command();
cmd.args(["item", "get", key, "--fields", "password", "--reveal"]);
cmd.args(self.vault_args());
cmd.stdin(Stdio::inherit())
.stdout(Stdio::piped())
.stderr(Stdio::inherit());
let mut child = cmd.spawn().context("Failed to spawn op command")?;
let mut output = String::new();
child
.stdout
.as_mut()
.expect("Failed to open op stdout")
.read_to_string(&mut output)
.context("Failed to read op output")?;
let status = child.wait().context("Failed to wait on op process")?;
if !status.success() {
return Err(anyhow!("op command failed with status: {}", status));
}
Ok(output.trim_end_matches(&['\r', '\n'][..]).to_string())
}
async fn set_secret(&self, key: &str, value: &str) -> Result<()> {
ensure_op_installed()?;
let mut cmd = self.base_command();
cmd.args(["item", "create", "--category", "password", "--title", key]);
cmd.args(self.vault_args());
cmd.arg(format!("password={}", value));
cmd.stdin(Stdio::inherit())
.stdout(Stdio::piped())
.stderr(Stdio::inherit());
let mut child = cmd.spawn().context("Failed to spawn op command")?;
let status = child.wait().context("Failed to wait on op process")?;
if !status.success() {
return Err(anyhow!("op command failed with status: {}", status));
}
Ok(())
}
async fn update_secret(&self, key: &str, value: &str) -> Result<()> {
ensure_op_installed()?;
let mut cmd = self.base_command();
cmd.args(["item", "edit", key]);
cmd.args(self.vault_args());
cmd.arg(format!("password={}", value));
cmd.stdin(Stdio::inherit())
.stdout(Stdio::piped())
.stderr(Stdio::inherit());
let mut child = cmd.spawn().context("Failed to spawn op command")?;
let status = child.wait().context("Failed to wait on op process")?;
if !status.success() {
return Err(anyhow!("op command failed with status: {}", status));
}
Ok(())
}
async fn delete_secret(&self, key: &str) -> Result<()> {
ensure_op_installed()?;
let mut cmd = self.base_command();
cmd.args(["item", "delete", key]);
cmd.args(self.vault_args());
cmd.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
let mut child = cmd.spawn().context("Failed to spawn op command")?;
let status = child.wait().context("Failed to wait on op process")?;
if !status.success() {
return Err(anyhow!("op command failed with status: {}", status));
}
Ok(())
}
async fn list_secrets(&self) -> Result<Vec<String>> {
ensure_op_installed()?;
let mut cmd = self.base_command();
cmd.args(["item", "list", "--format", "json"]);
cmd.args(self.vault_args());
cmd.stdin(Stdio::inherit())
.stdout(Stdio::piped())
.stderr(Stdio::inherit());
let mut child = cmd.spawn().context("Failed to spawn op command")?;
let mut output = String::new();
child
.stdout
.as_mut()
.expect("Failed to open op stdout")
.read_to_string(&mut output)
.context("Failed to read op output")?;
let status = child.wait().context("Failed to wait on op process")?;
if !status.success() {
return Err(anyhow!("op command failed with status: {}", status));
}
let items: Vec<serde_json::Value> =
serde_json::from_str(&output).context("Failed to parse op item list JSON output")?;
let secrets: Vec<String> = items
.iter()
.filter_map(|item| item.get("title").and_then(|t| t.as_str()))
.map(|s| s.to_string())
.collect();
Ok(secrets)
}
}
fn ensure_op_installed() -> Result<()> {
if which::which("op").is_err() {
Err(anyhow!(
"1Password CLI (op) is not installed or not found in PATH. \
Please install it from https://developer.1password.com/docs/cli/get-started/"
))
} else {
Ok(())
}
}
+73 -43
View File
@@ -1,3 +1,8 @@
//! CLI integration tests that execute the gman binary.
//!
//! These tests are skipped when cross-compiling because the compiled binary
//! cannot be executed on a different architecture (e.g., ARM64 binary on x86_64 host).
use assert_cmd::prelude::*; use assert_cmd::prelude::*;
use predicates::prelude::*; use predicates::prelude::*;
use std::fs; use std::fs;
@@ -7,6 +12,20 @@ use std::path::{Path, PathBuf};
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
use tempfile::TempDir; use tempfile::TempDir;
fn gman_bin() -> PathBuf {
PathBuf::from(env!("CARGO_BIN_EXE_gman"))
}
/// Check if the gman binary can be executed on this system.
/// Returns false when cross-compiling (e.g., ARM64 binary on x86_64 host).
fn can_execute_binary() -> bool {
Command::new(gman_bin())
.arg("--version")
.output()
.map(|o| o.status.success())
.unwrap_or(false)
}
fn setup_env() -> (TempDir, PathBuf, PathBuf) { fn setup_env() -> (TempDir, PathBuf, PathBuf) {
let td = tempfile::tempdir().expect("tempdir"); let td = tempfile::tempdir().expect("tempdir");
let cfg_home = td.path().join("config"); let cfg_home = td.path().join("config");
@@ -46,27 +65,38 @@ providers:
password_file.display() password_file.display()
) )
}; };
// Confy with yaml feature typically uses .yml; write both to be safe.
fs::write(app_dir.join("config.yml"), &cfg).unwrap(); fs::write(app_dir.join("config.yml"), &cfg).unwrap();
fs::write(app_dir.join("config.yaml"), &cfg).unwrap(); fs::write(app_dir.join("config.yaml"), &cfg).unwrap();
} }
fn create_password_file(path: &Path, content: &[u8]) {
fs::write(path, content).unwrap();
#[cfg(unix)]
{
fs::set_permissions(path, fs::Permissions::from_mode(0o600)).unwrap();
}
}
#[test] #[test]
#[cfg(unix)] #[cfg(unix)]
fn cli_config_no_changes() { fn cli_config_no_changes() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (td, xdg_cfg, xdg_cache) = setup_env(); let (td, xdg_cfg, xdg_cache) = setup_env();
let pw_file = td.path().join("pw.txt"); let pw_file = td.path().join("pw.txt");
fs::write(&pw_file, b"pw\n").unwrap(); create_password_file(&pw_file, b"pw\n");
write_yaml_config(&xdg_cfg, &pw_file, None); write_yaml_config(&xdg_cfg, &pw_file, None);
// Create a no-op editor script that exits successfully without modifying the file
let editor = td.path().join("noop-editor.sh"); let editor = td.path().join("noop-editor.sh");
fs::write(&editor, b"#!/bin/sh\nexit 0\n").unwrap(); fs::write(&editor, b"#!/bin/sh\nexit 0\n").unwrap();
let mut perms = fs::metadata(&editor).unwrap().permissions(); let mut perms = fs::metadata(&editor).unwrap().permissions();
perms.set_mode(0o755); perms.set_mode(0o755);
fs::set_permissions(&editor, perms).unwrap(); fs::set_permissions(&editor, perms).unwrap();
let mut cmd = Command::cargo_bin("gman").unwrap(); let mut cmd = Command::new(gman_bin());
cmd.env("XDG_CONFIG_HOME", &xdg_cfg) cmd.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.env("EDITOR", &editor) .env("EDITOR", &editor)
@@ -80,15 +110,23 @@ fn cli_config_no_changes() {
#[test] #[test]
#[cfg(unix)] #[cfg(unix)]
fn cli_config_updates_and_persists() { fn cli_config_updates_and_persists() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (td, xdg_cfg, xdg_cache) = setup_env(); let (td, xdg_cfg, xdg_cache) = setup_env();
let pw_file = td.path().join("pw.txt"); let pw_file = td.path().join("pw.txt");
fs::write(&pw_file, b"pw\n").unwrap(); create_password_file(&pw_file, b"pw\n");
write_yaml_config(&xdg_cfg, &pw_file, None); write_yaml_config(&xdg_cfg, &pw_file, None);
// Editor script appends a valid run_configs section to the YAML file
let editor = td.path().join("append-run-config.sh"); let editor = td.path().join("append-run-config.sh");
// Note: We need a small sleep to ensure the file modification timestamp changes.
// The dialoguer Editor uses file modification time to detect changes, and on fast
// systems the edit can complete within the same timestamp granularity.
let script = r#"#!/bin/sh let script = r#"#!/bin/sh
FILE="$1" FILE="$1"
sleep 0.1
cat >> "$FILE" <<'EOF' cat >> "$FILE" <<'EOF'
run_configs: run_configs:
- name: echo - name: echo
@@ -101,7 +139,7 @@ exit 0
perms.set_mode(0o755); perms.set_mode(0o755);
fs::set_permissions(&editor, perms).unwrap(); fs::set_permissions(&editor, perms).unwrap();
let mut cmd = Command::cargo_bin("gman").unwrap(); let mut cmd = Command::new(gman_bin());
cmd.env("XDG_CONFIG_HOME", &xdg_cfg) cmd.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.env("EDITOR", &editor) .env("EDITOR", &editor)
@@ -111,7 +149,6 @@ exit 0
"Configuration updated successfully", "Configuration updated successfully",
)); ));
// Verify that the config file now contains the run_configs key
let cfg_path = xdg_cfg.join("gman").join("config.yml"); let cfg_path = xdg_cfg.join("gman").join("config.yml");
let written = fs::read_to_string(&cfg_path).expect("config file readable"); let written = fs::read_to_string(&cfg_path).expect("config file readable");
assert!(written.contains("run_configs:")); assert!(written.contains("run_configs:"));
@@ -120,8 +157,13 @@ exit 0
#[test] #[test]
fn cli_shows_help() { fn cli_shows_help() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (_td, cfg, cache) = setup_env(); let (_td, cfg, cache) = setup_env();
let mut cmd = Command::cargo_bin("gman").unwrap(); let mut cmd = Command::new(gman_bin());
cmd.env("XDG_CACHE_HOME", &cache) cmd.env("XDG_CACHE_HOME", &cache)
.env("XDG_CONFIG_HOME", &cfg) .env("XDG_CONFIG_HOME", &cfg)
.arg("--help"); .arg("--help");
@@ -130,27 +172,19 @@ fn cli_shows_help() {
.stdout(predicate::str::contains("Usage").or(predicate::str::contains("Add"))); .stdout(predicate::str::contains("Usage").or(predicate::str::contains("Add")));
} }
#[test]
fn cli_completions_bash() {
let (_td, cfg, cache) = setup_env();
let mut cmd = Command::cargo_bin("gman").unwrap();
cmd.env("XDG_CACHE_HOME", &cache)
.env("XDG_CONFIG_HOME", &cfg)
.args(["completions", "bash"]);
cmd.assert()
.success()
.stdout(predicate::str::contains("_gman").or(predicate::str::contains("complete -F")));
}
#[test] #[test]
fn cli_add_get_list_update_delete_roundtrip() { fn cli_add_get_list_update_delete_roundtrip() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (td, xdg_cfg, xdg_cache) = setup_env(); let (td, xdg_cfg, xdg_cache) = setup_env();
let pw_file = td.path().join("pw.txt"); let pw_file = td.path().join("pw.txt");
fs::write(&pw_file, b"testpw\n").unwrap(); create_password_file(&pw_file, b"testpw\n");
write_yaml_config(&xdg_cfg, &pw_file, None); write_yaml_config(&xdg_cfg, &pw_file, None);
// add let mut add = Command::new(gman_bin());
let mut add = Command::cargo_bin("gman").unwrap();
add.env("XDG_CONFIG_HOME", &xdg_cfg) add.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.stdin(Stdio::piped()) .stdin(Stdio::piped())
@@ -166,8 +200,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
let add_out = child.wait_with_output().unwrap(); let add_out = child.wait_with_output().unwrap();
assert!(add_out.status.success()); assert!(add_out.status.success());
// get (text) let mut get = Command::new(gman_bin());
let mut get = Command::cargo_bin("gman").unwrap();
get.env("XDG_CONFIG_HOME", &xdg_cfg) get.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.args(["get", "my_api_key"]); .args(["get", "my_api_key"]);
@@ -175,8 +208,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
.success() .success()
.stdout(predicate::str::contains("super_secret")); .stdout(predicate::str::contains("super_secret"));
// get as JSON let mut get_json = Command::new(gman_bin());
let mut get_json = Command::cargo_bin("gman").unwrap();
get_json get_json
.env("XDG_CONFIG_HOME", &xdg_cfg) .env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
@@ -185,8 +217,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
predicate::str::contains("my_api_key").and(predicate::str::contains("super_secret")), predicate::str::contains("my_api_key").and(predicate::str::contains("super_secret")),
); );
// list let mut list = Command::new(gman_bin());
let mut list = Command::cargo_bin("gman").unwrap();
list.env("XDG_CONFIG_HOME", &xdg_cfg) list.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.arg("list"); .arg("list");
@@ -194,8 +225,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
.success() .success()
.stdout(predicate::str::contains("my_api_key")); .stdout(predicate::str::contains("my_api_key"));
// update let mut update = Command::new(gman_bin());
let mut update = Command::cargo_bin("gman").unwrap();
update update
.env("XDG_CONFIG_HOME", &xdg_cfg) .env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
@@ -211,8 +241,7 @@ fn cli_add_get_list_update_delete_roundtrip() {
let upd_out = child.wait_with_output().unwrap(); let upd_out = child.wait_with_output().unwrap();
assert!(upd_out.status.success()); assert!(upd_out.status.success());
// get again let mut get2 = Command::new(gman_bin());
let mut get2 = Command::cargo_bin("gman").unwrap();
get2.env("XDG_CONFIG_HOME", &xdg_cfg) get2.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.args(["get", "my_api_key"]); .args(["get", "my_api_key"]);
@@ -220,15 +249,13 @@ fn cli_add_get_list_update_delete_roundtrip() {
.success() .success()
.stdout(predicate::str::contains("new_val")); .stdout(predicate::str::contains("new_val"));
// delete let mut del = Command::new(gman_bin());
let mut del = Command::cargo_bin("gman").unwrap();
del.env("XDG_CONFIG_HOME", &xdg_cfg) del.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.args(["delete", "my_api_key"]); .args(["delete", "my_api_key"]);
del.assert().success(); del.assert().success();
// get should now fail let mut get_missing = Command::new(gman_bin());
let mut get_missing = Command::cargo_bin("gman").unwrap();
get_missing get_missing
.env("XDG_CONFIG_HOME", &xdg_cfg) .env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
@@ -238,13 +265,17 @@ fn cli_add_get_list_update_delete_roundtrip() {
#[test] #[test]
fn cli_wrap_dry_run_env_injection() { fn cli_wrap_dry_run_env_injection() {
if !can_execute_binary() {
eprintln!("Skipping test: cannot execute cross-compiled binary");
return;
}
let (td, xdg_cfg, xdg_cache) = setup_env(); let (td, xdg_cfg, xdg_cache) = setup_env();
let pw_file = td.path().join("pw.txt"); let pw_file = td.path().join("pw.txt");
fs::write(&pw_file, b"pw\n").unwrap(); create_password_file(&pw_file, b"pw\n");
write_yaml_config(&xdg_cfg, &pw_file, Some("echo")); write_yaml_config(&xdg_cfg, &pw_file, Some("echo"));
// Add the secret so the profile can read it let mut add = Command::new(gman_bin());
let mut add = Command::cargo_bin("gman").unwrap();
add.env("XDG_CONFIG_HOME", &xdg_cfg) add.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.stdin(Stdio::piped()) .stdin(Stdio::piped())
@@ -255,8 +286,7 @@ fn cli_wrap_dry_run_env_injection() {
let add_out = child.wait_with_output().unwrap(); let add_out = child.wait_with_output().unwrap();
assert!(add_out.status.success()); assert!(add_out.status.success());
// Dry-run wrapping: prints preview command let mut wrap = Command::new(gman_bin());
let mut wrap = Command::cargo_bin("gman").unwrap();
wrap.env("XDG_CONFIG_HOME", &xdg_cfg) wrap.env("XDG_CONFIG_HOME", &xdg_cfg)
.env("XDG_CACHE_HOME", &xdg_cache) .env("XDG_CACHE_HOME", &xdg_cache)
.arg("--dry-run") .arg("--dry-run")
+8 -10
View File
@@ -252,16 +252,14 @@ mod tests {
#[test] #[test]
fn test_config_local_provider_password_file() { fn test_config_local_provider_password_file() {
let path = Config::local_provider_password_file(); let path = Config::local_provider_password_file();
let expected_path = dirs::home_dir().map(|p| p.join(".gman_password")); // Derive expected filename based on current test executable name
if let Some(p) = &expected_path { let exe = std::env::current_exe().expect("current_exe");
if !p.exists() { let stem = exe
assert_eq!(path, None); .file_stem()
} else { .and_then(|s| s.to_str())
assert_eq!(path, expected_path); .expect("utf-8 file stem");
} let expected = dirs::home_dir().map(|p| p.join(format!(".{}_password", stem)));
} else { assert_eq!(Some(path), expected);
assert_eq!(path, None);
}
} }
#[test] #[test]
+8
View File
@@ -0,0 +1,8 @@
# Seeds for failure cases proptest has generated in the past. It is
# automatically read and these particular cases re-run before any
# novel cases are generated.
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc 155469a45d7311cd4003e23a3bcdaa8e55879e6222c1b6313a2b1f0b563bb195 # shrinks to password = "", msg = " "
cc 0bc9f608677234c082d10ff51b15dc39b4c194cdf920b4d87e553467c93824ed # shrinks to password = "", msg = ""
+6 -7
View File
@@ -1,15 +1,15 @@
use base64::Engine; use base64::Engine;
use gman::{decrypt_string, encrypt_string}; use gman::{decrypt_string, encrypt_string};
use proptest::prelude::*; use proptest::prelude::*;
proptest! {
#![proptest_config(ProptestConfig::with_cases(64))]
}
use secrecy::SecretString; use secrecy::SecretString;
proptest! { proptest! {
// Reduced case count because Argon2 key derivation is intentionally slow
// (65 MiB memory, 3 iterations per encryption/decryption)
#![proptest_config(ProptestConfig::with_cases(4))]
#[test] #[test]
fn prop_encrypt_decrypt_roundtrip(password in ".{0,64}", msg in ".{0,512}") { fn prop_encrypt_decrypt_roundtrip(password in ".{1,64}", msg in ".{0,512}") {
let pw = SecretString::new(password.into()); let pw = SecretString::new(password.into());
let env = encrypt_string(pw.clone(), &msg).unwrap(); let env = encrypt_string(pw.clone(), &msg).unwrap();
let out = decrypt_string(pw, &env).unwrap(); let out = decrypt_string(pw, &env).unwrap();
@@ -18,10 +18,9 @@ proptest! {
} }
#[test] #[test]
fn prop_tamper_ciphertext_detected(password in ".{0,32}", msg in ".{1,128}") { fn prop_tamper_ciphertext_detected(password in ".{1,32}", msg in ".{1,128}") {
let pw = SecretString::new(password.into()); let pw = SecretString::new(password.into());
let env = encrypt_string(pw.clone(), &msg).unwrap(); let env = encrypt_string(pw.clone(), &msg).unwrap();
// Flip a bit in the ct payload segment
let mut parts: Vec<&str> = env.split(';').collect(); let mut parts: Vec<&str> = env.split(';').collect();
let ct_b64 = parts[6].strip_prefix("ct=").unwrap(); let ct_b64 = parts[6].strip_prefix("ct=").unwrap();
let mut ct = base64::engine::general_purpose::STANDARD.decode(ct_b64).unwrap(); let mut ct = base64::engine::general_purpose::STANDARD.decode(ct_b64).unwrap();
+5 -4
View File
@@ -58,10 +58,11 @@ fn test_local_provider_invalid_email() {
#[test] #[test]
fn test_local_provider_default() { fn test_local_provider_default() {
let provider = LocalProvider::default(); let provider = LocalProvider::default();
assert_eq!( let expected_pw = {
provider.password_file, let p = Config::local_provider_password_file();
Config::local_provider_password_file() if p.exists() { Some(p) } else { None }
); };
assert_eq!(provider.password_file, expected_pw);
assert_eq!(provider.git_branch, Some("main".into())); assert_eq!(provider.git_branch, Some("main".into()));
assert_eq!(provider.git_remote_url, None); assert_eq!(provider.git_remote_url, None);
assert_eq!(provider.git_user_name, None); assert_eq!(provider.git_user_name, None);
+1
View File
@@ -3,4 +3,5 @@ mod azure_key_vault_tests;
mod gcp_secret_manager_tests; mod gcp_secret_manager_tests;
mod gopass_tests; mod gopass_tests;
mod local_tests; mod local_tests;
mod one_password_tests;
mod provider_tests; mod provider_tests;
+113
View File
@@ -0,0 +1,113 @@
use gman::config::{Config, ProviderConfig};
use gman::providers::{SecretProvider, SupportedProvider};
use pretty_assertions::{assert_eq, assert_str_eq};
use validator::Validate;
#[test]
fn test_one_password_supported_provider_display_and_validate_from_yaml() {
let yaml = r#"---
type: one_password
vault: Production
account: my.1password.com
"#;
let sp: SupportedProvider = serde_yaml::from_str(yaml).expect("valid supported provider yaml");
assert!(sp.validate().is_ok());
assert_eq!(sp.to_string(), "one_password");
}
#[test]
fn test_one_password_supported_provider_minimal_yaml() {
let yaml = r#"---
type: one_password
"#;
let sp: SupportedProvider = serde_yaml::from_str(yaml).expect("valid supported provider yaml");
assert!(sp.validate().is_ok());
assert_eq!(sp.to_string(), "one_password");
}
#[test]
fn test_one_password_supported_provider_vault_only() {
let yaml = r#"---
type: one_password
vault: Personal
"#;
let sp: SupportedProvider = serde_yaml::from_str(yaml).expect("valid supported provider yaml");
assert!(sp.validate().is_ok());
}
#[test]
fn test_one_password_supported_provider_account_only() {
let yaml = r#"---
type: one_password
account: team.1password.com
"#;
let sp: SupportedProvider = serde_yaml::from_str(yaml).expect("valid supported provider yaml");
assert!(sp.validate().is_ok());
}
#[test]
fn test_one_password_supported_provider_rejects_unknown_fields() {
let yaml = r#"---
type: one_password
vault: Production
unknown_field: bad
"#;
let result: Result<SupportedProvider, _> = serde_yaml::from_str(yaml);
assert!(result.is_err());
}
#[test]
fn test_provider_config_with_one_password_deserialize_and_extract() {
let yaml = r#"---
name: op
type: one_password
"#;
let pc: ProviderConfig = serde_yaml::from_str(yaml).expect("valid provider config yaml");
assert!(pc.validate().is_ok());
let mut pc_owned = pc.clone();
let provider: &mut dyn SecretProvider = pc_owned.extract_provider();
assert_str_eq!(provider.name(), "OnePasswordProvider");
let cfg_yaml = r#"---
default_provider: op
providers:
- name: op
type: one_password
vault: Production
account: my.1password.com
"#;
let cfg: Config = serde_yaml::from_str(cfg_yaml).expect("valid config yaml");
assert!(cfg.validate().is_ok());
let extracted = cfg
.extract_provider_config(None)
.expect("should find default provider");
assert_eq!(extracted.name.as_deref(), Some("op"));
}
#[test]
fn test_one_password_config_with_multiple_providers() {
let cfg_yaml = r#"---
default_provider: local
providers:
- name: local
type: local
- name: op
type: one_password
vault: Production
"#;
let cfg: Config = serde_yaml::from_str(cfg_yaml).expect("valid config yaml");
assert!(cfg.validate().is_ok());
let extracted = cfg
.extract_provider_config(Some("op".into()))
.expect("should find op provider");
assert_eq!(extracted.name.as_deref(), Some("op"));
}