Merge branch 'fix-test-json-fields'

Jeffrey Paul 2025-07-15 07:35:58 +02:00
commit f91281e991
3 changed files with 42 additions and 53 deletions

View File

@@ -52,11 +52,12 @@ func TestMain(m *testing.M) {
 // all functionality of the secret manager using a real filesystem in a temporary directory.
 // This test serves as both validation and documentation of the program's behavior.
 func TestSecretManagerIntegration(t *testing.T) {
-	// Enable debug logging to diagnose issues
-	t.Setenv("GODEBUG", "berlin.sneak.pkg.secret")
-	// Reinitialize debug logging to pick up the environment variable change
-	secret.InitDebugLogging()
+	// Only enable debug logging if running with -v flag
+	if testing.Verbose() {
+		t.Setenv("GODEBUG", "berlin.sneak.pkg.secret")
+		// Reinitialize debug logging to pick up the environment variable change
+		secret.InitDebugLogging()
+	}
 	// Test configuration
 	testMnemonic := "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
@@ -266,7 +267,7 @@ func TestSecretManagerIntegration(t *testing.T) {
 	// Test 26: Large secret values
 	// Purpose: Test with large secret values (e.g., certificates)
 	// Expected: Proper storage and retrieval
-	test26LargeSecrets(t, tempDir, secretPath, testMnemonic, runSecret, runSecretWithEnv)
+	test26LargeSecrets(t, tempDir, secretPath, testMnemonic, runSecret, runSecretWithEnv, runSecretWithStdin)
 	// Test 27: Special characters in values
 	// Purpose: Test secrets with newlines, unicode, binary data
@@ -380,8 +381,8 @@ func test01Initialize(t *testing.T, tempDir, testMnemonic, testPassphrase string
 	t.Logf("Parsed metadata: %+v", metadata)
 	// Verify metadata fields
-	assert.Equal(t, float64(0), metadata["derivation_index"], "first vault should have index 0")
-	assert.Contains(t, metadata, "public_key_hash", "should contain public key hash")
+	assert.Equal(t, float64(0), metadata["derivationIndex"], "first vault should have index 0")
+	assert.Contains(t, metadata, "publicKeyHash", "should contain public key hash")
 	assert.Contains(t, metadata, "createdAt", "should contain creation timestamp")
 	// Verify the longterm.age file in passphrase unlocker
@@ -411,8 +412,8 @@ func test02ListVaults(t *testing.T, runSecret func(...string) (string, error)) {
 	require.NoError(t, err, "JSON output should be valid")
 	// Verify current vault
-	currentVault, ok := response["current_vault"]
-	require.True(t, ok, "response should contain current_vault")
+	currentVault, ok := response["currentVault"]
+	require.True(t, ok, "response should contain currentVault")
 	assert.Equal(t, "default", currentVault, "current vault should be default")
 	// Verify vaults list
@@ -520,14 +521,14 @@ func test04ImportMnemonic(t *testing.T, tempDir, testMnemonic, testPassphrase st
 	require.NoError(t, err, "vault metadata should be valid JSON")
 	// Work vault should have a different derivation index than default (0)
-	derivIndex, ok := metadata["derivation_index"].(float64)
-	require.True(t, ok, "derivation_index should be a number")
+	derivIndex, ok := metadata["derivationIndex"].(float64)
+	require.True(t, ok, "derivationIndex should be a number")
 	assert.NotEqual(t, float64(0), derivIndex, "work vault should have non-zero derivation index")
 	// Verify public key hash is stored
-	assert.Contains(t, metadata, "public_key_hash", "should contain public key hash")
-	pubKeyHash, ok := metadata["public_key_hash"].(string)
-	require.True(t, ok, "public_key_hash should be a string")
+	assert.Contains(t, metadata, "publicKeyHash", "should contain public key hash")
+	pubKeyHash, ok := metadata["publicKeyHash"].(string)
+	require.True(t, ok, "publicKeyHash should be a string")
 	assert.NotEmpty(t, pubKeyHash, "public key hash should not be empty")
 }
@@ -876,8 +877,8 @@ func test11ListSecrets(t *testing.T, testMnemonic string, runSecret func(...stri
 	var listResponse struct {
 		Secrets []struct {
 			Name      string `json:"name"`
-			CreatedAt string `json:"created_at"`
-			UpdatedAt string `json:"updated_at"`
+			CreatedAt string `json:"createdAt"`
+			UpdatedAt string `json:"updatedAt"`
 		} `json:"secrets"`
 		Filter string `json:"filter,omitempty"`
 	}
@@ -1377,7 +1378,7 @@ func test19DisasterRecovery(t *testing.T, tempDir, secretPath, testMnemonic stri
 	require.NoError(t, err, "read vault metadata")
 	var metadata struct {
-		DerivationIndex uint32 `json:"derivation_index"`
+		DerivationIndex uint32 `json:"derivationIndex"`
 	}
 	err = json.Unmarshal(metadataBytes, &metadata)
 	require.NoError(t, err, "parse vault metadata")
@@ -1531,7 +1532,7 @@ func test22JSONOutput(t *testing.T, runSecret func(...string) (string, error)) {
 	err = json.Unmarshal([]byte(output), &vaultListResponse)
 	require.NoError(t, err, "vault list JSON should be valid")
 	assert.Contains(t, vaultListResponse, "vaults", "should have vaults key")
-	assert.Contains(t, vaultListResponse, "current_vault", "should have current_vault key")
+	assert.Contains(t, vaultListResponse, "currentVault", "should have currentVault key")
 	// Test secret list --json (already tested in test 11)
@@ -1687,7 +1688,7 @@ func test25ConcurrentOperations(t *testing.T, testMnemonic string, runSecret fun
 	// to avoid conflicts, but reads should always work
 }
-func test26LargeSecrets(t *testing.T, tempDir, secretPath, testMnemonic string, runSecret func(...string) (string, error), runSecretWithEnv func(map[string]string, ...string) (string, error)) {
+func test26LargeSecrets(t *testing.T, tempDir, secretPath, testMnemonic string, runSecret func(...string) (string, error), runSecretWithEnv func(map[string]string, ...string) (string, error), runSecretWithStdin func(string, map[string]string, ...string) (string, error)) {
 	// Make sure we're in default vault
 	_, err := runSecret("vault", "select", "default")
 	require.NoError(t, err, "vault select should succeed")
@@ -1700,16 +1701,10 @@ func test26LargeSecrets(t *testing.T, tempDir, secretPath, testMnemonic string,
 	assert.Greater(t, len(largeValue), 10000, "should be > 10KB")
 	// Add large secret
-	cmd := exec.Command(secretPath, "add", "large/secret", "--force")
-	cmd.Env = []string{
-		fmt.Sprintf("SB_SECRET_STATE_DIR=%s", tempDir),
-		fmt.Sprintf("SB_SECRET_MNEMONIC=%s", testMnemonic),
-		fmt.Sprintf("PATH=%s", os.Getenv("PATH")),
-		fmt.Sprintf("HOME=%s", os.Getenv("HOME")),
-	}
-	cmd.Stdin = strings.NewReader(largeValue)
-	output, err := cmd.CombinedOutput()
-	require.NoError(t, err, "add large secret should succeed: %s", string(output))
+	_, err = runSecretWithStdin(largeValue, map[string]string{
+		"SB_SECRET_MNEMONIC": testMnemonic,
+	}, "add", "large/secret", "--force")
+	require.NoError(t, err, "add large secret should succeed")
 	// Retrieve and verify
 	retrievedValue, err := runSecretWithEnv(map[string]string{
@@ -1725,15 +1720,9 @@
 BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX
 aWRnaXRzIFB0eSBMdGQwHhcNMTgwMjI4MTQwMzQ5WhcNMjgwMjI2MTQwMzQ5WjBF
 -----END CERTIFICATE-----`
-	cmd = exec.Command(secretPath, "add", "cert/test", "--force")
-	cmd.Env = []string{
-		fmt.Sprintf("SB_SECRET_STATE_DIR=%s", tempDir),
-		fmt.Sprintf("SB_SECRET_MNEMONIC=%s", testMnemonic),
-		fmt.Sprintf("PATH=%s", os.Getenv("PATH")),
-		fmt.Sprintf("HOME=%s", os.Getenv("HOME")),
-	}
-	cmd.Stdin = strings.NewReader(certValue)
-	_, err = cmd.CombinedOutput()
+	_, err = runSecretWithStdin(certValue, map[string]string{
+		"SB_SECRET_MNEMONIC": testMnemonic,
+	}, "add", "cert/test", "--force")
 	require.NoError(t, err, "add certificate should succeed")
 	// Retrieve and verify certificate
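Both blocks above now go through a runSecretWithStdin helper instead of building exec.Command by hand. The helper's definition is not part of this diff; the following is a minimal sketch of what such a helper could look like, inferred only from the signature func(string, map[string]string, ...string) (string, error) used in test26LargeSecrets. The makeRunSecretWithStdin constructor name, the default environment, and the use of secretPath/tempDir are assumptions, not the actual implementation.

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

// makeRunSecretWithStdin returns a test helper matching the signature above:
// it runs the secret binary with the given arguments, a controlled environment
// plus any extra variables, and the provided string on stdin, returning the
// combined output. Hypothetical sketch only.
func makeRunSecretWithStdin(secretPath, tempDir string) func(string, map[string]string, ...string) (string, error) {
	return func(stdin string, extraEnv map[string]string, args ...string) (string, error) {
		cmd := exec.Command(secretPath, args...)
		cmd.Env = []string{
			fmt.Sprintf("SB_SECRET_STATE_DIR=%s", tempDir),
			fmt.Sprintf("PATH=%s", os.Getenv("PATH")),
			fmt.Sprintf("HOME=%s", os.Getenv("HOME")),
		}
		for k, v := range extraEnv {
			cmd.Env = append(cmd.Env, fmt.Sprintf("%s=%s", k, v))
		}
		cmd.Stdin = strings.NewReader(stdin)
		out, err := cmd.CombinedOutput()
		return string(out), err
	}
}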
@@ -1821,10 +1810,10 @@ func test28VaultMetadata(t *testing.T, tempDir string) {
 	require.NoError(t, err, "default vault metadata should be valid JSON")
 	// Verify required fields
-	assert.Equal(t, float64(0), defaultMetadata["derivation_index"])
+	assert.Equal(t, float64(0), defaultMetadata["derivationIndex"])
 	assert.Contains(t, defaultMetadata, "createdAt")
-	assert.Contains(t, defaultMetadata, "public_key_hash")
-	assert.Contains(t, defaultMetadata, "mnemonic_family_hash")
+	assert.Contains(t, defaultMetadata, "publicKeyHash")
+	assert.Contains(t, defaultMetadata, "mnemonicFamilyHash")
 	// Check work vault metadata
 	workMetadataPath := filepath.Join(tempDir, "vaults.d", "work", "vault-metadata.json")
@@ -1836,12 +1825,12 @@ func test28VaultMetadata(t *testing.T, tempDir string) {
 	require.NoError(t, err, "work vault metadata should be valid JSON")
 	// Work vault should have different derivation index
-	workIndex := workMetadata["derivation_index"].(float64)
+	workIndex := workMetadata["derivationIndex"].(float64)
 	assert.NotEqual(t, float64(0), workIndex, "work vault should have non-zero derivation index")
-	// Both vaults created with same mnemonic should have same mnemonic_family_hash
-	assert.Equal(t, defaultMetadata["mnemonic_family_hash"], workMetadata["mnemonic_family_hash"],
-		"vaults from same mnemonic should have same mnemonic_family_hash")
+	// Both vaults created with same mnemonic should have same mnemonicFamilyHash
+	assert.Equal(t, defaultMetadata["mnemonicFamilyHash"], workMetadata["mnemonicFamilyHash"],
+		"vaults from same mnemonic should have same mnemonicFamilyHash")
 }
 func test29SymlinkHandling(t *testing.T, tempDir, secretPath, testMnemonic string) {
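The assertions above pin down the camelCase keys expected in the on-disk vaults.d/<name>/vault-metadata.json file. For orientation, a sketch of a struct shape that would produce those keys; only the four JSON key names (and the uint32 derivation index seen in test19) come from this diff, while the VaultMetadata type name and remaining field types are assumptions.

import "time"

// Hypothetical shape of the vault metadata file the tests read back.
type VaultMetadata struct {
	DerivationIndex    uint32    `json:"derivationIndex"`
	CreatedAt          time.Time `json:"createdAt"`
	PublicKeyHash      string    `json:"publicKeyHash"`
	MnemonicFamilyHash string    `json:"mnemonicFamilyHash"`
}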
@@ -2000,14 +1989,14 @@ func test31EnvMnemonicUsesVaultDerivationIndex(t *testing.T, tempDir, secretPath
 	var defaultMetadata map[string]interface{}
 	err := json.Unmarshal(defaultMetadataBytes, &defaultMetadata)
 	require.NoError(t, err, "default vault metadata should be valid JSON")
-	assert.Equal(t, float64(0), defaultMetadata["derivation_index"], "default vault should have index 0")
+	assert.Equal(t, float64(0), defaultMetadata["derivationIndex"], "default vault should have index 0")
 	workMetadataPath := filepath.Join(tempDir, "vaults.d", "work", "vault-metadata.json")
 	workMetadataBytes := readFile(t, workMetadataPath)
 	var workMetadata map[string]interface{}
 	err = json.Unmarshal(workMetadataBytes, &workMetadata)
 	require.NoError(t, err, "work vault metadata should be valid JSON")
-	assert.Equal(t, float64(1), workMetadata["derivation_index"], "work vault should have index 1")
+	assert.Equal(t, float64(1), workMetadata["derivationIndex"], "work vault should have index 1")
 	// Switch to work vault
 	_, err = runSecret("vault", "select", "work")

View File

@@ -109,7 +109,7 @@ func (cli *Instance) ListVaults(cmd *cobra.Command, jsonOutput bool) error {
 	result := map[string]interface{}{
 		"vaults": vaults,
-		"current_vault": currentVault,
+		"currentVault": currentVault,
 	}
 	jsonBytes, err := json.MarshalIndent(result, "", " ")
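For reference, a self-contained sketch of the shape the --json output takes after this rename, which is what the test01/test02/test22 assertions above check against. The vault names and the []string element type are illustrative assumptions, not taken from the diff.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Illustrative only: vault names and the slice type are assumptions.
	result := map[string]interface{}{
		"vaults":       []string{"default", "work"},
		"currentVault": "default",
	}
	out, _ := json.MarshalIndent(result, "", "  ")
	fmt.Println(string(out))
	// encoding/json sorts map keys, so this prints:
	// {
	//   "currentVault": "default",
	//   "vaults": [
	//     "default",
	//     "work"
	//   ]
	// }
}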

View File

@@ -357,9 +357,9 @@ Passphrase: ` + testPassphrase + `
 	var metadata struct {
 		ID        string    `json:"id"`
 		Type      string    `json:"type"`
-		CreatedAt time.Time `json:"created_at"`
+		CreatedAt time.Time `json:"createdAt"`
 		Flags     []string  `json:"flags"`
-		GPGKeyID  string    `json:"gpg_key_id"`
+		GPGKeyID  string    `json:"gpgKeyId"`
 	}
 	if err := json.Unmarshal(metadataBytes, &metadata); err != nil {
@@ -396,7 +396,7 @@ Passphrase: ` + testPassphrase + `
 	// Create PGP metadata with GPG key ID
 	type PGPUnlockerMetadata struct {
 		secret.UnlockerMetadata
-		GPGKeyID string `json:"gpg_key_id"`
+		GPGKeyID string `json:"gpgKeyId"`
 	}
 	pgpMetadata := PGPUnlockerMetadata{
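The PGPUnlockerMetadata type above embeds secret.UnlockerMetadata, and encoding/json promotes an embedded struct's exported fields into the enclosing JSON object, so the gpgKeyId key simply sits next to the base metadata fields. A minimal, self-contained sketch of that behavior; the stand-in UnlockerMetadata fields are assumptions based only on the id/type keys read earlier in this file, not the real type definition.

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in for secret.UnlockerMetadata; field set is an assumption.
type UnlockerMetadata struct {
	ID   string `json:"id"`
	Type string `json:"type"`
}

// Embedding promotes UnlockerMetadata's fields into the same JSON object.
type PGPUnlockerMetadata struct {
	UnlockerMetadata
	GPGKeyID string `json:"gpgKeyId"`
}

func main() {
	m := PGPUnlockerMetadata{
		UnlockerMetadata: UnlockerMetadata{ID: "pgp-unlocker", Type: "pgp"},
		GPGKeyID:         "0123456789ABCDEF",
	}
	out, _ := json.Marshal(m)
	fmt.Println(string(out))
	// {"id":"pgp-unlocker","type":"pgp","gpgKeyId":"0123456789ABCDEF"}
}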