73 Commits

Author SHA1 Message Date
Tom Limoncelli
c6ea0dc870 Set expectations about the project in README.md 2022-08-25 13:36:54 -04:00
Ben Iofel
6517bfd21d Package missing bins for deb, rpm, and macports (#360) 2022-08-06 09:35:32 -04:00
6543
abb3bbe275 Update golang deps (#359)
* git ignore golang vendor folder

* update golang deps
2022-07-25 10:01:41 -04:00
Ali Azam
221010228e edit (#358)
Fix typo: 'afterwards'.
2022-07-05 12:17:30 -04:00
Tom Limoncelli
66b65751c4 Update CHANGELOG.md (#356) 2022-06-10 07:26:26 -04:00
Joe Block
d84e904973 Add .gitattributes during repo initialization (#352)
Make `blackbox_initialize` include `.blackbox/.gitattributes` when
creating a new repository.

Closes #351

Signed-off-by: Joe Block <jpb@unixorn.net>
2022-03-31 15:34:00 -04:00
Joe Block
7715254169 Update zgen reference to zgenom (#350)
zgen hasn't had an update in three years. zgenom is a fork that is
actively maintained.

Update the zgen reference to use zgenom instead.

Signed-off-by: Joe Block <jpb@unixorn.net>
2022-03-17 15:33:53 -04:00
Tom Limoncelli
8091467952 Improve test data generation (#348)
* Improve test data generation

* Add homebrew to "make test" path
2022-03-16 13:29:31 -04:00
Jesus Galvan
dc01038efe Fix 'chmod' for macOS Monterey 12.3 (#347) 2022-03-16 13:15:18 -04:00
Tom Limoncelli
86716b3432 Merge pull request #345 from raboof/patch-3
recommend nix-shell over nix-env
2022-01-12 17:47:46 -05:00
Arnout Engelen
a03b6bf84a recommend nix-shell over nix-env
as it's more idiomatic nix
2022-01-12 18:04:15 +01:00
Tom Limoncelli
b520eb04a4 Merge pull request #330 from Humzini/patch-1
Update LICENSE.txt
2021-05-14 09:52:46 -04:00
Humz
a6d8f3d981 Update LICENSE.txt 2021-05-08 10:23:09 +01:00
Tom Limoncelli
a413affb56 Merge pull request #325 from darwinz/readme-confusion
Update some terms in README to get past some confusion
2021-03-29 16:18:06 -04:00
Brandon Johnson
9031973d38 Update some terms in README to get past some confusion 2021-03-29 13:01:52 -07:00
Fabien Villepinte
c1a2b3e88a Fix formatting in README (#322) 2021-01-11 16:27:58 -05:00
Max Horstmann
d45564db11 (Go version) Add release workflow (#318) 2020-11-19 08:42:22 -05:00
Max Horstmann
b71378db82 (Go version) Multi platform build (#319) 2020-11-18 10:42:08 -05:00
Max Horstmann
4807dc527c Revert "Remove v2 from go.mod- doesn't match latest tag ye"
This reverts commit 412af48ab5.
2020-11-17 09:47:39 -05:00
Max Horstmann
412af48ab5 Remove v2 from go.mod- doesn't match latest tag ye 2020-11-17 09:29:41 -05:00
Max Horstmann
2c0c8cefa4 Go version: run build checks & tests (#317) 2020-11-13 10:01:22 -05:00
Tom Limoncelli
4b1e090446 Fix broken test 2020-11-12 02:14:07 -05:00
Max Horstmann
05edef2e88 clean up outdated workflows (#316) 2020-11-08 18:33:14 -05:00
Tom Limoncelli
513e9fc55c Update RELEASE_ENGINEERING.md 2020-09-09 09:53:36 -04:00
Tom Limoncelli
2eafe8429a Update CHANGELOG.md 2020-09-09 09:53:34 -04:00
Tom Limoncelli
6a34987ba0 disable demo zip stuff 2020-07-24 15:59:33 -04:00
Tom Limoncelli
b07793faf8 Golanggithub (#314)
* Add Github Actions
2020-07-24 14:25:17 -04:00
Tom Limoncelli
1c77c87555 Implement blackbox in Golang (#250)
* Initial release
2020-07-24 14:21:33 -04:00
Tom Limoncelli
e049c02655 More thoughts on v2 2020-05-27 08:53:17 -04:00
Tom Limoncelli
5ce3c9370f Revert "[wip] Fix how vcs_relative_path first resolves an absolute path"
This reverts commit a7fd514569.

Sadly this doesn't work in all cases (if the file doesn't exist) and
fails on MacOS (I think).
2020-05-27 08:03:12 -04:00
Tom Limoncelli
6ae742aa7a Make the /etc/profile.d script executable 2020-05-26 19:31:24 +00:00
James Ottaway
a7fd514569 [wip] Fix how vcs_relative_path first resolves an absolute path 2020-05-14 09:49:49 -04:00
James Ottaway
72253818be Call vcs_ignore before the file is shredded 2020-05-14 09:49:49 -04:00
James Ottaway
6761bfc356 Add a test for blackbox_cat 2020-05-14 09:49:49 -04:00
Lucas Ramage
cf3915fbd8 Document working with Ansible (#306)
Bug: https://github.com/StackExchange/blackbox/issues/295
See: https://docs.ansible.com/ansible/latest/user_guide/vault.html#providing-vault-passwords
See: https://docs.ansible.com/ansible/latest/reference_appendices/config.html#default-vault-password-file
2020-05-03 11:41:34 -04:00
Tom Limoncelli
2ef26f4c90 Update RELEASE_ENGINEERING.md 2020-04-29 20:38:58 -04:00
Tom Limoncelli
6a53644a62 Update CHANGELOG.md 2020-04-29 20:36:37 -04:00
Tom Limoncelli
6c83f606da Revert "Fix how vcs_relative_path first resolves an absolute path (#304)"
This reverts commit 87b7cd0eae.
2020-04-29 19:46:13 -04:00
James Ottaway
87b7cd0eae Fix how vcs_relative_path first resolves an absolute path (#304) 2020-04-27 08:51:24 -04:00
Lucas Ramage
90418566e3 Respect PREFIX variable for copy-install (#294) 2020-02-01 10:10:25 -05:00
Travis Paul
35ebdabe81 Documentation: Add pkgsrc install instructions (#292)
Available in pkgsrc (via pkgin) as scm-blackbox
2020-01-27 12:25:06 -05:00
Joshua B
02c3edb9e6 Add better support for Windows (#291) 2019-10-02 09:01:32 -04:00
Tom Limoncelli
51ed419354 Clarify gpg version usage (#290) 2019-09-19 16:43:37 -04:00
Tom Limoncelli
31240d18e2 Doc: Missing closing parenthesis
Missing closing parenthesis
2019-08-30 07:33:46 -04:00
sblondon
5ee696239d Missing closing parenthesis
A `)` parenthesis is missing in the README file. This PR adds it.
2019-08-30 12:50:40 +02:00
Ben Holden-Crowther
155140d24c Documentation: Fix capitalization in heading (#281)
Very simple change
2019-04-20 17:04:59 -04:00
Reut Sharabani
79a9e978b0 Fix typo in readme file (#278)
"machine secure machine" -> "secure machine"
2019-04-20 17:04:23 -04:00
Pierre Gordon
f67d15638f BUG: blackbox_initialize output uses keyrings instead of $BLACKBOXDATA (#284) 2019-03-22 08:59:19 -04:00
Sirio Balmelli
292f1e5f74 DOCUMENTATION: Promote 'getting started' to a section, enumerate steps (#283)
Explicitly wrap lines in sub-paragraphs with '\'.

This is an attempt to have "getting started" stand out and improve
user-approachability (admittedly, subjective).

Signed-off-by: Sirio Balmelli <sirio@b-ad.ch>
2019-03-04 13:20:53 -05:00
Johannes Liebermann
226a84ba3c Commit changes to gitignore when deregistering (#282) 2019-01-09 10:43:24 -05:00
Tom Limoncelli
6bb21bb83a Documentation: Update copyright year
Update license date
2019-01-03 10:28:45 -05:00
Ben Holden-Crowther
b0136641f0 Update license date
To 2019
2019-01-03 14:41:29 +00:00
Tom Limoncelli
ad66e58bfe RELEASE_ENGINEERING.md 2018-12-19 20:20:44 -05:00
Tom Limoncelli
70e8c625e5 Add support for NetBSD and SunOS (SmartOS)
Add support for NetBSD and SunOS (SmartOS)
2018-12-09 07:32:44 -05:00
Tom Limoncelli
d6f997e8df README.md: Minor fixes 2018-12-07 13:49:38 -05:00
Travis Paul
e17c44aa61 Add NetBSD and SmartOS to list of supported OSes. 2018-12-03 09:19:27 -06:00
Travis Paul
f681872c4d Remove -n 1 argument from the xargs invocation in blackbox_shred_all_files.
The -I and -n options are mutually-exclusive, don't work as
expected with xargs from SunOS, and appear to be unnecessary anyway.
2018-11-30 13:51:01 +08:00
Travis Paul
3594a3124e Bash from pkgsrc has a flag to disable importing functions unless explicitly enabled.
The patch was created in response to ShellShock and still remains:
https://www.mail-archive.com/smartos-discuss@lists.smartos.org/msg01247.html
https://github.com/NetBSD/pkgsrc/blob/trunk/shells/bash/patches/patch-shell.c
2018-11-30 13:49:15 +08:00
Travis Paul
fd3ad2fcea Add better support for NetBSD and SunOS in test scripts. 2018-11-30 10:59:21 +08:00
Travis Paul
3a491aad01 Add NetBSD and SunOS (SmartOS) support to _stack_lib.sh. 2018-11-29 14:01:54 +08:00
Travis Paul
b3b0604be7 Add NetBSD and SunOS support to cp_permissions.
Note that this likely won't work on Solaris without Coreutils as
Solaris lacks stat(1). SmartOS has stat from Coreutils in base
and the chmod(1) from its OpenSolaris heritage. Using the chmod
from either Coreutils or Solaris will work the same (in this case)
on SmartOS.
2018-11-29 13:31:47 +08:00
Travis Paul
6408b622bf Add NetBSD and SunOS support to md5sum_file. 2018-11-29 12:47:37 +08:00
Tom Limoncelli
ab1430b74d Testing: Fix confidence test. 2018-10-03 10:46:07 -04:00
Kamil Wilczek
17ce90125b .blackbox is now the default config directory for new repos. (#272)
- _blackbox_common.sh sets the default Blackbox directory
  for the new repositories using the first entry of the
  BLACKBOX_CANDIDATES array. This small change sets the
  first entry to the new .blackbox dir (instead of the keyring/live)
2018-10-03 09:09:11 -04:00
Tobias Dubois
9d305233ca Add blackbox_decrypt_file (#270)
Add a command for decrypting single files. It is currently just an alias
for blackbox_edit_start.
It is meant to be a more obvious command for decrypting a single file
without editing it.

Fixes #268
2018-09-27 07:31:03 -04:00
r-savu
dc9fa326f4 Improved compatibility: change"/bin/[x]" to "/usr/bin/env [x]" (#265)
changed paths of the form "/bin/[x]" into "/usr/bin/env [x]" (#265)
2018-08-13 10:39:15 -04:00
winter0mute
74de17a4f6 Add blackbox_less. (#263)
* Add blackbox_view and use PAGER (default to less)
2018-07-26 10:24:32 -04:00
Ben Creasy
ebaa22a981 add nix method of install (#261) 2018-07-10 10:44:34 -07:00
Tom Limoncelli
0b8c3df70b Linked setting up of GPG key (#260) 2018-07-05 08:26:19 -07:00
Tom Limoncelli
918632436a Reformat README.md 2018-07-05 10:31:14 -04:00
Tom Limoncelli
d268a9e16a Release v1.20180615 2018-06-18 21:17:11 -04:00
Tom Limoncelli
ad2bc19b33 Merge branch 'master' of work-github.com:StackExchange/blackbox 2018-06-18 21:15:25 -04:00
Ben Limmer
1988a883a0 Restore make manual-install with warning. (#258) 2018-06-15 20:04:07 -04:00
113 changed files with 6679 additions and 94 deletions

28
.github/workflows/build.yml vendored Normal file
View File

@@ -0,0 +1,28 @@
name: build
on:
pull_request:
branches: [ master ]
push:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: ^1.15
- name: Build binaries
run: go run build/build.go
- name: Run unit tests
run: go test ./...
- name: Run integration tests
working-directory: integrationTest
run: umask 0027 ; rm -rf /tmp/bbhome-* && go test -long -nocleanup

59
.github/workflows/release.yml vendored Normal file
View File

@@ -0,0 +1,59 @@
on:
release:
types: [published]
name: release
jobs:
release:
name: release
runs-on: ubuntu-latest
steps:
- name: Get release
id: get_release
uses: bruceadams/get-release@v1.2.2
env:
GITHUB_TOKEN: ${{ github.token }}
- name: Checkout repo
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: ^1.15
- name: Build binaries
run: go run build/build.go
- name: Upload blackbox-Darwin
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.get_release.outputs.upload_url }}
asset_path: ./blackbox-Darwin
asset_name: blackbox-Darwin
asset_content_type: application/octet-stream
- name: Upload blackbox-Linux
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.get_release.outputs.upload_url }}
asset_path: ./blackbox-Linux
asset_name: blackbox-Linux
asset_content_type: application/octet-stream
- name: Upload blackbox.exe
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.get_release.outputs.upload_url }}
asset_path: ./blackbox.exe
asset_name: blackbox.exe
asset_content_type: application/octet-stream

14
.gitignore vendored
View File

@@ -5,13 +5,9 @@ __pycache__/
# C extensions
*.so
# backup shell files
*~
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
eggs/
@@ -54,3 +50,13 @@ coverage.xml
# Sphinx documentation
docs/_build/
# macOS
.DS_Store
# Blackbox
bbintegration
.*.swp
/integrationTest/.blackbox
# golang
/vendor/

View File

@@ -1,3 +1,50 @@
Release v1.20220610
NOTE: I don't have a lot of time to commit to this project. I'd gladly accept help, especially
with improving the testing on various operating systems.
Major feature: macOS users rejoice! Incompatibility with macOS Monterey 12.3 is fixed! (#347)
* Add .gitattributes during repo initialization (#352)
* Update zgen reference to zgenom (#350)
* Improve test data generation (#348)
* Fix 'chmod' for macOS Monterey 12.3 (#347)
Release v1.20200429
NOTE: While there is now support for NetBSD and SunOS/SmartOS, the
release process only tests on macOS and CentOS7 because that's all I
have access to.
* Fix tools that break when ".." or "." are used in a path (#304)
* Respect PREFIX variable for copy-install (#294)
* Documentation: Add pkgsrc install instructions (#292)
* Improve support for Windows (#291)
* Clarify gpg version usage (#290)
* Many documentation fixes
* DOCUMENTATION: Promote 'getting started' to a section, enumerate steps (#283)
* Commit changes to gitignore when deregistering (#282)
* Add support for NetBSD and SunOS (SmartOS)
* Defend against ShellShock
Release v1.20181219
* New OS support: Add support for NetBSD and SunOS (SmartOS)
* Testing: Improve confidence test.
* .blackbox is now the default config directory for new repos. (#272)
* Add blackbox_decrypt_file (#270)
* Improved compatibility: change"/bin/[x]" to "/usr/bin/env [x]" (#265)
* Add blackbox_less. (#263)
* add nix method of install (#261)
* Linked setting up of GPG key (#260)
Release v1.20180618
* Restore `make manual-install` with warning. (#258)
Release v1.20180615
* Standardize on .blackbox for config. Use keyrings/live for backwards compatibility.

74
DESIGN.md Normal file
View File

@@ -0,0 +1,74 @@
BlackBox Internals
==================
The goal of the Go rewrite is to improve the usability and
maintainability of Blackbox, while making it easier to implement new features.
The system is built in distinct layers: view, controller, model.
Suppose there is a subcommand "`foo`". `blackbox.go` parses the
user's command line args and calls `cmdFoo()`, which is given
everything it needs to do the operation. For example, it is given the
filenames the user specified exactly; even if an empty list means "all
files", at this layer the empty list is passed to the function.
`cmdFoo()` contains the business logic of how the operation should be
done: usually iterating over filenames and calling verb(s) for each
one. For example if an empty file list means "all files", this is the
layer that enumerates the files.
`cmdFoo()` is implemented in the file `cmd_foo.go`. The caller of
`cmdFoo()` should provide all data it needs to get the job done.
`cmdFoo()` doesn't refer to global flags, they are passed to the
function as parameters. Therefore the function has zero side-effects
(except possibly logging) and can be called as library functions by
other systems. This is the external (binary) API which should be
relatively stable.
`cmdFoo()` calls verbs that are in `bbutil/`. Some of those verbs are
actually interfaces. For example, any VCS-related verbs are actually a
Go interface which might be implemented one of many ways (Git,
Subversion, Mercurial), GPG-functions may be implemented by shelling
out to `gpg.exe` or by using Go's gpg library.
The layers look like this:
| View | `blackbox.go` | Parses User Commands, calls controller |
| Controller | `cmd_*.go` | The business logic. Iterates and calls verbs |
| Model | `pkg/bbutil` | Verbs |
| Interfaces | `pkg/*` | Interfaces and their implementations |
At least that's the goal. We'll see how well we can achieve this.
Version 2.0
===========
Software architecture.
We try to keep the command-line parsing separate from the business
logic and all plug-ins. This keeps things clean and easy to refactor.
In fact layer 2 could be used as a stand-alone module for projects
that want to embed blackbox actions.
Layer 1: The command itself
* cmd/blackbox/blackbox.go -- main() not much more
* cmd/blackbox/cli.go -- Set up and call the urfave/cli flag parser
* cmd/blackbox/drive.go -- Check # of arguments, conflicting flags, and then call the business logic layer
Layer 2: The business logic
* pkg/box/box.go -- The interface to accessing .blackbox (admins, files, etc.)
* pkg/box/verbs.go -- Verbs called by Layer 1. Just the verbs
* pkg/box/boxutils.go -- Functions needed by the verbs
Layer 3: The plug-ins
* pkg/vcs/... -- Plug-ins for Git, (Mercurial, Subversion, Perforce,) and None
* pkg/crypters/... -- Plug-ins for PGP access: GnuPG, (go-openpgp, others in the future)
Layer 4: Support functions for use by Layer 3
* pkg/bbutil/filestats.go -- File manipulations
* pkg/bbutil/runbash.go -- Safely run external Linux commands

View File

@@ -1,6 +1,6 @@
The MIT License (MIT)
Copyright (c) 2014-2018 Stack Exchange, Inc.
Copyright (c) 2014-2021 Stack Exchange, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,5 +1,5 @@
SHELL=/bin/sh
PREFIX?=/usr/local
PKGNAME=stack_blackbox
BASEDIR?=$(HOME)
OUTPUTDIR?="$(BASEDIR)/debbuild-${PKGNAME}"
@@ -9,9 +9,9 @@ all:
@echo ' make update Update any generated files'
@echo ' make packages-rpm Make RPM packages'
@echo ' make packages-deb Make DEB packages'
@echo ' make symlinks-install Make symlinks in /usr/local/bin/'
@echo ' make copy-install Copy "bin" files to /usr/local/bin/'
@echo ' make usrlocal-uninstall Remove blackbox files from /usr/local/bin/'
@echo ' make symlinks-install Make symlinks in ${PREFIX}/bin/'
@echo ' make copy-install Copy "bin" files to ${PREFIX}/bin/'
@echo ' make copy-uninstall Remove blackbox files from ${PREFIX}/bin/'
@echo ' make test Run tests'
install:
@@ -55,18 +55,25 @@ unlock-rpm:
# Manual install
#
symlinks-install:
@echo 'Symlinking files from ./bin to /usr/local/bin'
@cd bin && for f in `find . -type f -iname "*" ! -iname "Makefile"`; do ln -fs `pwd`/$$f /usr/local/bin/$$f; done
@echo "Symlinking files from ./bin to ${PREFIX}/bin"
@cd bin && for f in `find . -type f -iname "*" ! -iname "Makefile"`; do ln -fs `pwd`/$$f $(PREFIX)/bin/$$f; done
@echo 'Done.'
manual-install:
@echo '***************************************************************'
@echo '* DEPRECATED *'
@echo '* `make manual-install` is now called `make symlinks-install` *'
@echo '***************************************************************'
$(MAKE) symlinks-install
copy-install:
@echo 'Copying files from ./bin to /usr/local/bin'
@cd bin && for f in `find . -type f -iname "*" ! -iname "Makefile"`; do cp `pwd`/$$f /usr/local/bin/$$f; done
@echo "Copying files from ./bin to ${PREFIX}/bin"
@cd bin && for f in `find . -type f -iname "*" ! -iname "Makefile"`; do cp `pwd`/$$f $(PREFIX)/bin/$$f; done
@echo 'Done.'
usrlocal-uninstall:
@echo 'Removing blackbox files from /usr/local/bin'
@cd bin && for f in `find . -type f -iname "*" ! -iname "Makefile"`; do rm /usr/local/bin/$$f; done
copy-uninstall:
@echo "Removing blackbox files from ${PREFIX}/bin"
@cd bin && for f in `find . -type f -iname "*" ! -iname "Makefile"`; do rm $(PREFIX)/bin/$$f; done
@echo 'Done.'
#
@@ -135,6 +142,6 @@ test: confidence
confidence:
@if [ -e ~/.gnupg ]; then echo ERROR: '~/.gnupg should not exist. If it does, bugs may polute your .gnupg configuration. If the code has no bugs everything will be fine. Do you feel lucky?'; false ; fi
@if which >/dev/null gpg-agent ; then pkill gpg-agent ; rm -rf /tmp/tmp.* ; fi
@export PATH="$(PWD)/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/opt/local/bin:$(PATH)" ; tools/auto_system_test
@export PATH="$(PWD)/bin:$(PREFIX)/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/opt/local/bin:/usr/local/MacGPG2/bin:/opt/homebrew/bin:$(PATH)" ; tools/auto_system_test
@if which >/dev/null gpg-agent ; then pkill gpg-agent ; fi
@if [ -e ~/.gnupg ]; then echo ERROR: '~/.gnupg was created which means the scripts might be poluting GnuPG configuration. Fix this bug.'; false ; fi

65
README-v2.md Normal file
View File

@@ -0,0 +1,65 @@
BlackBox v2
===========
WARNING: v2 is still experimental. It is in the same git repo as v1
because the filenames do not overlap. Please do not mix the two. v1
is in `bin`. v2 is in `cmd/blackbox` and `binv2`.
Blackbox is an open source tool that enables you to safely store sensitive information in
Git (or other) repos by encrypting it with GPG. Only the encrypted
version of the file is available. You are free to provide access
to the repo, but only people with the right GPG keys can access the
encrypted data.
Things you should **never** store in a repo without encryption:
* TLS (SSL) certificates
* Passwords
* API keys
* And more!
Project Info:
* [Overview](user-overview.md)
* [Why is this important?](why-is-this-important.md)
* [Support/Community](support.md)
* [How BB encrypts](encryption.md)
* [OS Compatibility](compatibility.md)
* [Installation Instructions](installation.md)
* [Alternatives](alternatives.md)
User Info:
* [Enabling Blackbox on a Repo](enable-repo.md)
* [Enroll a file](enable-repo.md)
* [Full Command List](full-command-list.md)
* [Add/Remove users](admin-ops.md)
* [Add/Remove files](file-ops.md)
* [Advanced techniques](advanced.md)
* [Use with Role Accounts](role-accounts.md)
* [Backwards Compatibility](backwards-compatibility.md)
* [Replacing expired keys](expired-keys.md)
* [Git Tips](git-tips.md)
* [SubVersion Tips](subversion-tips.md)
* [GnuPG tips](gnupg-tips.md)
* [Use with Ansible](with-ansible.md)
* [Use with Puppet](with-puppet.md)
For contributors:
* [Developer Info](dev.md)
* [Code overview](dev-code-overview.md)
* [HOWTO: Add new OS support](dev-add-os-support.md)
* [HOWTO: Add new VCS support](dev-add-vcs-support.md)
A slide presentation about an older release [is on SlideShare](http://www.slideshare.net/TomLimoncelli/the-blackbox-project-sfae).
Join our mailing list: [https://groups.google.com/d/forum/blackbox-project](https://groups.google.com/d/forum/blackbox-project)
License
=======
This content is released under the MIT License.
See the [LICENSE.txt](LICENSE.txt) file.

View File

@@ -1,8 +1,10 @@
BlackBox [![CircleCI](https://circleci.com/gh/StackExchange/blackbox.svg?style=shield)](https://circleci.com/gh/StackExchange/workflows/blackbox)
BlackBox [![CircleCI](https://circleci.com/gh/StackExchange/blackbox.svg?style=shield)](https://circleci.com/gh/StackExchange/workflows/blackbox) [![Build Status](https://github.com/StackExchange/blackbox/workflows/build/badge.svg)](https://github.com/StackExchange/blackbox/actions?query=workflow%3Abuild+branch%3Amaster)
========
Safely store secrets in a VCS repo (i.e. Git, Mercurial, Subversion or Perforce). These commands make it easy for you to Gnu Privacy Guard (GPG) encrypt specific files in a repo so they are "encrypted at rest" in your repository. However, the scripts make it easy to decrypt them when you need to view or edit them, and decrypt them for use in production. Originally written for Puppet, BlackBox now works with any Git or Mercurial repository.
WARNING: The goal of this project is to be a simple wrapper around `gpg` so you and your coworkers don't have to remember all those inscrutable and confusing flags. It is *not* intended to be a sophisticated encryption system that solves all problems or supports a large number of files. The ideal use-case is to keep secrets in a secure service such as Conjur, AWS KMS, Azure Key Vault or GCP KMS; then use Blackbox for safely storing the API keys needed to access that system. That way you are encrypting one tiny file.
A slide presentation about an older release [is on SlideShare](http://www.slideshare.net/TomLimoncelli/the-blackbox-project-sfae).
Join our mailing list: [https://groups.google.com/d/forum/blackbox-project](https://groups.google.com/d/forum/blackbox-project)
@@ -19,14 +21,19 @@ Table of Contents
- [Compatibility](#compatibility)
- [How is the encryption done?](#how-is-the-encryption-done)
- [What does this look like to the typical user?](#what-does-this-look-like-to-the-typical-user)
- [How to use the secrets with Puppet?](#how-to-use-the-secrets-with-puppet)
- [Entire files](#entire-files)
- [Small strings](#small-strings)
- [How to enroll a new file into the system?](#how-to-enroll-a-new-file-into-the-system)
- [How to remove a file from the system?](#how-to-remove-a-file-from-the-system)
- [How to indoctrinate a new user into the system?](#how-to-indoctrinate-a-new-user-into-the-system)
- [How to remove a user from the system?](#how-to-remove-a-user-from-the-system)
- [Enabling BlackBox For a Repo](#enabling-blackbox-for-a-repo)
- Configuration Management
- [How to use the secrets with Ansible?](#how-to-use-the-secrets-with-ansible)
- [How to use the secrets with Puppet?](#how-to-use-the-secrets-with-puppet)
- [Entire files](#entire-files)
- [Small strings](#small-strings)
- File Management
- [How to enroll a new file into the system?](#how-to-enroll-a-new-file-into-the-system)
- [How to remove a file from the system?](#how-to-remove-a-file-from-the-system)
- User Management
- [How to indoctrinate a new user into the system?](#how-to-indoctrinate-a-new-user-into-the-system)
- [How to remove a user from the system?](#how-to-remove-a-user-from-the-system)
- Repo Management
- [Enabling BlackBox For a Repo](#enabling-blackbox-for-a-repo)
- [Set up automated users or &ldquo;role accounts&rdquo;](#set-up-automated-users-or-role-accounts)
- [Replacing expired keys](#replacing-expired-keys)
- [Some common errors](#some-common-errors)
@@ -50,7 +57,28 @@ Rather than one GPG passphrase for all the files, each person with access has th
Automated processes often need access to all the decrypted files. This is easy too. For example, suppose Git is being used for Puppet files. The master needs access to the decrypted version of all the files. Simply set up a GPG key for the Puppet master (or the role account that pushes new files to the Puppet master) and have that user run `blackbox_postdeploy` after any files are updated.
Getting started is easy. Just `cd` into a Git, Mercurial, Subversion or Perforce repository and run `blackbox_initialize`. After that, if a file is to be encrypted, run `blackbox_register_new_file` and you are done. Add and remove keys with `blackbox_addadmin` and `blackbox_removeadmin`. To view and/or edit a file, run `blackbox_edit`; this will decrypt the file and open with whatever is specified by your $EDITOR environment variable. When you close the editor the file will automatically be encrypted again and the temporary plaintext file will be shredded. If you need to leave the file decrypted while you update you can use the `blackbox_edit_start` to decrypt the file and `blackbox_edit_end` when you want to "put it back in the box."
Getting started
---------------
1. If you don't have a GPG key, set it up using instructions such as:
[Set up GPG key](https://help.github.com/articles/generating-a-new-gpg-key/). \
Now you are ready to go.
1. `cd` into a Git, Mercurial, Subversion or Perforce repository and run `blackbox_initialize`.
1. If a file is to be encrypted, run `blackbox_register_new_file` and you are done.
1. Add and remove keys with `blackbox_addadmin` and `blackbox_removeadmin`.
1. To view and/or edit a file, run `blackbox_edit`;
this will decrypt the file and open with whatever is specified by
your $EDITOR environment variable. \
When you close the editor the
file will automatically be encrypted again and the temporary plaintext
file will be shredded. \
If you need to leave the file decrypted while
you update you can use the `blackbox_edit_start` to decrypt the file
and `blackbox_edit_end` when you want to "put it back in the box."
Why is this important?
======================
@@ -64,15 +92,17 @@ The ability to be open and transparent about our code, with the exception of a f
Installation Instructions
=========================
- *The hard way (manual*: Copy all the files in "bin" to your "bin".
- *The hard way (automatic)*: `make copy-install` will copy the bin files into /usr/local/bin (uninstall with `make usrlocal-uninstall`).
- *The symlinks way*: `make symlinks-install` will make symlinks of the bin files into /usr/local/bin (uninstall with `make usrlocal-uninstall`) (useful when doing development)
- *The hard way (manual)*: Copy all the files in "bin" to your "bin".
- *The hard way (automatic)*: `make copy-install` will copy the bin files into $PREFIX/bin, default is /usr/local (uninstall with `make copy-uninstall`).
- *The symlinks way*: `make symlinks-install` will make symlinks of the bin files into $PREFIX/bin, default is /usr/local (uninstall with `make copy-uninstall`) (useful when doing development)
- *The MacPorts Way*: `sudo port install vcs_blackbox`
- *The Homebrew Way*: `brew install blackbox`
- *The RPM way*: Check out the repo and make an RPM via `make packages-rpm`; now you can distribute the RPM via local methods. (Requires [fpm](https://github.com/jordansissel/fpm).)
- *The Debian/Ubuntu way*: Check out the repo and make a DEB via `make packages-deb`; now you can distribute the DEB via local methods. (Requires [fpm](https://github.com/jordansissel/fpm).)
- *The Antigen Way*: Add `antigen bundle StackExchange/blackbox` to your .zshrc
- *The Zgen Way*: Add `zgen load StackExchange/blackbox` to your .zshrc where you're loading your other plugins.
- *The Zgenom Way*: Add `zgenom load StackExchange/blackbox` to your .zshrc where you're loading your other plugins.
- *The Nix Way*: `nix-shell -p blackbox`
- *The Pkgsrc Way*: `pkgin in scm-blackbox`
Commands
========
@@ -83,12 +113,14 @@ Commands
| `blackbox_edit_start <file>` | Decrypt a file so it can be updated |
| `blackbox_edit_end <file>` | Encrypt a file after blackbox_edit_start was used |
| `blackbox_cat <file>` | Decrypt and view the contents of a file |
| `blackbox_view <file>` | Like blackbox_cat but pipes to `less` or $PAGER |
| `blackbox_diff` | Diff decrypted files against their original crypted version |
| `blackbox_initialize` | Enable blackbox for a GIT or HG repo |
| `blackbox_register_new_file <file>` | Encrypt a file for the first time |
| `blackbox_deregister_file <file>` | Remove a file from blackbox |
| `blackbox_list_files` | List the files maintained by blackbox |
| `blackbox_list_admins` | List admins currently authorized for blackbox |
| `blackbox_decrypt_file <file>` | Decrypt a file |
| `blackbox_decrypt_all_files` | Decrypt all managed files (INTERACTIVE) |
| `blackbox_postdeploy` | Decrypt all managed files (batch) |
| `blackbox_addadmin <gpg-key>` | Add someone to the list of people that can encrypt/decrypt secrets |
@@ -113,6 +145,8 @@ BlackBox automatically determines which VCS you are using and does the right thi
- MacOS X
- Cygwin (Thanks, Ben Drasin!) **See Note Below**
- MinGW (git bash on windows) **See Note Below**
- NetBSD
- SmartOS
To add or fix support for a VCS system, look for code at the end of `bin/_blackbox_common.sh`
@@ -203,6 +237,22 @@ What does this look like to the typical user?
Wait... it can be even easier than that! Run `blackbox_edit FILENAME`, and it'll decrypt the file in a temp file and call `$EDITOR` on it, re-encrypting again after the editor is closed.
How to use the secrets with Ansible?
===================================
Ansible Vault provides functionality for encrypting both entire files and strings stored within files; however,
keeping track of the password(s) required for decryption is not handled by this module.
Instead one must specify a password file when running the playbook.
Ansible example for password file: `my_secret_password.txt.gpg`
```
ansible-playbook --vault-password-file my_secret_password.txt site.yml
```
Alternatively, one can specify this in the `ANSIBLE_VAULT_PASSWORD_FILE` environment variable.
How to use the secrets with Puppet?
===================================
@@ -299,7 +349,7 @@ FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the
To join the list of people that can edit the file requires three steps; You create a GPG key and add it to the key ring. Then, someone that already has access adds you to the system. Lastly, you should test your access.
### Step 1: YOU create a GPG key pair on a secure machine and add to public keychain.
### Step 1: NEW USER creates a GPG key pair on a secure machine and adds to public keychain.
If you don't already have a GPG key, here's how to generate one:
@@ -307,7 +357,19 @@ If you don't already have a GPG key, here's how to generate one:
gpg --gen-key
```
Pick defaults for encryption settings, 0 expiration. Pick a VERY GOOD passphrase. Store a backup of the private key someplace secure. For example, keep the backup copy on a USB drive that is locked in a safe. Or, at least put it on a secure machine with little or no internet access, full-disk-encryption, etc. Your employer probably has rules about how to store such things.
WARNING: New versions of GPG generate keys which are not understood by
old versions of GPG. If you generate a key with a new version of GPG,
this will cause problems for users of older versions of GPG.
Therefore it is recommended that you either assure that everyone using
Blackbox have the exact same version of GPG, or generate GPG keys
using a version of GPG as old as the oldest version of GPG used by
everyone using Blackbox.
Pick defaults for encryption settings, 0 expiration. Pick a VERY GOOD passphrase. Store a backup of the private key someplace secure. For example, keep the backup copy on a USB drive that is locked in safe. Or, at least put it on a secure machine with little or no internet access, full-disk-encryption, etc. Your employer probably has rules about how to store such things.
FYI: If generating the key is slow, this is usually because the system
isn't generating enough entropy. Tip: Open another window on that
machine and run this command: `ls -R /`
Now that you have a GPG key, add yourself as an admin:
@@ -341,13 +403,14 @@ ht push
NOTE: Creating a Role Account? If you are adding the pubring.gpg of a role account, you can specify the directory where the pubring.gpg file can be found as a 2nd parameter: `blackbox_addadmin puppetmaster@puppet-master-1.example.com /path/to/the/dir`
### Step 2: SOMEONE ELSE adds you to the system.
### Step 2: EXISTING ADMIN adds new user to the system.
Ask someone that already has access to re-encrypt the data files. This gives you access. They simply decrypt and re-encrypt the data without making any changes.
Pre-check: Verify the new keys look good.
```
git pull # Or whatever is required for your system
gpg --homedir=.blackbox --list-keys
```
@@ -372,7 +435,7 @@ hg commit
hg push
```
### Step 3: YOU test.
### Step 3: NEW USER tests.
Make sure you can decrypt a file. (Suggestion: Keep a dummy file in VCS just for new people to practice on.)

View File

@@ -2,6 +2,7 @@ Table of Contents:
==================
- [Branches and Tags:](#branches-and-tags)
- [Testing:](#testing)
- [Build Tasks](#build-tasks)
- [Stable Releases](#stable-releases)
- [Production Releases](#production-releases)
@@ -19,12 +20,42 @@ There are 3 branches/tags:
If you are packaging BlackBox for distribution, you should track the *tag production*. You might also want to provide a separate package that tracks *tag stable:* for early adopters.
Testing
=======
Tips:
* macOS: `brew install gpg pinentry`
* FreeBSD: `pkg install gpg gmake`
* CentOS7: `yum install gpg`
To run a suite of tests:
```
cd ~/src/github.com/StackExchange/blackbox
make test
```
FYI: For FreeBSD, use `gmake test`
Build Tasks
===========
Stable Releases
===============
Step 0. Test the software
Run this command to run the unit and system tests:
```
make test
```
NOTE: The tests require pinentry-tty. On macOS with NIX this
can be installed via: `nix-env -i pinentry`
Marking the software to be "stable":
Step 1. Update CHANGELOG.md
@@ -72,6 +103,12 @@ git tag "$R"
git push origin tag "$R"
```
Step 4. Get credit!
Record the fact that you did this release in your weekly accomplishments file.
Updating MacPorts (automatic)
=============================

View File

@@ -18,41 +18,87 @@ These are the things I'd like to change someday.
There should be one program, with subcommands that have names that make more sense:
* `blackbox admin add <key>`
* `blackbox admin list`
* `blackbox admin remove <key>`
* `blackbox cat <filename> ...`
* `blackbox decrypt <filename> ...`
* `blackbox diff <filename> ...`
* `blackbox edit <filename> ...`
* `blackbox encrypt <filename> ...`
* `blackbox file add <filename> ...`
* `blackbox file list`
* `blackbox file remove <filename> ...`
* `blackbox info`
* `blackbox init`
* `blackbox register <filename> <...>`
* `blackbox deregister <filename> <...>`
* `blackbox edit <filename> <...>`
* `blackbox decrypt <filename> <...>`
* `blackbox encrypt <filename> <...>`
* `blackbox decrypt_all`
* `blackbox addadmin <key>`
* `blackbox removeadmin <key>`
* `blackbox cat <filename> <...>`
* `blackbox diff <filename> <...>`
* `blackbox list_files`
* `blackbox list_admins`
* `blackbox shred_all`
* `blackbox update_all`
* `blackbox whatsnew`
* `blackbox reencrypt`
* `blackbox shred --all|<filename> ...`
* `blackbox status --all|<filename> ...`
Backwards compatibility: The old commands would simply call the new commands.
Backwards compatibility: The old scripts will be rewritten to use the new commands.
## Change the "keyrings" directory
The name "keyrings" was unfortunate. First, it should probably begin with a ".". Second, it stores more than just keyrings. Lastly, I'm finding that in most cases we want many repos to refer to the same keyring, which is not supported very well.
The name `keyrings` was unfortunate. First, it should probably begin with a `.`. Second, it stores more than just keyrings. Lastly, I'm finding that in most cases we want many repos to refer to the same keyring, which is not supported very well.
A better system would be:
1. If `$BLACKBOX_CONFIG` is set, use that directory.
2. If the repo base directory has a file called ".blackbox_external", read that file as if you are reading `$BLACKBOX_CONFIG`
3. If the repo base directory has a "keyrings" directory, use that.
4. If the repo base directory has a ".blackboxconfig" directory, use that.
2. If the repo base directory has a file called `.blackbox_external`, read that file as if you are reading `$BLACKBOX_CONFIG`
3. If the repo base directory has a `keyrings` directory, use that.
4. If the repo base directory has a `.blackbox` directory, use that.
Some thoughts on .blackbox_external:
I'm not sure what the format should be, but I want it to be simple and expandable. It should support "../../dir/name" and "/long/path". However some day we may want to include a Git URL and have the system automatically get the keychain from it. That means the format has to be something like directory:../dir/name so that later we can add git:the_url.
Some thoughts on `.blackbox_external`:
I'm not sure what the format should be, but I want it to be simple and expandable. It should support `../../dir/name` and `/long/path`. However some day we may want to include a Git URL and have the system automatically get the keychain from it. That means the format has to be something like directory:../dir/name so that later we can add git:the-url.
NOTE: Maybe `.blackbox_external` should be `.blackbox/BLACKBOX_CONFIG`?
Backwards compatibility: `keyrings` would be checked before `.blackbox`.
## System Test
There needs to be a very complete system test. The `make test` we
have now is great for something written in bash.
It should be easy to make tests. Perhaps a directory of files, each
specifying a test. We could make a little language for writing tests.
# This test becomes the user "alice" and verifies that she
# can encrypt a file, and decrypt it, with full fidelity.
BECOME alice a
BASH echo "foo contents" >foo.txt
SHOULD_NOT_EXIST foo.txt.gpg
BASH blackbox encrypt foo.txt
SHOULD_NOT_EXIST foo.txt
SHOULD_EXIST foo.txt.gpg
BASH_WITH_PASSWORD a blackbox decrypt foo.txt
SHOULD_EXIST foo.txt.gpg
SHOULD_EXIST foo.txt
SHOULD_CONTAIN foo.txt "foo contents\n"
## Plug-in support
There should be plug-in support for:
Repo type:
* Git -- Using /usr/bin/git or git.exe
* Subversion
* Mercurial
* None (repoless)
* Autodetect
Encryption software:
* GnuPG -- using /usr/bin/gpg{,2} or gpg.exe
* golang.org/x/crypto/openpgp
## JSON or .txt
The files in .blackbox are mostly .txt files. Instead we should
define a .json format, and only read the .txt file if the .json file
doesn't exist.
Backwards compatibility: "keyrings" would be checked before .blackbox
## Repo-less mode
@@ -62,33 +108,34 @@ I prefer the file commits to be automatic because when they were manual, people
That said, I'm willing to have a "repo-less" mode.
When this mode is triggered, no add/commit/ignore tasks are done. The search for the keyrings directory still uses `$BLACKBOX_CONFIG` but if that is unset it looks for .blackbox_config in the current directory, then recursively ".." until we hit "/".
When this mode is triggered, no add/commit/ignore tasks are done. The search for the keyrings directory still uses `$BLACKBOX_CONFIG` but if that is unset it looks for `.blackbox_config` in the current directory, then recursively `..` until we hit `/`.
I think (but I'm not sure) this would benefit the entire system because it would force us to re-think what VCS actions are done when.
I think (but I'm not sure) that a simple way to implement this would be to add an environment variable that overrides the automatic VCS detection. When set to "none", all VCS operations would basically become no-ops. (This could be done by writing a plug-in that does nothing for all the vcs_* calls)
I think (but I'm not sure) that a simple way to implement this would be to add an environment variable that overrides the automatic VCS detection. When set to "none", all VCS operations would basically become no-ops. (This could be done by writing a plug-in that does nothing for all the `vcs_*` calls)
Backwards compatibility: This would add a "none" VCS, not remove any existing functionality.
Backwards compatibility: This would add a `none` VCS, not remove any existing functionality.
## Is "bash" the right language?
`bash` is fairly universal. It even exists on Windows. However it is not the right language for large systems. Writing the acceptance tests is quite a bear. Managing ".gitignore" files in bash is impossible and the current implementation fails in many cases.
`bash` is fairly universal. It even exists on Windows. However it is not the right language for large systems. Writing the acceptance tests is quite a bear. Managing `.gitignore` files in bash is impossible and the current implementation fails in many cases.
`python` is my second favorite language. It would make the code cleaner and more testable. However it is not installed everywhere. I would also want to write it in Python3 (why start a new project in Python2?) but sadly Python3 is less common. It is a chicken vs. egg situation.
`go` is my favorite language. I could probably rewrite this in go in a weekend. However, now the code is compiled, not interpreted. Therefore we lose the ability to just "git clone" and have the tools you want. Not everyone has a Go compiler installed on every machine.
`go` is my favorite language. I could probably rewrite this in go in a weekend. However, now the code is compiled, not interpreted. Therefore we lose the ability to just `git clone` and have the tools you want. Not everyone has a Go compiler installed on every machine.
The system is basically unusable on Windows without Cygwin or MINGW. A rewrite in python or go would make it work better on Windows, which currently requires Cygwin or MinGW (which is a bigger investment than installing Python). On the other hand, maybe Ubuntu-on-Windows makes that a non-issue.
As long as the code is in `bash` the configuration files like `blackbox-files.txt` and `blackbox-admins.txt` have problems. Filenames with carriage returns aren't supported. If this was in Python/Go/etc. those files could be json or some format with decent quoting and we could handle funny file names better. On the other hand, maybe it is best that we don't support funny filenames... we shouldn't enable bad behavior.
How important is it to blackbox users that the system is written in "bash"?
How important is it to blackbox users that the system is written in `bash`?
## ditch the project and use git-crypt
## Ditch the project and use git-crypt
People tell me that git-crypt is better because, as a plug-in, it automagically supports "git diff", "git log" and "git blame".
People tell me that git-crypt is better because, as a plug-in, it automagically supports `git diff`, `git log` and `git blame`.
However, I've never used it so I don't have any idea whether git-crypt is any better than blackbox.

View File

@@ -16,10 +16,13 @@ source "${0%/*}"/_stack_lib.sh
: "${BLACKBOX_HOME:="$(cd "${0%/*}" ; pwd)"}" ;
# What are the candidates for the blackbox data directory?
#
# The order of candidates matter. The first entry of the array
# sets the default Blackbox directory for all new repositories.
declare -a BLACKBOXDATA_CANDIDATES
BLACKBOXDATA_CANDIDATES=(
'keyrings/live'
'.blackbox'
'keyrings/live'
)
# If $EDITOR is not set, set it to "vi":
@@ -140,7 +143,7 @@ function fail_if_not_on_cryptlist() {
if ! is_on_cryptlist "$name" ; then
echo "ERROR: $name not found in $BB_FILES" >&2
echo "PWD=$(/bin/pwd)" >&2
echo "PWD=$(/usr/bin/env pwd)" >&2
echo 'Exiting...' >&2
exit 1
fi
@@ -210,7 +213,8 @@ function add_filename_to_cryptlist() {
else
echo "========== Adding file to list."
touch "$BB_FILES"
sort -u -o "$BB_FILES" <(echo "$name") "$BB_FILES"
echo "$name" >> "$BB_FILES"
sort -u -o "$BB_FILES" "$BB_FILES"
fi
}
@@ -408,6 +412,12 @@ function md5sum_file() {
Darwin | FreeBSD )
md5 -r "$1" | awk '{ print $1 }'
;;
NetBSD )
md5 -q "$1"
;;
SunOS )
digest -a md5 "$1"
;;
Linux | CYGWIN* | MINGW* )
md5sum "$1" | awk '{ print $1 }'
;;
@@ -422,12 +432,15 @@ function cp_permissions() {
# Copy the perms of $1 onto $2 .. end.
case $(uname -s) in
Darwin )
chmod $( stat -f '%p' "$1" ) "${@:2}"
chmod $( stat -f '%Lp' "$1" ) "${@:2}"
;;
FreeBSD )
FreeBSD | NetBSD )
chmod $( stat -f '%p' "$1" | sed -e "s/^100//" ) "${@:2}"
;;
Linux | CYGWIN* | MINGW* )
SunOS )
chmod $( stat -c '%a' "$1" ) "${@:2}"
;;
Linux | CYGWIN* | MINGW* | SunOS )
if [[ -e /etc/alpine-release ]]; then
chmod $( stat -c '%a' "$1" ) "${@:2}"
else

View File

@@ -57,7 +57,7 @@ function create_self_deleting_tempfile() {
: "${TMPDIR:=/tmp}" ;
filename=$(mktemp -t _stacklib_.XXXXXXXX )
;;
Linux | CYGWIN* | MINGW* )
Linux | CYGWIN* | MINGW* | NetBSD | SunOS )
filename=$(mktemp)
;;
* )
@@ -78,7 +78,7 @@ function create_self_deleting_tempdir() {
: "${TMPDIR:=/tmp}" ;
filename=$(mktemp -d -t _stacklib_.XXXXXXXX )
;;
Linux | CYGWIN* | MINGW* )
Linux | CYGWIN* | MINGW* | NetBSD | SunOS )
filename=$(mktemp -d)
;;
* )
@@ -102,7 +102,7 @@ function make_self_deleting_tempfile() {
: "${TMPDIR:=/tmp}" ;
name=$(mktemp -t _stacklib_.XXXXXXXX )
;;
Linux | CYGWIN* | MINGW* )
Linux | CYGWIN* | MINGW* | NetBSD | SunOS )
name=$(mktemp)
;;
* )
@@ -127,7 +127,7 @@ function make_tempdir() {
# which needs to fit within sockaddr_un.sun_path (see unix(7)).
name=$(mktemp -d -t SO )
;;
Linux | CYGWIN* | MINGW* )
Linux | CYGWIN* | MINGW* | NetBSD | SunOS )
name=$(mktemp -d)
;;
* )
@@ -160,14 +160,14 @@ function fail_if_not_running_as_root() {
function fail_if_in_root_directory() {
# Verify nobody has tricked us into being in "/".
case $(uname -s) in
Darwin | FreeBSD )
Darwin | FreeBSD | NetBSD )
if [[ $(stat -f'%i' / ) == $(stat -f'%i' . ) ]] ; then
echo 'SECURITY ALERT: The current directory is the root directory.'
echo 'Exiting...'
exit 1
fi
;;
Linux | CYGWIN* | MINGW* )
Linux | CYGWIN* | MINGW* | SunOS )
if [[ $(stat -c'%i' / ) == $(stat -c'%i' . ) ]] ; then
echo 'SECURITY ALERT: The current directory is the root directory.'
echo 'Exiting...'

View File

@@ -19,7 +19,8 @@ KEYNAME="$1"
# Add the email address to the BB_ADMINS file. Remove any duplicates.
# The file must exist for sort to act as we expect.
touch "$BB_ADMINS"
sort -fdu -o "$BB_ADMINS" <(echo "$1") "$BB_ADMINS"
echo "$1" >> "$BB_ADMINS"
sort -fdu -o "$BB_ADMINS" "$BB_ADMINS"
# Add the user's key to the keychain.

15
bin/blackbox_decrypt_file Executable file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env bash
#
# blackbox_decrypt_file -- Decrypt one or more blackbox files.
#
# Exit immediately if any command fails.
set -e
source "${0%/*}/_blackbox_common.sh"

# Require at least one filename argument; decrypting "nothing" is
# almost certainly a user mistake.
if [ $# -eq 0 ]; then
echo >&2 "Please provide at least one file to decrypt"
exit 1
fi

# Decryption is implemented by blackbox_edit_start; delegate all
# filenames to it unchanged.
"${BLACKBOX_HOME}/blackbox_edit_start" "$@"

View File

@@ -29,7 +29,7 @@ vcs_remove "$encrypted_file"
vcs_notice "$unencrypted_file"
vcs_add "$BB_FILES"
vcs_commit "Removing from blackbox: ${unencrypted_file}" "$BB_FILES" "$encrypted_file"
vcs_commit "Removing from blackbox: ${unencrypted_file}" "$BB_FILES" "$encrypted_file" "$(vcs_ignore_file_path)"
echo "========== UPDATING VCS: DONE"
echo "Local repo updated. Please push when ready."
echo " $VCS_TYPE push"

View File

@@ -44,7 +44,7 @@ if [[ $VCS_TYPE = "git" ]]; then
grep -qF "$LINE" "$FILE" || echo "$LINE" >> "$FILE"
LINE='blackbox-files.txt text eol=lf'
grep -qF "$LINE" "$FILE" || echo "$LINE" >> "$FILE"
vcs_add "$FILE"
fi
if [[ $VCS_TYPE = "svn" ]]; then
@@ -59,5 +59,5 @@ else
echo
echo
echo 'NEXT STEP: You need to manually check these in:'
echo ' ' $VCS_TYPE commit -m\'INITIALIZE BLACKBOX\' keyrings "$IGNOREFILE"
echo ' ' $VCS_TYPE commit -m\'INITIALIZE BLACKBOX\' "$BLACKBOXDATA" "$IGNOREFILE"
fi

View File

@@ -29,6 +29,7 @@ function register_new_file() {
prepare_keychain
encrypt_file "$unencrypted_file" "$encrypted_file"
add_filename_to_cryptlist "$unencrypted_file"
vcs_ignore "$unencrypted_file"
# Is the unencrypted file already in HG? (ie. are we correcting a bad situation)
SECRETSEXPOSED=$(is_in_vcs "${unencrypted_file}")
@@ -41,7 +42,6 @@ function register_new_file() {
vcs_add "$encrypted_file"
fi
vcs_ignore "$unencrypted_file"
echo 'NOTE: "already tracked!" messages are safe to ignore.'
vcs_add "$BB_FILES" "$encrypted_file"
vcs_commit "registered in blackbox: ${unencrypted_file}" "$BB_FILES" "$encrypted_file" "$(vcs_ignore_file_path)"

View File

@@ -39,7 +39,12 @@ export -f exported_internal_shred_file
DEREFERENCED_BIN_DIR="${0%/*}"
MAX_PARALLEL_SHRED=10
bash_args=
if bash --help | grep import-functions >/dev/null 2>/dev/null; then
bash_args=--import-functions
fi
export IFS=
tr '\n' '\0' <"$BB_FILES" | xargs -0 -I{} -n 1 -P $MAX_PARALLEL_SHRED bash -c "exported_internal_shred_file $DEREFERENCED_BIN_DIR \"{}\"" $DEREFERENCED_BIN_DIR/fake
tr '\n' '\0' <"$BB_FILES" | xargs -0 -I{} -P $MAX_PARALLEL_SHRED bash $bash_args -c "exported_internal_shred_file $DEREFERENCED_BIN_DIR \"{}\"" $DEREFERENCED_BIN_DIR/fake
echo '========== DONE.'

20
bin/blackbox_view Executable file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
#
# blackbox_view -- Decrypt a file, view it, shred it
#
set -e
source "${0%/*}/_blackbox_common.sh"

for param in "$@" ; do
# Only shred plaintext that WE created: if a decrypted copy already
# existed before this script ran, leave it alone afterwards.
shreddable=0
unencrypted_file=$(get_unencrypted_filename "$param")
if [[ ! -e "$unencrypted_file" ]]; then
# No plaintext yet: decrypt now and remember to clean up.
"${BLACKBOX_HOME}/blackbox_edit_start" "$param"
shreddable=1
fi
# Page the plaintext; honor $PAGER, defaulting to less.
${PAGER:-less} "$unencrypted_file"
if [[ $shreddable = 1 ]]; then
shred_file "$unencrypted_file"
fi
done

2
binv2/blackbox_addadmin Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox admin add "$@"

2
binv2/blackbox_cat Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox cat "$@"

View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
# Compatibility wrapper: v1 blackbox_decrypt_all_files -> v2 Go CLI.
# Bug fix: the final argument was a literal "@" (a stray filename that
# would conflict with --all); "$@" correctly forwards the caller's
# arguments, if any.
exec blackbox decrypt --all --agentcheck=true --overwrite "$@"

2
binv2/blackbox_decrypt_file Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox decrypt --overwrite "$@"

2
binv2/blackbox_deregister_file Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox file remove --safe "$@"

2
binv2/blackbox_diff Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox diff --diff "$@"

2
binv2/blackbox_edit Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox edit "$@"

2
binv2/blackbox_edit_end Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox encrypt --shred "$@"

2
binv2/blackbox_edit_start Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox decrypt "$@"

2
binv2/blackbox_initialize Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox init "$@"

2
binv2/blackbox_list_admins Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox admin list

2
binv2/blackbox_list_files Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox file list

2
binv2/blackbox_listadmins Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox admin list

2
binv2/blackbox_postdeploy Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
# Compatibility wrapper: v1 blackbox_postdeploy -> v2 Go CLI.
# Consistency fix: use exec like every other binv2 wrapper so the shell
# is replaced by the blackbox process and its exit status is returned
# directly.
exec blackbox decrypt --all --overwrite --group "$1"

View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox file add --shred "$@"

2
binv2/blackbox_removeadmin Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox admin remove "$@"

2
binv2/blackbox_shred_all_files Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox shred --all

View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec blackbox reencrypt --all --agentcheck

2
binv2/blackbox_view Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
# Compatibility wrapper: v1 blackbox_view -> v2 Go CLI.
# No exec here: the command is a pipeline, so the shell must remain to
# run both sides; the script's exit status is the pager's.
blackbox cat "$@" | ${PAGER:-less}

51
binv2/blackbox_whatsnew Executable file
View File

@@ -0,0 +1,51 @@
#!/usr/bin/env bash
#
# blackbox_whatsnew - show what has changed in the last commit for a given file
#
# NOTE(review): this wrapper now delegates to the v2 Go CLI; everything
# after the exec/exit below is unreachable legacy code kept for reference.
exec blackbox whatsnew "$@"
exit 0

set -e
source "${0%/*}/_blackbox_common.sh"

# Legacy implementation (dead code) follows.
if [[ $# -ne 1 ]]
then
echo "Pass only 1 file at a time"
exit 1
fi

fail_if_not_in_repo
gpg_agent_notice

COLUMNS=`tput cols`
FILE=$1
GIT="git log --abbrev-commit --pretty=oneline"
# Most recent commit that touched $FILE, and the commit before that.
CURR_COMMIT=`$GIT $FILE | head -1 | awk '{print $1}'`
PREV_COMMIT=`$GIT ${CURR_COMMIT}~1 $FILE | head -1 | awk '{print $1}'`

# Use colordiff if available
if which colordiff > /dev/null 2>&1
then DIFF="colordiff"
else DIFF="diff"
fi

# cat_commit: check out $FILE at the given commit and print its
# decrypted contents, stripping blackbox's progress banners.
cat_commit()
{
COMMIT=$1
git checkout $COMMIT $FILE
echo "[$COMMIT] $FILE"
echo "---------------------"
"${BLACKBOX_HOME}/blackbox_cat" $FILE | sed '/========== PLAINFILE/,/========== EXTRACTING/d'
}

CURR_CONTENT=`cat_commit $CURR_COMMIT`
PREV_CONTENT=`cat_commit $PREV_COMMIT`
clear
# For some unknown reason this command executes fine but returns exit code 1
$DIFF -y --width $COLUMNS \
<(echo "CURRENT" "$CURR_CONTENT" | fold -w $(( $COLUMNS / 2 - 4 )) ) \
<(echo "PREVIOUS" "$PREV_CONTENT" | fold -w $(( $COLUMNS / 2 - 4 )) )

# Restore the working copy to the current commit's version of $FILE.
git checkout $CURR_COMMIT $FILE
echo

View File

@@ -1,4 +1,4 @@
#!/bin/zsh
#!/usr/bin/env zsh
# The MIT License (MIT)
# Copyright (c) 2014 Stack Exchange, Inc.

75
build/build.go Normal file
View File

@@ -0,0 +1,75 @@
package main
import (
	"errors"
	"flag"
	"fmt"
	"log"
	"os"
	"os/exec"
	"strings"
	"time"
)
// sha optionally overrides the version stamp embedded in the binary.
var sha = flag.String("sha", "", "SHA of current commit")

// goos restricts the build to a single target OS; empty builds all targets.
var goos = flag.String("os", "", "OS to build (linux, windows, or darwin) Defaults to all.")
// main cross-compiles the blackbox binary for each requested target OS
// (all three by default; restrict with -os), embedding the version and
// build time via -ldflags.
func main() {
	flag.Parse()

	// No quotes around the SHA: exec.Command passes arguments verbatim
	// (there is no shell), so embedded quotes would become part of the
	// main.SHA value itself.
	flags := fmt.Sprintf(`-s -w -X main.SHA=%s -X main.BuildTime=%d`, getVersion(), time.Now().Unix())
	pkg := "github.com/StackExchange/blackbox/v2/cmd/blackbox"

	// build compiles pkg for one OS, writing the binary to `out`.
	build := func(out, goos string) {
		log.Printf("Building %s", out)
		cmd := exec.Command("go", "build", "-o", out, "-ldflags", flags, pkg)
		// Scope the cross-compile settings to this one command rather
		// than mutating the whole process environment with os.Setenv.
		cmd.Env = append(os.Environ(), "GOOS="+goos, "GO111MODULE=on")
		cmd.Stderr = os.Stderr
		cmd.Stdout = os.Stdout
		if err := cmd.Run(); err != nil {
			log.Fatal(err)
		}
	}

	for _, env := range []struct {
		binary, goos string
	}{
		{"blackbox-Linux", "linux"},
		{"blackbox.exe", "windows"},
		{"blackbox-Darwin", "darwin"},
	} {
		if *goos == "" || *goos == env.goos {
			build(env.binary, env.goos)
		}
	}
}
// getVersion returns the version string to embed in the binary, in
// priority order: the -sha flag, the TeamCity BUILD_VCS_NUMBER env
// var, or `git rev-parse HEAD` with "[dirty]" appended when the
// working tree has uncommitted changes. Returns "" if git fails.
func getVersion() string {
	if *sha != "" {
		return *sha
	}
	// check teamcity build version
	if v := os.Getenv("BUILD_VCS_NUMBER"); v != "" {
		return v
	}
	// check git
	cmd := exec.Command("git", "rev-parse", "HEAD")
	v, err := cmd.CombinedOutput()
	if err != nil {
		return ""
	}
	ver := strings.TrimSpace(string(v))

	// See if the tree is dirty: git diff-index exits 1 when there are
	// uncommitted changes. Inspect the exit code via exec.ExitError
	// instead of the fragile err.Error() string comparison.
	cmd = exec.Command("git", "diff-index", "--quiet", "HEAD", "--")
	err = cmd.Run()
	if err != nil {
		var ee *exec.ExitError
		if errors.As(err, &ee) && ee.ExitCode() == 1 {
			ver += "[dirty]"
		} else {
			log.Printf("!%s!", err.Error())
		}
	}
	return ver
}

22
cmd/blackbox/blackbox.go Normal file
View File

@@ -0,0 +1,22 @@
package main
import (
"fmt"
"os"
_ "github.com/StackExchange/blackbox/v2/pkg/crypters"
_ "github.com/StackExchange/blackbox/v2/pkg/crypters/_all"
_ "github.com/StackExchange/blackbox/v2/pkg/vcs"
_ "github.com/StackExchange/blackbox/v2/pkg/vcs/_all"
)
var dryRun bool
// main is the CLI entry point: construct the app defined in cli.go,
// run it against os.Args, and exit non-zero if it reports an error.
func main() {
	if err := flags().Run(os.Args); err != nil {
		fmt.Fprintf(os.Stderr, "ERROR: %s\n", err)
		os.Exit(1)
	}
}

227
cmd/blackbox/cli.go Normal file
View File

@@ -0,0 +1,227 @@
package main
// cli.go -- Create urfave/cli datastructures and apply them.
import (
"fmt"
"github.com/urfave/cli/v2"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
)
// flags builds the urfave/cli application: global flags plus the full
// command tree. Each command's Action delegates to a cmdXxx function
// in drive.go. Fixes two usage-string typos ("Initialized" ->
// "Initialize", "afterwords" -> "afterwards").
func flags() *cli.App {
	app := cli.NewApp()
	app.Version = "2.0.0"
	app.Usage = "Maintain encrypted files in a VCS (Git, Hg, Svn)"

	// Discover the process's current umask without permanently
	// changing it: set it to 0, capture the old value, restore it.
	defUmask := bbutil.Umask(0)
	bbutil.Umask(defUmask)
	defUmaskS := fmt.Sprintf("%04o", defUmask)

	app.Flags = []cli.Flag{
		// &cli.BoolFlag{
		// 	Name:        "dry-run",
		// 	Aliases:     []string{"n"},
		// 	Usage:       "show what would have been done",
		// 	Destination: &dryRun,
		// },
		&cli.StringFlag{
			Name:    "vcs",
			Usage:   "Use this VCS (GIT, NONE) rather than autodetect",
			EnvVars: []string{"BLACKBOX_VCS"},
		},
		&cli.StringFlag{
			Name:    "crypto",
			Usage:   "Crypto back-end plugin",
			Value:   "GnuPG",
			EnvVars: []string{"BLACKBOX_CRYPTO"},
		},
		&cli.StringFlag{
			Name:  "config",
			Usage: "Path to config",
			//Value: ".blackbox",
			EnvVars: []string{"BLACKBOX_CONFIGDIR", "BLACKBOXDATA"},
		},
		&cli.StringFlag{
			Name:    "team",
			Usage:   "Use .blackbox-$TEAM as the configdir",
			EnvVars: []string{"BLACKBOX_TEAM"},
		},
		&cli.StringFlag{
			Name:    "editor",
			Usage:   "editor to use",
			Value:   "vi",
			EnvVars: []string{"EDITOR", "BLACKBOX_EDITOR"},
		},
		&cli.StringFlag{
			Name:    "umask",
			Usage:   "umask to set when decrypting",
			Value:   defUmaskS,
			EnvVars: []string{"BLACKBOX_UMASK", "DECRYPT_UMASK"},
		},
		&cli.BoolFlag{
			Name:    "debug",
			Usage:   "Show debug output",
			EnvVars: []string{"BLACKBOX_DEBUG"},
		},
	}

	app.Commands = []*cli.Command{
		// List items in the order they appear in the help menu.
		{
			Name:    "decrypt",
			Aliases: []string{"de", "start"},
			Usage:   "Decrypt file(s)",
			Flags: []cli.Flag{
				&cli.BoolFlag{Name: "all", Usage: "All registered files"},
				&cli.BoolFlag{Name: "agentcheck", Usage: "Do not check for gpg-agent when using --all"},
				&cli.StringFlag{Name: "group", Usage: "Set group ownership"},
				&cli.BoolFlag{Name: "overwrite", Usage: "Overwrite plaintext if it exists"},
			},
			Action: func(c *cli.Context) error { return cmdDecrypt(c) },
		},
		{
			Name:    "encrypt",
			Aliases: []string{"en", "end"},
			Usage:   "Encrypts file(s)",
			Flags: []cli.Flag{
				&cli.BoolFlag{Name: "shred", Usage: "Remove plaintext afterwards"},
			},
			Action: func(c *cli.Context) error { return cmdEncrypt(c) },
		},
		{
			Name:    "edit",
			Aliases: []string{"vi"},
			Usage:   "Runs $EDITOR on file(s) (decrypt if needed)",
			Action:  func(c *cli.Context) error { return cmdEdit(c) },
		},
		{
			Name: "cat",
			// NOTE(review): the historical cat wrote plaintext to
			// stdout; confirm "stderr" in this usage string is intended.
			Usage:  "Output plaintext to stderr (decrypt if needed)",
			Action: func(c *cli.Context) error { return cmdCat(c) },
		},
		{
			Name:  "diff",
			Usage: "Diffs against encrypted version",
			Flags: []cli.Flag{
				&cli.BoolFlag{Name: "all", Usage: "all files"},
			},
			Action: func(c *cli.Context) error { return cmdDiff(c) },
		},
		{
			Name:     "init",
			Category: "ADMINISTRATIVE",
			Usage:    "Initialize blackbox for this repo",
			Action:   func(c *cli.Context) error { return cmdInit(c) },
		},
		{
			Name:     "admin",
			Category: "ADMINISTRATIVE",
			Usage:    "Add/list/remove administrators",
			Subcommands: []*cli.Command{
				{
					Name:   "add",
					Usage:  "Adds admin(s)",
					Action: func(c *cli.Context) error { return cmdAdminAdd(c) },
				},
				{
					Name:   "list",
					Usage:  "Lists admins",
					Action: func(c *cli.Context) error { return cmdAdminList(c) },
				},
				{
					Name:   "remove",
					Usage:  "Remove admin(s)",
					Action: func(c *cli.Context) error { return cmdAdminRemove(c) },
				},
			},
		},
		{
			Name:     "file",
			Category: "ADMINISTRATIVE",
			Usage:    "Add/list/remove files from the registry",
			Subcommands: []*cli.Command{
				{
					Name:  "add",
					Usage: "Registers file with the system",
					Flags: []cli.Flag{
						&cli.BoolFlag{Name: "shred", Usage: "Remove plaintext afterwards"},
					},
					Action: func(c *cli.Context) error { return cmdFileAdd(c) },
				},
				{
					Name:   "list",
					Usage:  "Lists the registered files",
					Action: func(c *cli.Context) error { return cmdFileList(c) },
				},
				{
					Name:   "remove",
					Usage:  "Deregister file from the system",
					Action: func(c *cli.Context) error { return cmdFileRemove(c) },
				},
			},
		},
		{
			Name:     "info",
			Category: "DEBUG",
			Usage:    "Report what we know about this repo",
			Action:   func(c *cli.Context) error { return cmdInfo(c) },
		},
		{
			Name:  "shred",
			Usage: "Shred files, or --all for all registered files",
			Flags: []cli.Flag{
				&cli.BoolFlag{Name: "all", Usage: "All registered files"},
			},
			Action: func(c *cli.Context) error { return cmdShred(c) },
		},
		{
			Name:     "status",
			Category: "ADMINISTRATIVE",
			Usage:    "Print status of files",
			Flags: []cli.Flag{
				&cli.BoolFlag{Name: "name-only", Usage: "Show only names of the files"},
				&cli.BoolFlag{Name: "all", Usage: "All registered files"},
				&cli.StringFlag{Name: "type", Usage: "only list if status matching this string"},
			},
			Action: func(c *cli.Context) error { return cmdStatus(c) },
		},
		{
			Name:     "reencrypt",
			Usage:    "Decrypt then re-encrypt files (erases any plaintext)",
			Category: "ADMINISTRATIVE",
			Flags: []cli.Flag{
				&cli.BoolFlag{Name: "all", Usage: "All registered files"},
				&cli.BoolFlag{Name: "overwrite", Usage: "Overwrite plaintext if it exists"},
				&cli.BoolFlag{Name: "agentcheck", Usage: "Do not check for gpg-agent when using --all"},
			},
			Action: func(c *cli.Context) error { return cmdReencrypt(c) },
		},
		{
			Name:     "testing_init",
			Usage:    "For use with integration test",
			Category: "INTEGRATION TEST",
			Action:   func(c *cli.Context) error { return testingInit(c) },
		},
		//
	}

	return app
}

296
cmd/blackbox/drive.go Normal file
View File

@@ -0,0 +1,296 @@
package main
// Now that cli.go has processed the flags, validate there are no
// conflicts and drive to the business logic.
import (
"fmt"
"log"
"os"
"github.com/StackExchange/blackbox/v2/pkg/bblog"
"github.com/StackExchange/blackbox/v2/pkg/box"
"github.com/urfave/cli/v2"
)
// logErr writes error output to stderr with no prefix or timestamp.
// A plain var initializer replaces the previous init() function, whose
// nil-guard was dead code (logErr was always nil at init time).
var logErr = log.New(os.Stderr, "", 0)
// allOrSomeFiles validates that the user supplied either --all or an
// explicit list of filenames — exactly one of the two, never both and
// never neither.
func allOrSomeFiles(c *cli.Context) error {
	all := c.Bool("all")
	named := c.Args().Present()
	switch {
	case all && named:
		return fmt.Errorf("Can not specify filenames and --all")
	case !all && !named:
		return fmt.Errorf("Must specify at least one file name or --all")
	}
	return nil
}
// roError explains why mutating commands are refused when the config
// directory was overridden via --config (read-only mode): we cannot
// tell whether that directory lives inside the repo.
const roError = `This command is disabled due to --config flag being used.
We can not determine if the flag's value is in or out of the repo, and
Blackbox can only work on one repo at a time. If the value is inside the
repo, and you'd like to suggest an algorithm that would let us determine that
automatically, please file a bug. We'd love to have this work better. In the
meanwhile, run this command without the --config flag, perhaps after cd'ing
to the base of the repo.`
// Keep these functions in alphabetical order.
// cmdAdminAdd registers one new admin by GnuPG user-id, optionally
// reading the pubkey data from a caller-supplied directory (second
// argument).
func cmdAdminAdd(c *cli.Context) error {
	if c.NArg() == 0 || c.NArg() > 2 {
		return fmt.Errorf(
			"Must specify one admin's GnuPG user-id (i.e. email address) and optionally the directory of the pubkey data (default ~/.GnuPG)")
	}
	bx := box.NewFromFlags(c)
	// Mutating commands are disabled when --config points at a
	// directory we can't prove is inside the repo; see roError.
	if bx.ConfigRO {
		return fmt.Errorf(roError)
	}
	err := bx.AdminAdd(c.Args().Get(0), c.Args().Get(1))
	if err != nil {
		return err
	}
	// Push any VCS commits queued by AdminAdd.
	return bx.Vcs.FlushCommits()
}
// cmdAdminList lists the current admins. It takes no arguments.
func cmdAdminList(c *cli.Context) error {
	if c.Args().Present() {
		return fmt.Errorf("This command takes zero arguments")
	}
	bx := box.NewFromFlags(c)
	if err := bx.AdminList(); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdAdminRemove removes one or more admins, identified by their GnuPG
// user-ids (i.e. email addresses).
func cmdAdminRemove(c *cli.Context) error {
	if !c.Args().Present() {
		return fmt.Errorf("Must specify at least one admin's GnuPG user-id (i.e. email address)")
	}
	bx := box.NewFromFlags(c)
	if bx.ConfigRO {
		// errors.New avoids a non-constant format string (go vet printf check).
		return errors.New(roError)
	}
	if err := bx.AdminRemove(c.Args().Slice()); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdCat decrypts the named files to stdout without touching the
// plaintext on disk.
func cmdCat(c *cli.Context) error {
	if !c.Args().Present() {
		return fmt.Errorf("Must specify at least one file name")
	}
	bx := box.NewFromFlags(c)
	if err := bx.Cat(c.Args().Slice()); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdDecrypt decrypts the named files, or every enrolled file when
// --all is given.
func cmdDecrypt(c *cli.Context) error {
	if err := allOrSomeFiles(c); err != nil {
		return err
	}
	// --agentcheck defaults to off, except that --all turns it on.
	// An explicit --agentcheck flag from the user always wins.
	pause := c.Bool("all")
	if c.IsSet("agentcheck") {
		pause = c.Bool("agentcheck")
	}
	bx := box.NewFromFlags(c)
	err := bx.Decrypt(c.Args().Slice(), c.Bool("overwrite"), pause, c.String("group"))
	if err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdDiff diffs the plaintext of the named files (or all files with
// --all) against their encrypted versions.
func cmdDiff(c *cli.Context) error {
	if err := allOrSomeFiles(c); err != nil {
		return err
	}
	bx := box.NewFromFlags(c)
	if err := bx.Diff(c.Args().Slice()); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdEdit decrypts the named files and opens them in the user's editor.
func cmdEdit(c *cli.Context) error {
	if !c.Args().Present() {
		return fmt.Errorf("Must specify at least one file name")
	}
	bx := box.NewFromFlags(c)
	if err := bx.Edit(c.Args().Slice()); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdEncrypt encrypts the named files (or all enrolled files with
// --all), optionally shredding the plaintext afterwards.
func cmdEncrypt(c *cli.Context) error {
	if err := allOrSomeFiles(c); err != nil {
		return err
	}
	bx := box.NewFromFlags(c)
	if err := bx.Encrypt(c.Args().Slice(), c.Bool("shred")); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdFileAdd enrolls one or more files into the blackbox system,
// optionally shredding the plaintext with --shred.
func cmdFileAdd(c *cli.Context) error {
	if !c.Args().Present() {
		return fmt.Errorf("Must specify at least one file name")
	}
	bx := box.NewFromFlags(c)
	if bx.ConfigRO {
		// errors.New avoids a non-constant format string (go vet printf check).
		return errors.New(roError)
	}
	if err := bx.FileAdd(c.Args().Slice(), c.Bool("shred")); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdFileList lists the files enrolled in the system. Takes no arguments.
func cmdFileList(c *cli.Context) error {
	if c.Args().Present() {
		return fmt.Errorf("This command takes zero arguments")
	}
	bx := box.NewFromFlags(c)
	if err := bx.FileList(); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdFileRemove de-enrolls one or more files from the blackbox system.
func cmdFileRemove(c *cli.Context) error {
	if !c.Args().Present() {
		return fmt.Errorf("Must specify at least one file name")
	}
	bx := box.NewFromFlags(c)
	if bx.ConfigRO {
		// errors.New avoids a non-constant format string (go vet printf check).
		return errors.New(roError)
	}
	if err := bx.FileRemove(c.Args().Slice()); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdInfo prints debugging information about the repo and configuration.
// Takes no arguments.
func cmdInfo(c *cli.Context) error {
	if c.Args().Present() {
		return fmt.Errorf("This command takes zero arguments")
	}
	bx := box.NewFromFlags(c)
	if err := bx.Info(); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdInit initializes blackbox on a repo. It accepts an optional single
// argument (passed through to Init as the team/config name).
func cmdInit(c *cli.Context) error {
	// Only zero or one positional arguments are accepted; the original
	// error message claimed "one or two", contradicting the guard.
	if c.Args().Len() > 1 {
		return fmt.Errorf("This command takes zero or one arguments")
	}
	bx := box.NewUninitialized(c)
	if bx.ConfigRO {
		// errors.New avoids a non-constant format string (go vet printf check).
		return errors.New(roError)
	}
	if err := bx.Init(c.Args().First(), c.String("vcs")); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdReencrypt re-encrypts the named files (or all files with --all),
// typically after the admin list has changed.
func cmdReencrypt(c *cli.Context) error {
	if err := allOrSomeFiles(c); err != nil {
		return err
	}
	// --agentcheck defaults to off, except that --all turns it on.
	// An explicit --agentcheck flag from the user always wins.
	pause := c.Bool("all")
	if c.IsSet("agentcheck") {
		pause = c.Bool("agentcheck")
	}
	bx := box.NewFromFlags(c)
	if err := bx.Reencrypt(c.Args().Slice(), c.Bool("overwrite"), pause); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdShred securely deletes the plaintext of the named files (or all
// enrolled files with --all).
func cmdShred(c *cli.Context) error {
	if err := allOrSomeFiles(c); err != nil {
		return err
	}
	bx := box.NewFromFlags(c)
	if err := bx.Shred(c.Args().Slice()); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// cmdStatus reports the encryption status of the named files, or of all
// files when none are given. --name-only and --type filter the output.
func cmdStatus(c *cli.Context) error {
	if c.Bool("all") && c.Args().Present() {
		return fmt.Errorf("Can not specify filenames and --all")
	}
	bx := box.NewFromFlags(c)
	if err := bx.Status(c.Args().Slice(), c.Bool("name-only"), c.String("type")); err != nil {
		return err
	}
	return bx.Vcs.FlushCommits()
}
// These are "secret" commands used by the integration tests.
func testingInit(c *cli.Context) error {
if c.Args().Present() {
return fmt.Errorf("No args required")
}
logDebug := bblog.GetDebug(c.Bool("debug"))
logDebug.Printf(
"c.String(vcs) reports %q\n",
c.String("vcs"),
)
bx := box.NewForTestingInit(c.String("vcs"))
if bx.ConfigRO {
return fmt.Errorf(roError)
}
err := bx.TestingInitRepo()
if err != nil {
return err
}
return bx.Vcs.FlushCommits()
}

61
docs/README.md Normal file
View File

@@ -0,0 +1,61 @@
BlackBox
========
Blackbox is an open source tool that enables you to safely store sensitive information in
Git (or other) repos by encrypting it with GPG. Only the encrypted
version of the file is stored. You are free to provide access
to the repo, but only people with the right GPG keys can access the
encrypted data.
Things you should **never** store in a repo without encryption:
* TLS (SSL) certificates
* Passwords
* API keys
* And more!
Project Info:
* [Overview](user-overview.md)
* [Why is this important?](why-is-this-important.md)
* [Support/Community](support.md)
* [How BB encrypts](encryption.md)
* [OS Compatibility](compatibility.md)
* [Installation Instructions](installation.md)
* [Alternatives](alternatives.md)
User Info:
* [Enabling Blackbox on a Repo](enable-repo.md)
* [Enroll a file](enable-repo.md)
* [Full Command List](full-command-list.md)
* [Add/Remove users](admin-ops.md)
* [Add/Remove files](file-ops.md)
* [Advanced techniques](advanced.md)
* [Use with Role Accounts](role-accounts.md)
* [Backwards Compatibility](backwards-compatibility.md)
* [Replacing expired keys](expired-keys.md)
* [Git Tips](git-tips.md)
* [SubVersion Tips](subversion-tips.md)
* [GnuPG tips](gnupg-tips.md)
* [Use with Ansible](with-ansible.md)
* [Use with Puppet](with-puppet.md)
For contributors:
* [Developer Info](dev.md)
* [Code overview](dev-code-overview.md)
* [HOWTO: Add new OS support](dev-add-os-support.md)
* [HOWTO: Add new VCS support](dev-add-vcs-support.md)
A slide presentation about an older release [is on SlideShare](http://www.slideshare.net/TomLimoncelli/the-blackbox-project-sfae).
Join our mailing list: [https://groups.google.com/d/forum/blackbox-project](https://groups.google.com/d/forum/blackbox-project)
License
=======
This content is released under the MIT License.
See the [LICENSE.txt](LICENSE.txt) file.

148
docs/admin-ops.md Normal file
View File

@@ -0,0 +1,148 @@
User Management
===============
# Who are the current admins?
```
blackbox admin list
```
# Add a new user (admin)
FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?"
`.blackbox/blackbox-admins.txt` is a file that lists which users are able to decrypt files. (More pedantically, it is a list of the GnuPG key names that the file is encrypted for.)
To join the list of people that can edit the file requires three steps: you create a GPG key and add it to the key ring. Then, someone that already has access adds you to the system. Lastly, you should test your access.
## Step 1: NEWPERSON creates a GPG key pair on a secure machine and add to public keychain.
If you don't already have a GPG key, here's how to generate one:
```
gpg --gen-key
```
WARNING: New versions of GPG generate keys which are not understood by
old versions of GPG. If you generate a key with a new version of GPG,
this will cause problems for users of older versions of GPG.
Therefore it is recommended that you either assure that everyone using
Blackbox have the exact same version of GPG, or generate GPG keys
using a version of GPG as old as the oldest version of GPG used by
everyone using Blackbox.
Pick defaults for encryption settings, 0 expiration. Pick a VERY GOOD
passphrase. Store a backup of the private key someplace secure. For
example, keep the backup copy on a USB drive that is locked in safe.
Or, at least put it on a secure machine with little or no internet
access, full-disk-encryption, etc. Your employer probably has rules
about how to store such things.
FYI: If generating the key is slow, this is usually because the system
isn't generating enough entropy. Tip: Open another window on that
machine and run this command: `ls -R /`
Now that you have a GPG key, add yourself as an admin:
```
blackbox admin add KEYNAME
```
...where "KEYNAME" is the email address listed in the gpg key you created previously. For example:
```
blackbox admin add tal@example.com
```
When the command completes successfully, instructions on how to commit these changes will be output. Run the command as given to commit the changes. It will look like this:
```
git commit -m'NEW ADMIN: tal@example.com' .blackbox/pubring.gpg .blackbox/trustdb.gpg .blackbox/blackbox-admins.txt
```
Then push it to the repo:
```
git push
or
ht push
(or whatever is appropriate)
```
NOTE: Creating a Role Account? If you are adding the pubring.gpg of a role account, you can specify the directory where the pubring.gpg file can be found as a 2nd parameter: `blackbox admin add puppetmaster@puppet-master-1.example.com /path/to/the/dir`
## Step 2: AN EXISTING ADMIN accepts you into the system.
Ask someone that already has access to re-encrypt the data files. This
gives you access. They simply decrypt and re-encrypt the data without
making any changes.
Pre-check: Verify the new keys look good.
```
git pull # Or whatever is required for your system
gpg --homedir=.blackbox --list-keys
```
For example, examine the key name (email address) to make sure it conforms to corporate standards.
Import the keychain into your personal keychain and reencrypt:
```
gpg --import .blackbox/pubring.gpg
blackbox reencrypt --all shred
```
Push the re-encrypted files:
```
git commit -a
git push
or
hg commit
hg push
```
### Step 3: NEWPERSON tests.
Make sure you can decrypt a file. (Suggestion: Keep a dummy file in
VCS just for new people to practice on.)
# Remove a user
Simply run `blackbox admin remove` with their keyname then re-encrypt:
Example:
```
blackbox admin remove olduser@example.com
blackbox reencrypt --all shred
```
When the command completes, you will be given a reminder to check in the change and push it.
Note that their keys will still be in the key ring, but they will go unused. If you'd like to clean up the keyring, use the normal GPG commands and check in the file.
FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?"
```
gpg --homedir=.blackbox --list-keys
gpg --homedir=.blackbox --delete-key olduser@example.com
git commit -m'Cleaned olduser@example.com from keyring' .blackbox/*
```
FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?"
The key ring only has public keys. There are no secret keys to delete.
Remember that this person did have access to all the secrets at one time. They could have made a copy. Therefore, to be completely secure, you should change all passwords, generate new SSL keys, and so on just like when anyone that had privileged access leaves an organization.

46
docs/advanced.md Normal file
View File

@@ -0,0 +1,46 @@
Advanced Techniques
===================
# Using Blackbox without a repo
If the files are copied out of a repo they can still be decrypted and
edited. Obviously edits, changes to keys, and such will be lost if
they are made outside the repo. Also note that commands are most
likely to only work if run from the base directory (i.e. the parent to
the .blackbox directory).
Without a repo, all commands must be run from the same directory
as the ".blackbox" directory. It might work otherwise but no
promises.
# Mixing gpg 1.x/2.0 and 2.2
WARNING: Each version of GnuPG uses a different, and incompatible,
binary format to store the keychain. When Blackbox was originally
created, I didn't know this. Things are mostly upwards compatible.
That said, if you have some admins with GnuPG 1.x and others with GnuPG 2.2,
you may corrupt the keychain.
A future version will store the keychain in a GnuPG-approved
version-neutral format.
# Having gpg and gpg2 on the same machine
NOTE: This is not implemented at this time. TODO(tlim) Use GPG to find
the binary.
In some situations, team members or automated roles need to install gpg
2.x alongside the system gpg version 1.x to catch up with the team's gpg
version. On Ubuntu 16, you can ```apt-get install gnupg2``` which
installs the binary gpg2. If you want to use this gpg2 binary, run every
blackbox command with GPG=gpg2.
For example:
```
GPG=gpg2 blackbox_postdeploy
```

14
docs/alternatives.md Normal file
View File

@@ -0,0 +1,14 @@
Alternatives
============
Here are other open source packages that do something similar to
BlackBox. If you like them better than BlackBox, please use them.
- [git-crypt](https://www.agwa.name/projects/git-crypt/)
- [Pass](http://www.zx2c4.com/projects/password-store/)
- [Transcrypt](https://github.com/elasticdog/transcrypt)
- [Keyringer](https://keyringer.pw/)
- [git-secret](https://github.com/sobolevn/git-secret)
git-crypt has the best git integration. Once set up it is nearly
transparent to the users. However it only works with git.

View File

@@ -0,0 +1,55 @@
Backwards Compatibility
=======================
# Where is the configuration stored? .blackbox vs. keyrings/live
Blackbox stores its configuration data in the `.blackbox` subdirectory. Older
repos use `keyrings/live`. For backwards compatibility either will work.
All documentation refers to `.blackbox`.
You can convert an old repo by simply renaming the directory:
```
mv keyrings/live .blackbox
rmdir keyrings
```
There is no technical reason to convert old repos except that it is less
confusing to users.
This change was made in commit 60e782a0, release v1.20180615.
# How blackbox finds the config directory:
## Creating the repo:
`blackbox init` creates the config directory in the root
of the repo. Here's how it picks the name:
- If `$BLACKBOX_TEAM` is set, `.blackbox-$BLACKBOX_TEAM` is used.
- If the flag `--team <teamname>` is set, it uses `.blackbox-<teamname>`
- Otherwise, it uses `.blackbox`
When searching for the configuration directory, the following
locations are checked. First match wins.
- `.blackbox-$BLACKBOX_TEAM` (only if `$BLACKBOX_TEAM` is set)
- The value of `--config value` (if the flag is set)
- `$BLACKBOX_CONFIGDIR` (the preferred env. variable to use)
- `$BLACKBOXDATA` (for backwards compatibility with v1)
- `.blackbox`
- `keyrings/live` (for backwards compatibility)
NOTE: The env variables and `--config` should be set to the full path
to the config directory (i.e.: `/Users/tom/gitstuff/myrepo/.blackbox`).
If it is set to a relative directory (i.e. `.blackbox` or
`../myrepo/.blackbox`) most commands will break.
NOTE: Why the change from `$BLACKBOXDATA` to `$BLACKBOX_CONFIGDIR`? We want
all the env. variables to begin with the prefix `BLACKBOX_`. If v1
supported another name, that is still supported. If you are starting
with v2 and have no other users using v1, please use the `BLACKBOX_`
prefix.

78
docs/compatibility.md Normal file
View File

@@ -0,0 +1,78 @@
Compatibility
=============
# Compatibility with Blackbox v1
The command names all changed from v1 to v2. The `binv2` directory
includes shell scripts that provide full backwards compatibility.
# Supported Architectures
Blackbox supports a plug-in architecture to easily support multiple VCS
systems. Current support is for:
## Supported VCS/DVCS systems
* git
* "none" (repo-less use is supported)
* WOULD LOVE VOLUNTEERS TO HELP ADD SUPPORT FOR: hg, svn, p4
## Supported GPG versions
* GnuPG 1.x and 2.0
* GnuPG 2.2 and higher
* WOULD LOVE VOLUNTEERS TO HELP ADD SUPPORT FOR:
golang.org/x/crypto/openpgp (this would make the code have no
external dependencies)
## Supported Operating systems
Blackbox should work on any Linux system with GnuPG installed.
Blackbox simply looks for `gpg` in `$PATH`.
Windows: It should work (but has not been extensively tested) on
Windows WSL2.
# Automated testing
While many combinations work, we do automated tests
on these combinations. If any of these fail it blocks the release:
* macOS: GnuPG 2.2 executables from https://gpgtools.org/
* CentOS: GnuPG 2.0.x executables from the "base" or "updates" repo.
Windows native: VOLUNTEER NEEDED to make a native Windows version
(should be rather simple as Go does most of the work)
NOTE: Version 1 worked on CentOS/RedHat, macOS, Cygwin, MinGW, NetBSD,
and SmartOS. Hopefully we can achieve that broad level of support in
the future. Any system that is supported by the Go language and
has GnuPG 2.0.x or higher binaries available should be easy to achieve.
We'd also like to have automated testing for the same.
# Windows Support
BlackBox assumes that `blackbox-admins.txt` and `blackbox-files.txt` will have
LF line endings. Windows users should be careful to configure Git or other systems
to not convert or "fix" those files.
If you use Git, add the following lines to your `.gitattributes` file:
**/blackbox-admins.txt text eol=lf
**/blackbox-files.txt text eol=lf
The `blackbox init` (and newer versions of `blackbox_initialize`)
will create an appropriate `.gitattributes` file for you.
# Cygwin
TODO: List what packages are required for building the software.
TODO: List what packages are required for running the software.
# MinGW
MinGW (comes with Git for Windows) support requires the following:
TODO: FILL IN any requirements

36
docs/dev-code-overview.md Normal file
View File

@@ -0,0 +1,36 @@
Code Overview
=============
Here is how the code is laid out.
TODO(tlim): Add a diagram of the layers
```
cmd/blackbox/ The command line tool.
blackbox.go main()
cli.go Definition of all subcommands and flags
drive.go Processes flags and calls functions in verbs.go
NOTE: These are the only files that are aware of the
flags. Everything else gets the flag data passed to it
as a parameter. This way the remaining system can be
used as a module.
pkg/box/ High-level functions related to "the black box".
verbs.go One function per subcommand.
box.go Functions for manipulating the files in .blackbox
boxutils.go Helper functions for the above.
pkg/bblog/ Module that provides logging facilities.
pkg/bbutil/ Functions that are useful to box, plug-ins, etc.
pkg/tainedname/ Module for printing filenames escaped for Bash.
models/vcs.go The interface that defines a VCS plug-in.
models/crypters.go The interface that defines a GPG plug-in.
pkg/crypters/ Plug-ins for GPG functionality.
pkg/crypters/gnupg Plug-in that runs an external gpg binary (found via $PATH)
pkg/vcs/ Plug-ins for VCS functionality.
pkg/vcs/none Repo-less mode.
pkg/vcs/git Git mode.
```

36
docs/dev.md Normal file
View File

@@ -0,0 +1,36 @@
Developer Info
==============
Code submissions are gladly welcomed! The code is fairly easy to read.
Get the code:
```
git clone git@github.com:StackExchange/blackbox.git
```
Test your changes:
```
go test ./...
```
This runs through a number of system tests. It creates a repo,
encrypts files, decrypts files, and so on. You can run these tests to
verify that the changes you made didn't break anything. You can also
use these tests to verify that the system works with a new operating
system.
Please submit tests with code changes:
The best way to change BlackBox is via Test Driven Development. First
add a test to `tools/confidence.sh`. This test should fail, and
demonstrate the need for the change you are about to make. Then fix
the bug or add the feature you want. When you are done, `make
confidence` should pass all tests. The PR you submit should include
your code as well as the new test. This way the confidence tests
accumulate as the system grows as we know future changes don't break
old features.
Note: More info about compatibility are on the [Compatibility Page](compatibility.md)

58
docs/enable-repo.md Normal file
View File

@@ -0,0 +1,58 @@
Enabling Blackbox on a Repo
===========================
Overview:
1. Run the initialization command
2. Add at least one admin.
3. Add files. (don't add files before the admins)
The long version:
1. If you don't have a GPG key, set it up using instructions such as:
[Set up GPG key](https://help.github.com/articles/generating-a-new-gpg-key/). \
Now you are ready to go.
1. `cd` into a Git, Mercurial, Subversion or Perforce repository and run `blackbox init`.
1. Add yourself with `blackbox admin add YOUR@EMAIL`
1. Commit the files as directed.
That's it!
At this point you should encrypt a file and make sure you can decrypt
it. This verifies that everything is working as expected.
1. Pick a file to be encrypted. Since this is a test, you might want
to create a test file. Call it `secret.txt` and edit the file
so that it includes your mother's maiden name. Just kidding!
Store this sentence: `This is my test file.`
2. Run `blackbox file add secret.txt`
3. Decode the encrypted version: `blackbox cat secret.txt`
The "cat" subcommand only accesses the encrypted (`.gpg`) file and is
a good way to see that the file was encrypted properly. You should
see `This is my test file.`
4. Verify that editing the file works.
To view and/or edit a file, run `blackbox edit --shred secret.txt`
Now encrypt it and shred the original:
```
blackbox encrypt --shred secret.txt
```
Now make sure you can decrypt the new file:
```
blackbox cat secret.txt
```
You should see the changed text.
Now commit and push `secret.txt.gpg` and you are done!

54
docs/encryption.md Normal file
View File

@@ -0,0 +1,54 @@
How is the encryption done?
===========================
GPG has many different ways to encrypt a file. BlackBox uses the mode
that lets you specify a list of keys that can decrypt the message.
If you have 5 people ("admins") that should be able to access the
secrets, each creates a GPG key and adds their public key to the
keychain. The GPG command used to encrypt the file lists all 5 key
names, and therefore any 1 key can decrypt the file.
Blackbox stores a copy of the public keys of all admins. It never
stores the private keys.
To remove someone's access, remove that admin's key name (i.e. email
address) from the list of admins and re-encrypt all the files. They
can still read the .gpg file (assuming they have access to the
repository) but they can't decrypt it any more.
*What if they kept a copy of the old repo before you removed access?*
Yes, they can decrypt old versions of the file. This is why when an
admin leaves the team, you should change all your passwords, SSL
certs, and so on. You should have been doing that before BlackBox,
right?
*Why don't you use symmetric keys?* In other words, why mess with all
this GPG key stuff and instead why don't we just encrypt all the files
with a single passphrase. Yes, GPG supports that, but then we are
managing a shared password, which is fraught with problems. If someone
"leaves the team" we would have to communicate to everyone a new
password. Now we just have to remove their key. This scales better.
*How do automated processes decrypt without asking for a password?*
GPG requires a passphrase on a private key. However, it permits the
creation of subkeys that have no passphrase. For automated processes,
create a subkey that is only stored on the machine that needs to
decrypt the files. For example, at Stack Exchange, when our Continuous
Integration (CI) system pushes a code change to our Puppet masters,
they run `blackbox decrypt --all --overwrite` to decrypt all the files.
The user that
runs this code has a subkey that doesn't require a passphrase. Since
we have many masters, each has its own key. And, yes, this means our
Puppet Masters have to be very secure. However, they were already
secure because, like, dude... if you can break into someone's puppet
master you own their network.
*If you use Puppet, why didn't you just use hiera-eyaml?* There are 4
reasons:
1. This works with any Git or Mercurial repo, even if you aren't using Puppet.
2. hiera-eyaml decrypts "on demand" which means your Puppet Master now uses a lot of CPU to decrypt keys every time it is contacted. It slows down your master, which, in my case, is already slow enough.
3. This works with binary files, without having to ASCIIify them and paste them into a YAML file. Have you tried to do this with a cert that is 10K long and changes every few weeks? Ick.
4. hiera-eyaml didn't exist when I wrote this. (That's the real reason.)

62
docs/expired-keys.md Normal file
View File

@@ -0,0 +1,62 @@
Replacing expired keys
======================
If someone's key has already expired, blackbox will stop
encrypting. You see this error:
```
$ blackbox_edit_end modified_file.txt
--> Error: can't re-encrypt because a key has expired.
```
FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?"
You can also detect keys that are about to expire by issuing this command and manually reviewing the "expired:" dates:
gpg --homedir=.blackbox --list-keys
or... list UIDs that will expire within 1 month from today: (Warning: this also lists keys without an expiration date)
gpg --homedir=.blackbox --list-keys --with-colons --fixed-list-mode | grep ^uid | awk -F: '$6 < '$(( $(date +%s) + 2592000))
Here's how to replace the key:
- Step 1. Administrator removes expired user:
Warning: This process will erase any unencrypted files that you were in the process of editing. Copy them elsewhere and restore the changes when done.
```
blackbox_removeadmin expired_user@example.com
# This next command overwrites any changed unencrypted files. See warning above.
blackbox_update_all_files
git commit -m "Re-encrypt all files"
gpg --homedir=.blackbox --delete-key expired_user@example.com
git commit -m 'Cleaned expired_user@example.com from keyring' .blackbox/*
git push
```
- Step 2. Expired user adds an updated key:
```
git pull
blackbox_addadmin updated_user@example.com
git commit -m'NEW ADMIN: updated_user@example.com' .blackbox/pubring.gpg .blackbox/trustdb.gpg .blackbox/blackbox-admins.txt
git push
```
- Step 3. Administrator re-encrypts all files with the updated key of the expired user:
```
git pull
gpg --import .blackbox/pubring.gpg
blackbox_update_all_files
git commit -m "Re-encrypt all files"
git push
```
- Step 4: Clean up:
Any files that were temporarily copied in the first step so as to not be overwritten can now be copied back and re-encrypted with the `blackbox_edit_end` command.
(Thanks to @chishaku for finding a solution to this problem!)

55
docs/file-ops.md Normal file
View File

@@ -0,0 +1,55 @@
How to add/remove a file into the system?
=========================================
# Adding files:
- If you need to, start the GPG Agent: `eval $(gpg-agent --daemon)`
- Add the file to the system:
```
blackbox file add path/to/file.name.key
# If you want to delete the old plaintext:
blackbox file add --shred path/to/file.name.key
```
Multiple file names can be specified on the command line:
Example 1: Register 2 files:
```
blackbox file add --shred file1.txt file2.txt
```
Example 2: Register all the files in `$DIR`:
```
find $DIR -type f -not -name '*.gpg' -print0 | xargs -0 blackbox file add
```
# Removing files
This command
```
blackbox file remove path/to/file.name.key
```
TODO(tlim): Add examples.
# List files
To see what files are currently enrolled in the system:
```
blackbox file list
```
You can also see their status:
```
blackbox status
blackbox status just_one_file.txt
blackbox status --type ENCRYPTED
```

34
docs/full-command-list.md Normal file
View File

@@ -0,0 +1,34 @@
Blackbox Command List
=====================
## Global Flags
### `--vcs`
### `--crypto`
### `--config`
### `--team`
### `--editor`
### `--umask`
### `--debug`
### `--help`
### `--version`
## User Commands
### `blackbox decrypt`
### `blackbox encrypt`
### `blackbox edit`
### `blackbox cat`
### `blackbox diff`
### `blackbox shred`
### `blackbox help`
## Admin Commands
### `blackbox init`
### `blackbox admin`
### `blackbox file`
### `blackbox status`
### `blackbox reencrypt`
## Debug
### `blackbox info`
## Integration Test (secret menu)
### `blackbox testing_init`
TODO(tlim): Can we automatically generate this? The data is all in cli.go

22
docs/git-tips.md Normal file
View File

@@ -0,0 +1,22 @@
GIT tips
========
# Configure git to show diffs in encrypted files
It's possible to tell Git to decrypt versions of the file before running them through `git diff` or `git log`. To achieve this do:
- Add the following to `.gitattributes` at the top of the git repository:
```
*.gpg diff=blackbox
```
- Add the following to `.git/config`:
```
[diff "blackbox"]
textconv = gpg --use-agent -q --batch --decrypt
```
Commands like `git log -p file.gpg` and `git diff master --` will display as expected.

31
docs/gnupg-tips.md Normal file
View File

@@ -0,0 +1,31 @@
GnuPG tips
==========
# Common error messages
* Message: `gpg: filename: skipped: No public key`
* Solution: Usually this means there is an item in
`.blackbox/blackbox-admins.txt` that is not the name of the key.
Either something invalid was inserted (like a filename instead of a
username) or a user has left the organization and their key was
removed from the keychain, but their name wasn't removed from the
blackbox-admins.txt file.
* Message: `gpg: decryption failed: No secret key`
* Solution: Usually means you forgot to re-encrypt the file with the new key.
* Message: `Error: can't re-encrypt because a key has expired.`
* Solution: A user's key has expired and can't be used to encrypt any more. Follow the [Replace expired keys](expired-keys.md) page.
FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?"
# GnuPG problems
Blackbox is just a front-end to GPG. If you get into a problem with a
key or file, you'll usually have better luck asking for advice on
the gnupg users mailing list TODO: Get link to this list
The author of Blackbox is not a GnuPG expert. He wrote Blackbox
because it was better than trying to remember GPG's horrible flag
names.

17
docs/installation.md Normal file
View File

@@ -0,0 +1,17 @@
Installation Instructions
=========================
Currently blackbox v2 is installed by compiling the code and
copying the binary someplace:
TODO:
```
git clone FILL IN
```
Future: We will have RPM, DEB, Chocolately packages.
Next step: [Enable on a repo](enable-repo.md)

158
docs/role-accounts.md Normal file
View File

@@ -0,0 +1,158 @@
Set up automated users or "role accounts"
=========================================
TODO(tlim): I think this is overly complex. With GnuPG 2.2 and later,
you can use `--password '' --quick-generate-key userid` and you are
done. No need for subkeys. Maybe rework this?
With role accounts, you have an automated system that needs to be able
to decrypt secrets without a password. This means the security of your
repo is based on how locked down the automation system is. This
is risky, so be careful.
i.e. This is how a Puppet Master can have access to the unencrypted data.
FYI: Your repo may use `keyrings/live` instead of `.blackbox`. See "Where is the configuration stored?"
An automated user (a "role account") is one that must be able to decrypt without a passphrase. In general you'll want to do this for the user that pulls the files from the repo to the master. This may be automated with Jenkins CI or other CI system.
GPG keys have to have a passphrase. However, passphrases are optional on subkeys. Therefore, we will create a key with a passphrase then create a subkey without a passphrase. Since the subkey is very powerful, it should be created on a very secure machine.
There's another catch. The role account probably can't check files into Git/Mercurial. It probably only has read-only access to the repo. That's a good security policy. This means that the role account can't be used to upload the subkey public bits into the repo.
Therefore, we will create the key/subkey on a secure machine as yourself. From there we can commit the public portions into the repo. Also from this account we will export the parts that the role account needs, copy them to where the role account can access them, and import them as the role account.
ProTip: If asked to generate entropy, consider running this on the same machine in another window: `sudo dd if=/dev/sda of=/dev/null`
For the rest of this doc, you'll need to make the following substitutions:
- ROLEUSER: svc_deployacct or whatever your role account's name is.
- NEWMASTER: the machine this role account exists on.
- SECUREHOST: The machine you use to create the keys.
NOTE: This should be more automated/scripted. Patches welcome.
On SECUREHOST, create the puppet master's keys:
```
$ mkdir /tmp/NEWMASTER
$ cd /tmp/NEWMASTER
$ gpg --homedir . --gen-key
Your selection?
(1) RSA and RSA (default)
What keysize do you want? (2048) DEFAULT
Key is valid for? (0) DEFAULT
# Real name: Puppet CI Deploy Account
# Email address: svc_deployacct@hostname.domain.name
```
NOTE: Rather than a real email address, use the username@FQDN of the host the key will be used on. If you use this role account on many machines, each should have its own key. By using the FQDN of the host, you will be able to know which key is which. In this doc, we'll refer to username@FQDN as $KEYNAME
Save the passphrase somewhere safe!
Create a sub-key that has no password:
```
$ gpg --homedir . --edit-key svc_deployacct
gpg> addkey
(enter passphrase)
Please select what kind of key you want:
(3) DSA (sign only)
(4) RSA (sign only)
(5) Elgamal (encrypt only)
(6) RSA (encrypt only)
Your selection? 6
What keysize do you want? (2048)
Key is valid for? (0)
Command> key 2
(the new subkey has a "*" next to it)
Command> passwd
(enter the main key's passphrase)
(enter an empty passphrase for the subkey... confirm you want to do this)
Command> save
```
Now securely export this directory to NEWMASTER:
```
gpg --homedir . --export -a svc_sadeploy >/tmp/NEWMASTER/pubkey.txt
tar cvf /tmp/keys.tar .
rsync -avP /tmp/keys.tar NEWMASTER:/tmp/.
```
On NEWMASTER, receive the new GnuPG config:
```
sudo -u svc_deployacct bash
mkdir -m 0700 -p ~/.gnupg
cd ~/.gnupg && tar xpvf /tmp/keys.tar
```
<!---
Back on SECUREHOST, import the pubkey into the repository.
```
$ cd .blackbox
$ gpg --homedir . --import /tmp/NEWMASTER/pubkey.txt
```
-->
Back on SECUREHOST, add the new email address to .blackbox/blackbox-admins.txt:
```
cd /path/to/the/repo
blackbox_addadmin $KEYNAME /tmp/NEWMASTER
```
Verify that secring.gpg is a zero-length file. If it isn't, you have somehow added a private key to the keyring. Start over.
```
cd .blackbox
ls -l secring.gpg
```
Commit the recent changes:
```
cd .blackbox
git commit -m"Adding key for KEYNAME" pubring.gpg trustdb.gpg blackbox-admins.txt
```
Regenerate all encrypted files with the new key:
```
blackbox_update_all_files
git status
git commit -m"updated encryption" -a
git push
```
On NEWMASTER, import the keys and decrypt the files:
```
sudo -u svc_sadeploy bash # Become the role account.
gpg --import /etc/puppet/.blackbox/pubring.gpg
export PATH=$PATH:/path/to/blackbox/bin
blackbox_postdeploy
sudo -u puppet cat /etc/puppet/hieradata/blackbox.yaml # or any encrypted file.
```
ProTip: If you get "gpg: decryption failed: No secret key" then you forgot to re-encrypt blackbox.yaml with the new key.
On SECUREHOST, securely delete your files:
```
cd /tmp/NEWMASTER
# On machines with the "shred" command:
shred -u /tmp/keys.tar
find . -type f -print0 | xargs -0 shred -u
# All else:
rm -rf /tmp/NEWMASTER
```
Also shred any other temporary files you may have made.

21
docs/subversion-tips.md Normal file
View File

@@ -0,0 +1,21 @@
Subversion Tips
===============
NOTE: This is from v1. Can someone that uses Subversion check
this and update it?
The current implementation will store the blackbox in `/keyrings` at
the root of the entire repo. This will create an issue between
environments that have different roots (i.e. checking out `/` on
development vs `/releases/foo` in production). To get around this, you
can `export BLACKBOX_REPOBASE=/path/to/repo` and set a specific base
for your repo.
This was originally written for git and supports a two-phase commit,
in which `commit` is a local commit and "push" sends the change
upstream to the version control server when something is registered or
deregistered with the system. The current implementation will
immediately `commit` a file (to the upstream subversion server) when
you execute a `blackbox_*` command.

16
docs/support.md Normal file
View File

@@ -0,0 +1,16 @@
Support
=======
# Join our community!
Join the [blackbox-project mailing list](https://groups.google.com/d/forum/blackbox-project)!
# How to submit bugs or ask questions?
We welcome questions, bug reports and feedback!
The best place to start is to join the [blackbox-project mailing list](https://groups.google.com/d/forum/blackbox-project) and ask there.
Bugs are tracked here in Github. Please feel free to [report bugs](https://github.com/StackExchange/blackbox/issues) yourself.

124
docs/user-overview.md Normal file
View File

@@ -0,0 +1,124 @@
User Guide
==========
# Overview
Suppose you have a VCS repository (i.e. a Git or Mercurial repo) and
certain files contain secrets such as passwords or SSL private keys.
Often people just store such files "and hope that nobody finds them in
the repo". That's not safe. Hope is not a strategy.
With BlackBox, those files are stored encrypted using GPG. Access to
the repo without also having the right GPG keys makes those files as worthless
as random bits. As long as you keep your GPG keys safe, you don't
have to worry about storing your VCS repo on an untrusted server or
letting anyone clone the repo.
Heck, even if you trust your server, now you don't have to trust the
people that do backups of that server!
Each person ("admin") of the system can decrypt all the files using
their GPG key, which has its own passphrase. The authorized GPG keys
can decrypt any file. This is better than systems that use one
GPG key (and passphrase) that must be shared among a group of people.
It is much better than having one passphrase for each file (I don't
think anyone actually does that).
Since any admin's GPG key can decrypt the files, if one person leaves
the company, you don't have to communicate a new passphrase to everyone.
Simply disable the one key that should no longer have access.
The process for doing this is as easy as running 2 commands (1 to
disable their key, 1 to re-encrypt all files.) Obviously if they kept
a copy of the repo (and their own passphrase) before leaving the
company, they have access to the secrets. However, you should rotate
those secrets anyway. ("rotate secrets" means changing the passwords,
regenerating TLS certs, and so on).
# Sample session:
First we are going to list the files currently in the blackbox. In
this case, it is an SSH private key.
```
$ blackbox file list
modules/log_management/files/id_rsa
```
Excellent! Our coworkers have already registered a file with the
system. Let's decrypt it, edit it, and re-encrypt it.
```
$ blackbox decrypt modules/log_management/files/id_rsa
========== DECRYPTING "modules/log_management/files/id_rsa"
$ vi modules/log_management/files/id_rsa
```
That was easy so far!
When we encrypt it, Blackbox will not commit the changes, but it
will give a hint that you should. It spells out the exact command you
need to type and even proposes a commit message.
```
$ blackbox encrypt modules/log_management/files/id_rsa
========== ENCRYPTING "modules/log_management/files/id_rsa"
NEXT STEP: You need to manually check these in:
git commit -m"ENCRYPTED modules/log_management/files/id_rsa" modules/log_management/files/id_rsa.gpg
```
You can also use `blackbox edit <filename>` to decrypt a file, edit it
(it will call `$EDITOR`) and re-encrypt it.
Now let's register a new file with the blackbox system.
`data/pass.yaml` is a small file that stores a very important
password. In this example, we had just stored the unencrypted
password in our repo. That's bad. Let's encrypt it.
```
$ blackbox file add data/pass.yaml
========== SHREDDING ("/bin/rm", "-f"): "data/pass.yaml"
NEXT STEP: You need to manually check these in:
git commit -m"NEW FILES: data/pass.yaml" .gitignore keyrings/live/blackbox-files.txt modules/stacklb/pass.yaml modules/stacklb/pass.yaml.gpg
```
Before we commit the change, let's do a `git status` to see what else
has changed.
```
$ git status
On branch master
Changes to be committed:
(use "git restore --staged <file>..." to unstage)
modified: .gitignore
modified: keyrings/live/blackbox-files.txt
deleted: modules/stacklb/pass.yaml
new file: modules/stacklb/pass.yaml.gpg
```
Notice that a number of files were modified:
* `.gitignore`: This file is updated to include the plaintext
filename, so that you don't accidentally add it to the repo in the
future.
* `.blackbox/blackbox-files.txt`: The list of files that are registered with the system.
* `data/pass.yaml`: The file we encrypted is deleted from the repo.
* `data/pass.yaml.gpg`: The encrypted file is added to the repo.
Even though pass.yaml was deleted from the repo, it is still in the
repo's history. Anyone with an old copy of the repo, or a new copy
that knows how to view the repo's history, can see the secret
password. For that reason, you should change the password and
re-encrypt the file. This is an important point. Blackbox is not
magic and it doesn't have a "Men In Black"-style neuralizer that
can make people forget the past. If someone leaves a project, you
have to change the old passwords, etc.
Those are the basics. Your next step might be:
* TODO: How to enable Blackbox for a repo.
* TODO: How to add yourself as an admin to a repo.
* TODO: Complete list of [all blackbox commands](all-commands)

View File

@@ -0,0 +1,17 @@
Why encrypt your secrets?
=========================
OBVIOUSLY we don't want secret things like SSL private keys and
passwords to be leaked.
NOT SO OBVIOUSLY when we store "secrets" in a VCS repo like Git or
Mercurial, suddenly we are less able to share our code with other
people. Communication between subteams of an organization is hurt. You
can't collaborate as well. Either you find yourself emailing
individual files around (yuck!), making a special repo with just the
files needed by your collaborators (yuck!!), or just deciding that
collaboration isn't worth all that effort (yuck!!!).
The ability to be open and transparent about our code, with the
exception of a few specific files, is key to the kind of collaboration
that DevOps and modern IT practitioners need to do.

18
docs/with-ansible.md Normal file
View File

@@ -0,0 +1,18 @@
How to use the secrets with Ansible?
===================================
Ansible Vault provides functionality for encrypting both entire files
and strings stored within files; however, keeping track of the
password(s) required for decryption is not handled by this module.
Instead one must specify a password file when running the playbook.
Ansible example for password file: `my_secret_password.txt.gpg`
```
ansible-playbook --vault-password-file my_secret_password.txt site.yml
```
Alternatively, one can specify this in the
`ANSIBLE_VAULT_PASSWORD_FILE` environment variable.

68
docs/with-puppet.md Normal file
View File

@@ -0,0 +1,68 @@
How to use the secrets with Puppet?
===================================
# Entire files:
Entire files, such as SSL certs and private keys, are treated just
like regular files. You decrypt them any time you push a new release
to the puppet master.
Example of an encrypted file named `secret_file.key.gpg`
* Plaintext file is: `modules/${module_name}/files/secret_file.key`
* Encrypted file is: `modules/${module_name}/files/secret_file.key.gpg`
* Puppet sees it as: `puppet:///modules/${module_name}/secret_file.key`
Puppet code that stores `secret_file.key` in `/etc/my_little_secret.key`:
```
file { '/etc/my_little_secret.key':
ensure => 'file',
owner => 'root',
group => 'puppet',
mode => '0760',
source => "puppet:///modules/${module_name}/secret_file.key", # No ".gpg"
}
```
# Small strings:
For small strings such as passwords and API keys, it makes sense
to store them in an (encrypted) YAML file which is then made
available via hiera.
For example, we use a file called `blackbox.yaml`. You can access the
data in it using the hiera() function.
*Setup:*
Edit `hiera.yaml` to include "blackbox" to the search hierarchy:
```
:hierarchy:
- ...
- blackbox
- ...
```
In blackbox.yaml specify:
```
---
module::test_password: "my secret password"
```
In your Puppet Code, access the password as you would any hiera data:
```
$the_password = hiera('module::test_password', 'fail')
file {'/tmp/debug-blackbox.txt':
content => $the_password,
owner => 'root',
group => 'root',
mode => '0600',
}
```
The variable `$the_password` will contain "my secret password" and can be used anywhere strings are used.

11
go.mod Normal file
View File

@@ -0,0 +1,11 @@
module github.com/StackExchange/blackbox/v2
go 1.14
require (
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883
github.com/mattn/go-runewidth v0.0.9 // indirect
github.com/olekukonko/tablewriter v0.0.4
github.com/sergi/go-diff v1.2.0 // indirect
github.com/urfave/cli/v2 v2.2.0
)

37
go.sum Normal file
View File

@@ -0,0 +1,37 @@
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=
github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/urfave/cli/v2 v2.2.0 h1:JTTnM6wKzdA0Jqodd966MVj4vWbbquZykeX1sKbe2C4=
github.com/urfave/cli/v2 v2.2.0/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

91
integrationTest/NOTES.txt Normal file
View File

@@ -0,0 +1,91 @@
This should accept VCS-type and --crypto flags.
Then a shell script should run various combinations of VCS and crypters.
# Startup
* Create a repo (git, none)
# Test basic operations:
* As Alice:
* initialize blackbox, add her keys to it, see that the usual files
exist. See her name in bb-admins.txt
* encrypt a file, see that the plaintext is deleted, see the file in bb-files.txt
* decrypt the file, see the original plaintext is recovered.
* Encrypt a file --noshred.
* Decrypt the file, it should fail as the plaintext exists.
* Remove the plaintext.
* Decrypt the file, it should now succeed since the plaintext was removed.
# Test hand-off from Alice to Bob.
* As Bob
* add himself to the admins.
* As Alice
* Update-all-files
* Create a new file. Encrypt it.
* As Bob
* Decrypt both files
* Verify contents of the new file, and the file from previous.
* Create a new file. Encrypt it.
* As Alice:
* Decrypt all files.
* Verify contents of the 3 plaintext files.
# Test a git-less directory
* Copy the old repo somewhere. Remove the .git directory.
* As Alice:
* Decrypt all
* Verify plaintext contents
# Test post-deploy with/without GID
* Back at the original repo:
* Shred all
* Run post-deploy. Verify.
* Shred all
* Run post-deploy with a custom GID. Verify.
# Test removing an admin
* As Bob:
* removes Alice. (Verify)
* Re-encrypt
* Decrypt all & verify.
* As alice
* Decrypting should fail.
# Test funny names and paths
* my/path/to/relsecrets.txt
* cwd=other/place ../../my/path/to/relsecrets.txt
* !important!.txt
* #andpounds.txt
* stars*bars?.txt
* space space.txt
* Do add/encrypt/decrypt
* Do blackbox_update_all_files
* Do remove them all
# When people start asking for commands to work with relative paths
# Test from outside the repo
* mkdir ../other/place
* cd ../other/place
* decrypt ../../secret1.txt
* encrypt ../../secret1.txt
# Test specific commands:
# blackbox admins list
# blackbox file list
# blackbox status --name-only (create 1 of each "type")
# blackbox status --type=FOO
# These should all fail:
# blackbox file list --all
# blackbox file list blah
# blackbox shred list --all
# blackbox shred list blah
rm -rf /tmp/bbhome-* && BLACKBOX_DEBUG=true go test -verbose -long -nocleanup
rm -rf /tmp/bbhome-* && go test -long -nocleanup
( gbb && cd cmd/blackbox && go install ) && blackbox
cd /tmp && rm -rf /tmp/bbhome-* ; mkdir /tmp/bbhome-1 ; cd /tmp/bbhome-1 && git init ; gitmeWork ; ( gbb && cd cmd/blackbox && go install ) && blackbox init yes && gitmeWork ; git commit -mm -a ; blackbox admin add tlimoncelli ; git commit -mnewadmin -a ; echo secrt > secret.txt ; blackbox file add secret.txt

View File

@@ -0,0 +1,55 @@
Each test does the following:
1. Copy the files from testdata/NNNN
2. Run the command in test_NNNN.sh
3.
TEST ENROLLMENT:
PHASE 'Alice creates a repo. She creates secret.txt.'
PHASE 'Alice wants to be part of the secret system.'
PHASE 'She creates a GPG key...'
PHASE 'Initializes BB...'
PHASE 'and adds herself as an admin.'
PHASE 'Bob arrives.'
PHASE 'Bob creates a gpg key.'
PHASE 'Alice does the second part to enroll bob.'
PHASE 'She enrolls bob.'
PHASE 'She enrolls secrets.txt.'
PHASE 'She decrypts secrets.txt.'
PHASE 'She edits secrets.txt.'
PHASE 'Alice copies files to a non-repo directory. (NO REPO)'
PHASE 'Alice shreds these non-repo files. (NO REPO)'
PHASE 'Alice decrypts secrets.txt (NO REPO).'
PHASE 'Alice edits secrets.txt. (NO REPO EDIT)'
PHASE 'Alice decrypts secrets.txt (NO REPO EDIT).'
PHASE 'appears.'
#PHASE 'Bob makes sure he has all new keys.'
TEST INDIVIDUAL COMMANDS:
PHASE 'Bob postdeploys... default.'
PHASE 'Bob postdeploys... with a GID.'
PHASE 'Bob cleans up the secret.'
PHASE 'Bob removes Alice.'
PHASE 'Bob reencrypts files so alice can not access them.'
PHASE 'Bob decrypts secrets.txt.'
PHASE 'Bob edits secrets.txt.'
PHASE 'Bob decrypts secrets.txt VERSION 3.'
PHASE 'Bob exposes a secret in the repo.'
PHASE 'Bob corrects it by registering it.'
PHASE 'Bob enrolls my/path/to/relsecrets.txt.'
PHASE 'Bob decrypts relsecrets.txt.'
PHASE 'Bob enrolls !important!.txt'
PHASE 'Bob enrolls #andpounds.txt'
PHASE 'Bob enrolls stars*bars?.txt'
PHASE 'Bob enrolls space space.txt'
PHASE 'Bob checks out stars*bars?.txt.'
PHASE 'Bob checks out space space.txt.'
PHASE 'Bob shreds all exposed files.'
PHASE 'Bob updates all files.'
PHASE 'Bob DEregisters mistake.txt'
PHASE 'Bob enrolls multiple files: multi1.txt and multi2.txt'
PHASE 'Alice returns. She should be locked out'
PHASE 'Alice tries to decrypt secret.txt. Is blocked.'

View File

@@ -0,0 +1,68 @@
package main
import (
"io/ioutil"
"os"
"testing"
"github.com/andreyvit/diff"
)
// assertFileMissing fails the test unless the named file does not exist.
// Any stat error other than "not exist" is also fatal.
func assertFileMissing(t *testing.T, name string) {
	t.Helper()
	switch _, err := os.Stat(name); {
	case err == nil:
		t.Fatalf("assertFileMissing failed: %v exists", name)
	case os.IsNotExist(err):
		return
	default:
		t.Fatalf("assertFileMissing: %q: %v", name, err)
	}
}
// assertFileExists fails the test unless the named file can be stat'd.
// A "not exist" error and any other stat error produce distinct messages.
func assertFileExists(t *testing.T, name string) {
	t.Helper()
	_, err := os.Stat(name)
	switch {
	case err == nil:
		return
	case os.IsNotExist(err):
		t.Fatalf("assertFileExists failed: %v not exist", name)
	default:
		t.Fatalf("assertFileExists: file can't be accessed: %v: %v", name, err)
	}
}
// assertFileEmpty fails the test unless the named file exists and has
// zero length. Read errors are fatal.
func assertFileEmpty(t *testing.T, name string) {
	t.Helper()
	contents, err := ioutil.ReadFile(name)
	if err != nil {
		t.Fatal(err)
	}
	if n := len(contents); n != 0 {
		t.Fatalf("got=%v want=%v: %v", n, 0, name)
	}
}
// assertFileContents fails the test unless the named file's contents
// exactly match the expected string; on mismatch it reports a line diff.
func assertFileContents(t *testing.T, name string, contents string) {
	t.Helper()
	data, err := ioutil.ReadFile(name)
	if err != nil {
		t.Fatal(err)
	}
	got := string(data)
	if got != contents {
		t.Errorf("assertFileContents(%q) mismatch (-got +want):\n%s",
			name, diff.LineDiff(got, contents))
	}
}
// assertFilePerms fails the test unless the file's mode exactly equals
// perms. Note that os.FileInfo.Mode() includes file-type bits, so perms
// must match the full mode, not just the permission bits.
func assertFilePerms(t *testing.T, name string, perms os.FileMode) {
	t.Helper()
	info, err := os.Stat(name)
	if err != nil {
		t.Fatal(err)
	}
	if got := info.Mode(); got != perms {
		t.Fatalf("got=%#o want=%#o: %v", got, perms, name)
	}
}

View File

@@ -0,0 +1,343 @@
package main
import (
"flag"
"fmt"
"os"
"os/exec"
"path/filepath"
"testing"
"github.com/StackExchange/blackbox/v2/pkg/bblog"
_ "github.com/StackExchange/blackbox/v2/pkg/bblog"
_ "github.com/StackExchange/blackbox/v2/pkg/vcs/_all"
)
// vcsToTest selects which VCS plug-in the integration tests exercise;
// it is matched case-insensitively against the registered VCS catalog.
var vcsToTest = flag.String("testvcs", "GIT", "VCS to test")

// longTests gates the slow integration tests; run "go test -long" to enable.
var longTests = flag.Bool("long", false, "Run long version of tests")

//var crypterToTest = flag.String("crypter", "GnuPG", "crypter to test")
// init registers the testing flags, parses the command line, and records
// the directory the tests were started from in originPath.
func init() {
	testing.Init()
	flag.Parse()

	wd, err := os.Getwd()
	if err != nil {
		panic(err)
	}
	originPath = wd
}
// compile builds the blackbox binary (once per test run) and records its
// absolute path via SetPathToBlackBox. Subsequent calls are no-ops.
func compile(t *testing.T) {
	if PathToBlackBox() != "" {
		// Already built by an earlier test.
		return
	}

	// Build a fresh binary so the tests exercise the current source.
	fmt.Println("========== Compiling")
	build := exec.Command("go", "build", "-o", "../bbintegration", "../cmd/blackbox")
	build.Stdout = os.Stdout
	build.Stderr = os.Stderr
	if err := build.Run(); err != nil {
		t.Fatalf("setup_compile: %v", err)
	}

	wd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	SetPathToBlackBox(filepath.Join(wd, "../bbintegration"))
}
// setup resolves the VCS selected by -testvcs and exports its name in the
// BLACKBOX_VCS environment variable for the binary under test to pick up.
func setup(t *testing.T) {
	debug := bblog.GetDebug(*verbose)
	debug.Printf("flag.testvcs is %v", *vcsToTest)
	vh := getVcs(t, *vcsToTest)
	debug.Printf("Using BLACKBOX_VCS=%v", vh.Name())
	os.Setenv("BLACKBOX_VCS", vh.Name())
}
// TestInit verifies that "blackbox init" rejects extra arguments and
// creates the admin/file registries as empty files with 0640 permissions.
func TestInit(t *testing.T) {
	if !*longTests {
		// Use t.Skip (not a bare return) so gated tests are reported as
		// skipped instead of silently passing.
		t.Skip("skipping long test; run with -long to enable")
	}
	compile(t)
	makeHomeDir(t, "init")

	// Only zero or one args are permitted.
	invalidArgs(t, "init", "one", "two")
	invalidArgs(t, "init", "one", "two", "three")

	runBB(t, "init", "yes")
	assertFileEmpty(t, ".blackbox/blackbox-admins.txt")
	assertFileEmpty(t, ".blackbox/blackbox-files.txt")
	assertFilePerms(t, ".blackbox/blackbox-admins.txt", 0o640)
	assertFilePerms(t, ".blackbox/blackbox-files.txt", 0o640)
}
// TestList verifies "admin list" and "file list" output against golden
// files and that both reject extra arguments.
func TestList(t *testing.T) {
	if !*longTests {
		// Use t.Skip (not a bare return) so gated tests are reported as
		// skipped instead of silently passing.
		t.Skip("skipping long test; run with -long to enable")
	}
	compile(t)
	makeHomeDir(t, "init")
	runBB(t, "init", "yes")
	createDummyFilesAdmin(t)
	checkOutput("000-admin-list.txt", t, "admin", "list")
	checkOutput("000-file-list.txt", t, "file", "list")

	invalidArgs(t, "file", "list", "extra")
	invalidArgs(t, "admin", "list", "extra")
}
// TestStatus verifies the "status" command output against a golden file.
func TestStatus(t *testing.T) {
	if !*longTests {
		// Use t.Skip (not a bare return) so gated tests are reported as
		// skipped instead of silently passing.
		t.Skip("skipping long test; run with -long to enable")
	}
	compile(t)
	makeHomeDir(t, "init")
	runBB(t, "init", "yes")
	createFilesStatus(t)
	checkOutput("000-status.txt", t, "status")
}
// TestShred verifies that "shred" removes a plaintext file.
func TestShred(t *testing.T) {
	if !*longTests {
		// Use t.Skip (not a bare return) so gated tests are reported as
		// skipped instead of silently passing.
		t.Skip("skipping long test; run with -long to enable")
	}
	compile(t)
	makeHomeDir(t, "shred")
	runBB(t, "init", "yes")

	makeFile(t, "shredme.txt", "File with SHREDME in it.\n")
	assertFileExists(t, "shredme.txt")
	runBB(t, "shred", "shredme.txt")
	assertFileMissing(t, "shredme.txt")
}
// TestStatus_notreg verifies "status" output when asked about files that
// are not registered with blackbox.
func TestStatus_notreg(t *testing.T) {
	if !*longTests {
		// Use t.Skip (not a bare return) so gated tests are reported as
		// skipped instead of silently passing.
		t.Skip("skipping long test; run with -long to enable")
	}
	compile(t)
	makeHomeDir(t, "init")
	runBB(t, "init", "yes")
	createFilesStatus(t)
	checkOutput("status-noreg.txt", t, "status", "status-ENCRYPTED.txt", "blah.txt")
}
// TestHard tests the functions using a fake homedir and repo. It walks a
// full single-admin scenario: init, admin add, file add (with shred),
// decrypt, re-encrypt, reencrypt --overwrite, and the "cat" variations.
func TestHard(t *testing.T) {
	if !*longTests {
		// Use t.Skip (not a bare return) so gated tests are reported as
		// skipped instead of silently passing.
		t.Skip("skipping long test; run with -long to enable")
	}

	// These are basic tests that work on a fake repo.
	// The repo has mostly real data, except any .gpg file
	// is just garbage.
	compile(t)
	setup(t)

	for _, cx := range []struct{ subname, prefix string }{
		//{subname: ".", prefix: "."},
		{subname: "mysub", prefix: ".."},
	} {
		subname := cx.subname
		prefix := cx.prefix
		_ = prefix
		phase("========== SUBDIR = " + subname + " ==========")

		makeHomeDir(t, "BasicAlice")
		plaintextFoo := "I am the foo.txt file!\n"
		plainAltered := "I am the altered file!\n"

		runBB(t, "testing_init") // Runs "git init" or equiv
		assertFileExists(t, ".git")
		runBB(t, "init", "yes") // Creates .blackbox or equiv

		if subname != "." {
			err := os.Mkdir(subname, 0770)
			if err != nil {
				t.Fatal(fmt.Errorf("hard-mk-home %q: %v", subname, err))
			}
		}
		olddir, err := os.Getwd()
		if err != nil {
			t.Fatal(err)
		}
		// NOTE(review): this chdir round-trip is a no-op; it appears to be a
		// leftover from the commented-out subdir experiments below.
		os.Chdir(subname)
		os.Chdir(olddir)

		phase("Alice creates a GPG key")
		gpgdir := makeAdmin(t, "alice", "Alice Example", "alice@example.com")
		become(t, "alice")

		phase("Alice enrolls as an admin")
		//os.Chdir(subname)
		runBB(t, "admin", "add", "alice@example.com", gpgdir)
		//os.Chdir(olddir)

		// encrypt
		phase("Alice registers foo.txt")
		makeFile(t, "foo.txt", plaintextFoo)
		//os.Chdir(subname)
		//runBB(t, "file", "add", "--shred", filepath.Join(prefix, "foo.txt"))
		runBB(t, "file", "add", "--shred", "foo.txt")
		//os.Chdir(olddir)
		// "file add" encrypts the file.
		// We shred the plaintext so that we are sure that when Decrypt runs,
		// we can verify the contents wasn't just sitting there all the time.
		assertFileMissing(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")

		phase("Alice decrypts foo.txt")
		// decrypt
		//os.Chdir(subname)
		runBB(t, "decrypt", "foo.txt")
		//runBB(t, "decrypt", filepath.Join(prefix, "foo.txt"))
		//os.Chdir(olddir)
		assertFileExists(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")
		assertFileContents(t, "foo.txt", plaintextFoo)

		// encrypts (without shredding)
		phase("Alice encrypts foo.txt (again)")
		runBB(t, "encrypt", "foo.txt")
		assertFileExists(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")
		assertFileContents(t, "foo.txt", plaintextFoo)

		// reencrypt
		phase("Alice reencrypts")
		checkOutput("basic-status.txt", t, "status")
		runBB(t, "reencrypt", "--overwrite", "foo.txt")

		// Test variations of cat

		// foo.txt=plain result=plain
		phase("Alice cats plain:plain")
		makeFile(t, "foo.txt", plaintextFoo)
		assertFileExists(t, "foo.txt")
		runBB(t, "encrypt", "foo.txt")
		assertFileExists(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")
		checkOutput("alice-cat-plain.txt", t, "cat", "foo.txt")
		assertFileExists(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")

		// foo.txt=altered result=plain
		phase("Alice cats altered:plain")
		makeFile(t, "foo.txt", plainAltered)
		assertFileExists(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")
		checkOutput("alice-cat-plain.txt", t, "cat", "foo.txt")
		assertFileExists(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")

		// foo.txt=missing result=plain
		phase("Alice cats missing:plain")
		removeFile(t, "foo.txt")
		// (The original asserted this twice in a row; once is sufficient.)
		assertFileMissing(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")
		checkOutput("alice-cat-plain.txt", t, "cat", "foo.txt")
		assertFileMissing(t, "foo.txt")
		assertFileExists(t, "foo.txt.gpg")

		// Chapter 2: Bob
		// Alice adds Bob.
		// Bob encrypts a file.
		// Bob makes sure he can decrypt alice's file.
		// Bob removes Alice.
		// Alice verifies she CAN'T decrypt files.
		// Bob adds Alice back.
		// Alice verifies she CAN decrypt files.
		// Bob adds an encrypted file by mistake, "bb add" and fixes it.
		// Bob corrupts the blackbox-admins.txt file, verifies that commands fail.
	}
}
// TestEvilFilenames verifies commands work with "difficult" file names:
// shell metacharacters, whitespace, Unicode, and relative paths. Each name
// goes through the full add/decrypt/encrypt/shred cycle.
func TestEvilFilenames(t *testing.T) {
	if !*longTests {
		// Use t.Skip (not a bare return) so gated tests are reported as
		// skipped instead of silently passing.
		t.Skip("skipping long test; run with -long to enable")
	}
	compile(t)
	setup(t)
	makeHomeDir(t, "Mallory")

	runBB(t, "testing_init") // Runs "git init" or equiv
	assertFileExists(t, ".git")
	runBB(t, "init", "yes") // Creates .blackbox or equiv

	phase("Mallory creates a GPG key") // (typo "Malory" fixed)
	gpgdir := makeAdmin(t, "mallory", "Mallory Evil", "mallory@example.com")
	become(t, "mallory")

	phase("Mallory enrolls as an admin")
	runBB(t, "admin", "add", "mallory@example.com", gpgdir)

	// Pre-create the directories the relative-path test cases need.
	_ = os.MkdirAll("my/path/to", 0o770)
	_ = os.Mkdir("other", 0o770)

	for i, name := range []string{
		"!important!.txt",
		"#andpounds.txt",
		"stars*bars?.txt",
		"space space.txt",
		"tab\ttab.txt",
		"ret\rret.txt",
		"smile😁eyes",
		"¡que!",
		"thé",
		"pound£",
		"*.go",
		"rm -f erase ; echo done",
		`smile☺`,
		`dub𝓦`,
		"my/path/to/relsecrets.txt",
		//"my/../my/path/../path/to/myother.txt", // Not permitted yet
		//"other/../my//path/../path/to/otherother.txt", // Not permitted yet
		//"new\nnew.txt", // \n not permitted
		//"two\n", // \n not permitted (yet)
		//"four\U0010FFFF", // Illegal byte sequence. git won't accept.
	} {
		phase(fmt.Sprintf("Mallory tries %02d: %q", i, name))
		contents := "the name of this file is the talking heads... i mean, " + name

		makeFile(t, name, contents)
		assertFileExists(t, name)
		assertFileMissing(t, name+".gpg")
		assertFileContents(t, name, contents)

		// "file add" encrypts and (by default) removes the plaintext.
		runBB(t, "file", "add", name)
		assertFileMissing(t, name)
		assertFileExists(t, name+".gpg")

		runBB(t, "decrypt", name)
		assertFileExists(t, name)
		assertFileExists(t, name+".gpg")
		assertFileContents(t, name, contents)

		runBB(t, "encrypt", name)
		assertFileExists(t, name)
		assertFileExists(t, name+".gpg")
		assertFileContents(t, name, contents)

		runBB(t, "shred", name)
		assertFileMissing(t, name)
		assertFileExists(t, name+".gpg")
	}
}
// More tests to implement.
// 1. Verify that the --gid works (blackbox decrypt --gid)

View File

@@ -0,0 +1,617 @@
package main
import (
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"testing"
"time"
"github.com/StackExchange/blackbox/v2/pkg/bblog"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
"github.com/StackExchange/blackbox/v2/pkg/vcs"
_ "github.com/StackExchange/blackbox/v2/pkg/vcs/_all"
"github.com/andreyvit/diff"
)
var verbose = flag.Bool("verbose", false, "reveal stderr")
var nocleanup = flag.Bool("nocleanup", false, "do not delete the tmp directory")
// userinfo records the GnuPG identity created for one simulated test
// user (see makeAdmin); instances are stored in the users map.
type userinfo struct {
	name      string // Short name, e.g. "alice" (also the users map key).
	dir       string // .gnupg-$name: used as GNUPGHOME for this user.
	agentInfo string // GPG_AGENT_INFO value captured from gpg-agent.
	email     string // Email address placed on the generated key.
	fullname  string // Full name placed on the generated key.
}
var users = map[string]*userinfo{}
func init() {
	// testing.Init registers the testing framework's flags so that
	// flag.Parse below doesn't choke on -test.* arguments. Parsing here
	// makes *verbose and *nocleanup available to later init functions.
	testing.Init()
	flag.Parse()
}
var logErr *log.Logger
var logDebug *log.Logger
func init() {
	// NOTE(review): depends on the init above having already run
	// flag.Parse (init funcs in one file run in source order), otherwise
	// *verbose would still be the default.
	logErr = bblog.GetErr()
	logDebug = bblog.GetDebug(*verbose)
}
// getVcs returns the VCS plug-in whose name matches (case-insensitively),
// or nil if no plug-in matches or the matching plug-in fails to build.
func getVcs(t *testing.T, name string) vcs.Vcs {
	t.Helper()
	// Set up the vcs
	for _, v := range vcs.Catalog {
		logDebug.Printf("Testing vcs: %v == %v", name, v.Name)
		// IDIOM FIX: strings.EqualFold replaces the double
		// strings.ToLower comparison (no temporary strings allocated).
		if strings.EqualFold(v.Name, name) {
			h, err := v.New()
			if err != nil {
				return nil // No idea how that would happen.
			}
			return h
		}
		logDebug.Println("...Nope.")
	}
	return nil
}
// TestBasicCommands's helpers
// makeHomeDir creates a fresh $HOME containing an empty "repo"
// subdirectory, points the HOME env var at it, and chdirs into the repo.
// With -nocleanup the directory is a predictable /tmp path that survives
// the run; otherwise it is a unique temp dir removed when the test ends.
func makeHomeDir(t *testing.T, testname string) {
	t.Helper()
	var homedir string
	var err error
	if *nocleanup {
		// Make a predictable location; don't delete it afterwards.
		homedir = "/tmp/bbhome-" + testname
		os.RemoveAll(homedir)
		err = os.Mkdir(homedir, 0770)
		if err != nil {
			t.Fatal(fmt.Errorf("mk-home %q: %v", homedir, err))
		}
	} else {
		// Make a random location that is deleted automatically.
		homedir, err = ioutil.TempDir("", "bbhome-"+testname)
		if err != nil {
			t.Fatal(err)
		}
		// BUG FIX: this used to be `defer os.RemoveAll(homedir)`, which
		// fired as soon as makeHomeDir returned — deleting the home dir
		// while the test was still running inside it. t.Cleanup defers
		// removal until the calling test finishes. (The err check also
		// now precedes any use of homedir.)
		t.Cleanup(func() { os.RemoveAll(homedir) })
	}
	err = os.Setenv("HOME", homedir)
	if err != nil {
		t.Fatal(err)
	}
	logDebug.Printf("TESTING DIR HOME: cd %v\n", homedir)

	repodir := filepath.Join(homedir, "repo")
	err = os.Mkdir(repodir, 0770)
	if err != nil {
		t.Fatal(fmt.Errorf("mk-repo %q: %v", repodir, err))
	}
	err = os.Chdir(repodir)
	if err != nil {
		t.Fatal(err)
	}
}
// createDummyFilesAdmin populates the repo with a plausible .blackbox
// config (two admins, two registered files). The plaintext files are
// real but the .gpg files contain ROT13 of the plaintext, so no actual
// GPG invocation is needed.
func createDummyFilesAdmin(t *testing.T) {
	// This creates a repo with real data, except any .gpg file
	// is just garbage.
	addLineSorted(t, ".blackbox/blackbox-admins.txt", "user1@example.com")
	addLineSorted(t, ".blackbox/blackbox-admins.txt", "user2@example.com")
	addLineSorted(t, ".blackbox/blackbox-files.txt", "foo.txt")
	addLineSorted(t, ".blackbox/blackbox-files.txt", "bar.txt")
	makeFile(t, "foo.txt", "I am the foo.txt file!")
	// NOTE(review): bar.txt deliberately(?) shares foo.txt's contents —
	// the expected outputs in test_data contain this exact text, so do
	// not "fix" it without updating those files too.
	makeFile(t, "bar.txt", "I am the foo.txt file!")
	makeFile(t, "foo.txt.gpg", "V nz gur sbb.gkg svyr!")
	makeFile(t, "bar.txt.gpg", "V nz gur one.gkg svyr!")
}
// createFilesStatus creates one file per status that the "status"
// subcommand can report. Plaintext is real; "ciphertext" is ROT13 of the
// plaintext. The sleep+Touch at the end arranges the timestamps:
// status-DECRYPTED.txt ends up newer than its .gpg, and
// status-ENCRYPTED.txt.gpg newer than its plaintext.
func createFilesStatus(t *testing.T) {
	// This creates a few files with real plaintext but fake cyphertext.
	// There are a variety of timestamps to enable many statuses.
	t.Helper()

	// DECRYPTED: File is decrypted and ready to edit (unknown if it has been edited).
	// ENCRYPTED: GPG file is newer than plaintext. Indicates recently edited then encrypted.
	// SHREDDED: Plaintext is missing.
	// GPGMISSING: The .gpg file is missing. Oops?
	// PLAINERROR: Can't access the plaintext file to determine status.
	// GPGERROR: Can't access .gpg file to determine status.
	addLineSorted(t, ".blackbox/blackbox-files.txt", "status-DECRYPTED.txt")
	addLineSorted(t, ".blackbox/blackbox-files.txt", "status-ENCRYPTED.txt")
	addLineSorted(t, ".blackbox/blackbox-files.txt", "status-SHREDDED.txt")
	addLineSorted(t, ".blackbox/blackbox-files.txt", "status-GPGMISSING.txt")
	// addLineSorted(t, ".blackbox/blackbox-files.txt", "status-PLAINERROR.txt")
	// addLineSorted(t, ".blackbox/blackbox-files.txt", "status-GPGERROR.txt")
	addLineSorted(t, ".blackbox/blackbox-files.txt", "status-BOTHMISSING.txt")

	// Combination of age difference either missing, file error, both missing.
	makeFile(t, "status-DECRYPTED.txt", "File with DECRYPTED in it.")
	makeFile(t, "status-DECRYPTED.txt.gpg", "Svyr jvgu QRPELCGRQ va vg.")
	makeFile(t, "status-ENCRYPTED.txt", "File with ENCRYPTED in it.")
	makeFile(t, "status-ENCRYPTED.txt.gpg", "Svyr jvgu RAPELCGRQ va vg.")
	// Plaintext intentionally missing.
	makeFile(t, "status-SHREDDED.txt.gpg", "Svyr jvgu FUERQQRQ va vg.")
	makeFile(t, "status-GPGMISSING.txt", "File with GPGMISSING in it.")
	// gpg file intentionally missing.
	// Plaintext intentionally missing. ("status-BOTHMISSING.txt")
	// gpg file intentionally missing. ("status-BOTHMISSING.txt.gpg")

	// NB(tlim): commented out. I can't think of an error I can reproduce.
	// makeFile(t, "status-PLAINERROR.txt", "File with PLAINERROR in it.")
	// makeFile(t, "status-PLAINERROR.txt.gpg", "Svyr jvgu CYNVAREEBE va vg.")
	// setFilePerms(t, "status-PLAINERROR.txt", 0000)

	// NB(tlim): commented out. I can't think of an error I can reproduce.
	// makeFile(t, "status-GPGERROR.txt", "File with GPGERROR in it.")
	// makeFile(t, "status-GPGERROR.txt.gpg", "Svyr jvgu TCTREEBE va vg.")
	// setFilePerms(t, "status-GPGERROR.txt.gpg", 0000)

	// Sleep so the Touch calls below produce timestamps measurably newer
	// than the files created above (filesystem mtime granularity).
	time.Sleep(200 * time.Millisecond)
	if err := bbutil.Touch("status-DECRYPTED.txt"); err != nil {
		t.Fatal(err)
	}
	if err := bbutil.Touch("status-ENCRYPTED.txt.gpg"); err != nil {
		t.Fatal(err)
	}
}
// addLineSorted inserts line into the sorted file filename, failing the
// test on error.
func addLineSorted(t *testing.T, filename, line string) {
	// FIX: t.Helper() was missing, so failures were attributed to this
	// helper rather than the calling test line.
	t.Helper()
	err := bbutil.AddLinesToSortedFile(filename, line)
	if err != nil {
		t.Fatalf("addLineSorted failed: %v", err)
	}
}
func removeFile(t *testing.T, name string) {
os.RemoveAll(name)
}
func makeFile(t *testing.T, name string, content string) {
t.Helper()
err := ioutil.WriteFile(name, []byte(content), 0666)
if err != nil {
t.Fatalf("makeFile can't create %q: %v", name, err)
}
}
func setFilePerms(t *testing.T, name string, perms int) {
t.Helper()
err := os.Chmod(name, os.FileMode(perms))
if err != nil {
t.Fatalf("setFilePerms can't chmod %q: %v", name, err)
}
}
// originPath is the CWD when the program started. Tests chdir into temp
// repos, so test_data paths are resolved relative to it.
// NOTE(review): it is assigned elsewhere in this file (not visible here)
// — confirm it is set before checkOutput runs.
var originPath string // CWD when program started.

// checkOutput runs blackbox with args, the last arg is the filename
// of the expected output. Error if output is not expected.
func checkOutput(name string, t *testing.T, args ...string) {
	t.Helper()
	cmd := exec.Command(PathToBlackBox(), args...)
	cmd.Stdin = nil
	// Stdout must stay nil so cmd.Output() below can capture it.
	cmd.Stdout = nil
	cmd.Stderr = os.Stderr
	var gb []byte
	gb, err := cmd.Output()
	if err != nil {
		t.Fatal(fmt.Errorf("checkOutput(%q): %w", args, err))
	}
	got := string(gb)
	// Expected output lives in test_data/<name>, relative to originPath.
	wb, err := ioutil.ReadFile(filepath.Join(originPath, "test_data", name))
	if err != nil {
		t.Fatalf("checkOutput can't read %v: %v", name, err)
	}
	want := string(wb)
	//fmt.Printf("CHECKOUTPUT g: %v\n", got)
	//fmt.Printf("CHECKOUTPUT w: %v\n", want)
	if g, w := got, want; g != w {
		t.Errorf("checkOutput(%q) mismatch (-got +want):\n%s",
			args, diff.LineDiff(g, w))
	}
}
// invalidArgs runs blackbox with args and asserts that the command
// FAILS; a successful exit is a test failure.
func invalidArgs(t *testing.T, args ...string) {
	t.Helper()
	logDebug.Printf("invalidArgs(%q): \n", args)
	cmd := exec.Command(PathToBlackBox(), args...)
	cmd.Stdin = nil
	if *verbose {
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr
	}
	if err := cmd.Run(); err != nil {
		// The expected outcome: the command refused the bad arguments.
		logDebug.Printf("^^^^ (correct error received): err=%q\n", err)
		return
	}
	logDebug.Println("BAD")
	t.Fatal(fmt.Errorf("invalidArgs(%q): wanted failure but got success", args))
}
// TestAliceAndBob's helpers.

// setupUser is a placeholder hook for per-user test setup; at present it
// only logs its arguments.
func setupUser(t *testing.T, user, passphrase string) {
	t.Helper()
	logDebug.Printf("DEBUG: setupUser %q %q\n", user, passphrase)
}
// pathToBlackBox caches the location of the blackbox binary compiled for
// integration testing; set via SetPathToBlackBox.
var pathToBlackBox string

// PathToBlackBox returns the path to the executable we compile for integration testing.
func PathToBlackBox() string { return pathToBlackBox }

// SetPathToBlackBox sets the path (and logs it for debugging).
func SetPathToBlackBox(n string) {
	logDebug.Printf("PathToBlackBox=%q\n", n)
	pathToBlackBox = n
}
// runBB runs the compiled blackbox binary with args, passing its output
// through to the test's stdout/stderr, and fails the test on a non-zero
// exit.
func runBB(t *testing.T, args ...string) {
	t.Helper()
	logDebug.Printf("runBB(%q)\n", args)
	cmd := exec.Command(PathToBlackBox(), args...)
	cmd.Stdin = nil
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		t.Fatal(fmt.Errorf("runBB(%q): %w", args, err))
	}
}
// phase prints a loud banner to the debug log marking the start of a
// test phase.
func phase(msg string) {
	for i := 0; i < 2; i++ {
		logDebug.Println("********************")
	}
	logDebug.Printf("********* %v\n", msg)
	for i := 0; i < 2; i++ {
		logDebug.Println("********************")
	}
}
// makeAdmin creates a GnuPG identity for a simulated test user: a
// keyring directory at $HOME/.gnupg-<name> plus a passphrase-less
// keypair. It registers the user in the users map and returns the
// keyring directory. Works with both new GnuPG (--quick-generate-key)
// and old GnuPG (--gen-key with a batch file).
func makeAdmin(t *testing.T, name, fullname, email string) string {
	t.Helper()
	dir, err := filepath.Abs(filepath.Join(os.Getenv("HOME"), ".gnupg-"+name))
	if err != nil {
		t.Fatal(err)
	}
	// FIX: the Mkdir error used to be ignored entirely.
	if err := os.Mkdir(dir, 0700); err != nil && !os.IsExist(err) {
		t.Fatal(err)
	}
	u := &userinfo{
		name:     name,
		dir:      dir,
		fullname: fullname,
		email:    email,
	}
	users[name] = u

	// GNUPGHOME=u.dir
	// echo 'pinentry-program' "$(which pinentry-tty)" >> "$GNUPGHOME/gpg-agent.conf"
	os.Setenv("GNUPGHOME", u.dir)
	if runtime.GOOS != "darwin" {
		ai, err := bbutil.RunBashOutput("gpg-agent", "--homedir", u.dir, "--daemon")
		// NB(tlim): It should return something like:
		// `GPG_AGENT_INFO=/home/tlimoncelli/.gnupg/S.gpg-agent:18548:1; export GPG_AGENT_INFO;`
		if err != nil {
			// Not fatal: modern gpg auto-starts its agent on demand.
			logDebug.Printf("gpg-agent --daemon failed (ignored): %v", err)
		}
		if !strings.HasPrefix(ai, "GPG_AGENT_INFO=") {
			fmt.Println("WARNING: gpg-agent didn't output what we expected. Assumed dead.")
		} else {
			u.agentInfo = ai[15:strings.Index(ai, ";")]
			os.Setenv("GPG_AGENT_INFO", u.agentInfo)
			fmt.Printf("GPG_AGENT_INFO=%q (was %q)\n", ai, u.agentInfo)
		}
	}
	os.Setenv("GNUPGHOME", u.dir)

	// Generate key:
	// BUG FIX: throughout this function the error returned by
	// bbutil.RunBash was previously discarded, and the subsequent
	// `if err != nil` checks tested a stale (always-nil) err.
	if hasQuick(t) {
		fmt.Println("DISCOVERED: NEW GPG")
		fmt.Printf("Generating %q using --qgk\n", u.email)
		if err := bbutil.RunBash("gpg",
			"--homedir", u.dir,
			"--batch",
			"--passphrase", "",
			"--quick-generate-key", u.email,
		); err != nil {
			t.Fatal(err)
		}
	} else {
		fmt.Println("DISCOVERED: OLD GPG")
		fmt.Println("MAKING KEY")
		tmpfile, err := ioutil.TempFile("", "example")
		if err != nil {
			t.Fatal(err)
		}
		defer os.Remove(tmpfile.Name()) // clean up
		batch := `%echo Generating a basic OpenPGP key
Key-Type: RSA
Key-Length: 2048
Subkey-Type: RSA
Subkey-Length: 2048
Name-Real: ` + u.fullname + `
Name-Comment: Not for actual use
Name-Email: ` + u.email + `
Expire-Date: 0
%pubring ` + filepath.Join(u.dir, `pubring.gpg`) + `
%secring ` + filepath.Join(u.dir, `secring.gpg`) + `
# Do a commit here, so that we can later print "done"
%commit
%echo done`
		//fmt.Printf("BATCH START\n%s\nBATCH END\n", batch)
		fmt.Fprintln(tmpfile, batch)
		// FIXME(tlim): The batch file should include a password, but then
		// we need to figure out how to get "blackbox encrypt" and other
		// commands to input a password in an automated way.
		// To experiment with this, add after "Expire-Date:" a line like:
		// Passphrase: kljfhslfjkhsaljkhsdflgjkhsd
		// Current status: without that line GPG keys have no passphrase
		// and none is requested.
		if err := bbutil.RunBash("gpg",
			"--homedir", u.dir,
			"--verbose",
			"--batch",
			"--gen-key",
			tmpfile.Name(),
		); err != nil {
			t.Fatal(err)
		}
		if err := tmpfile.Close(); err != nil {
			t.Fatal(err)
		}
		// We do this just to force gpg to create trustdb.gpg.
		if err := bbutil.RunBash("gpg",
			"--homedir", u.dir,
			"--list-keys",
		); err != nil {
			t.Fatal(err)
		}
		if err := bbutil.RunBash("gpg",
			"--homedir", u.dir,
			"--list-secret-keys",
		); err != nil {
			t.Fatal(err)
		}
	}
	return u.dir
}
// hasQuick reports whether the installed gpg2 supports
// --quick-generate-key (i.e. is a "new" GnuPG). It probes by running the
// command under --dry-run and checking whether it succeeds.
func hasQuick(t *testing.T) bool {
	testing.Init()
	fmt.Println("========== Do we have --quick-generate-key?")
	err := bbutil.RunBash("gpg2",
		"--dry-run",
		"--quick-generate-key",
		"--batch",
		"--passphrase", "",
		"foo", "rsa", "encr")
	fmt.Println("========== Done")
	if err == nil {
		return true
	}
	//fmt.Printf("DISCOVER GPG: %d", err.ExitCode())
	// NOTE(review): this branch looks unreachable — an *exec.ExitError
	// with ExitCode()==0 should not occur for a failed run. Kept as a
	// safety net; confirm before removing.
	if exitError, ok := err.(*exec.ExitError); ok {
		if exitError.ExitCode() == 0 {
			return true
		}
	}
	return false
}
// become switches the process environment to act as the named test user:
// points GNUPGHOME / GPG_AGENT_INFO at their keyring and agent, and sets
// the git author identity. The user must have been created via makeAdmin.
func become(t *testing.T, name string) {
	t.Helper()
	// FIX: a missing user previously caused a nil-pointer panic below.
	u, ok := users[name]
	if !ok {
		t.Fatalf("become(%q): user was never created via makeAdmin", name)
	}
	os.Setenv("GNUPGHOME", u.dir)
	os.Setenv("GPG_AGENT_INFO", u.agentInfo)
	bbutil.RunBash("git", "config", "user.name", u.name)
	// BUG FIX: user.email was previously set to u.fullname.
	bbutil.RunBash("git", "config", "user.email", u.email)
}
// // Get fingerprint:
// // Retrieve fingerprint of generated key.
// // Use it to extract the secret/public keys.
// // (stolen from https://raymii.org/s/articles/GPG_noninteractive_batch_sign_trust_and_send_gnupg_keys.html)
//
// // fpr=`gpg --homedir /tmp/blackbox_createrole --fingerprint --with-colons "$ROLE_NAME" | awk -F: '/fpr:/ {print $10}' | head -n 1`
// var fpr string
// bbutil.RunBashOutput("gpg",
// "--homedir", "/tmp/blackbox_createrole",
// "--fingerprint",
// "--with-colons",
// u.email,
// )
// for i, l := range string.Split(out, "\n") {
// if string.HasPrefix(l, "fpr:") {
// fpr = strings.Split(l, ":")[9]
// }
// break
// }
//
// // Create key key:
// // gpg --homedir "$gpghomedir" --batch --passphrase '' --quick-add-key "$fpr" rsa encr
// bbutil.RunBash("gpg",
// "--homedir", u.dir,
// "--batch",
// "--passphrase", "",
// "--quick-add-key", fpr,
// "rsa", "encr",
// )
// function md5sum_file() {
// # Portably generate the MD5 hash of file $1.
// case $(uname -s) in
// Darwin | FreeBSD )
// md5 -r "$1" | awk '{ print $1 }'
// ;;
// NetBSD )
// md5 -q "$1"
// ;;
// SunOS )
// digest -a md5 "$1"
// ;;
// Linux )
// md5sum "$1" | awk '{ print $1 }'
// ;;
// CYGWIN* )
// md5sum "$1" | awk '{ print $1 }'
// ;;
// * )
// echo 'ERROR: Unknown OS. Exiting.'
// exit 1
// ;;
// esac
// }
//
// function assert_file_missing() {
// if [[ -e "$1" ]]; then
// echo "ASSERT FAILED: ${1} should not exist."
// exit 1
// fi
// }
//
// function assert_file_exists() {
// if [[ ! -e "$1" ]]; then
// echo "ASSERT FAILED: ${1} should exist."
// echo "PWD=$(/usr/bin/env pwd -P)"
// #echo "LS START"
// #ls -la
// #echo "LS END"
// exit 1
// fi
// }
// function assert_file_md5hash() {
// local file="$1"
// local wanted="$2"
// assert_file_exists "$file"
// local found
// found=$(md5sum_file "$file")
// if [[ "$wanted" != "$found" ]]; then
// echo "ASSERT FAILED: $file hash wanted=$wanted found=$found"
// exit 1
// fi
// }
// function assert_file_group() {
// local file="$1"
// local wanted="$2"
// local found
// assert_file_exists "$file"
//
// case $(uname -s) in
// Darwin | FreeBSD | NetBSD )
// found=$(stat -f '%Dg' "$file")
// ;;
// Linux | SunOS )
// found=$(stat -c '%g' "$file")
// ;;
// CYGWIN* )
// echo "ASSERT_FILE_GROUP: Running on Cygwin. Not being tested."
// return 0
// ;;
// * )
// echo 'ERROR: Unknown OS. Exiting.'
// exit 1
// ;;
// esac
//
// echo "DEBUG: assert_file_group X${wanted}X vs. X${found}X"
// echo "DEBUG:" $(which stat)
// if [[ "$wanted" != "$found" ]]; then
// echo "ASSERT FAILED: $file chgrp group wanted=$wanted found=$found"
// exit 1
// fi
// }
// function assert_file_perm() {
// local wanted="$1"
// local file="$2"
// local found
// assert_file_exists "$file"
//
// case $(uname -s) in
// Darwin | FreeBSD | NetBSD )
// found=$(stat -f '%Sp' "$file")
// ;;
// # NB(tlim): CYGWIN hasn't been tested. It might be more like Darwin.
// Linux | CYGWIN* | SunOS )
// found=$(stat -c '%A' "$file")
// ;;
// * )
// echo 'ERROR: Unknown OS. Exiting.'
// exit 1
// ;;
// esac
//
// echo "DEBUG: assert_file_perm X${wanted}X vs. X${found}X"
// echo "DEBUG:" $(which stat)
// if [[ "$wanted" != "$found" ]]; then
// echo "ASSERT FAILED: $file chgrp perm wanted=$wanted found=$found"
// exit 1
// fi
// }
// function assert_line_not_exists() {
// local target="$1"
// local file="$2"
// assert_file_exists "$file"
// if grep -F -x -s -q >/dev/null "$target" "$file" ; then
// echo "ASSERT FAILED: line '$target' should not exist in file $file"
// echo "==== file contents: START $file"
// cat "$file"
// echo "==== file contents: END $file"
// exit 1
// fi
// }
// function assert_line_exists() {
// local target="$1"
// local file="$2"
// assert_file_exists "$file"
// if ! grep -F -x -s -q >/dev/null "$target" "$file" ; then
// echo "ASSERT FAILED: line '$target' should exist in file $file"
// echo "==== file contents: START $file"
// cat "$file"
// echo "==== file contents: END $file"
// exit 1
// fi
// }

View File

@@ -0,0 +1,2 @@
user1@example.com
user2@example.com

View File

@@ -0,0 +1,2 @@
bar.txt
foo.txt

View File

@@ -0,0 +1,9 @@
+-------------+------------------------+
| STATUS | NAME |
+-------------+------------------------+
| BOTHMISSING | status-BOTHMISSING.txt |
| DECRYPTED | status-DECRYPTED.txt |
| ENCRYPTED | status-ENCRYPTED.txt |
| GPGMISSING | status-GPGMISSING.txt |
| SHREDDED | status-SHREDDED.txt |
+-------------+------------------------+

View File

@@ -0,0 +1 @@
I am the foo.txt file!

View File

@@ -0,0 +1,5 @@
+-----------+---------+
| STATUS | NAME |
+-----------+---------+
| ENCRYPTED | foo.txt |
+-----------+---------+

View File

@@ -0,0 +1 @@
I am the foo.txt file!

View File

@@ -0,0 +1,6 @@
+-----------+----------------------+
| STATUS | NAME |
+-----------+----------------------+
| ENCRYPTED | status-ENCRYPTED.txt |
| NOTREG | blah.txt |
+-----------+----------------------+

15
models/crypters.go Normal file
View File

@@ -0,0 +1,15 @@
package models
// Crypter is the interface to an encryption backend: the gpg binaries,
// go-openpgp, etc.
type Crypter interface {
	// Name returns the plug-in's canonical name.
	Name() string
	// Decrypt decrypts filename+".gpg", possibly overwriting filename.
	Decrypt(filename string, umask int, overwrite bool) error
	// Encrypt encrypts filename, overwriting filename+".gpg".
	Encrypt(filename string, umask int, receivers []string) (string, error)
	// Cat outputs a file, unencrypting if needed.
	Cat(filename string) ([]byte, error)
	// AddNewKey extracts keyname from sourcedir's GnuPG chain to destdir keychain.
	AddNewKey(keyname, repobasename, sourcedir, destdir string) ([]string, error)
}

30
models/vcs.go Normal file
View File

@@ -0,0 +1,30 @@
package models
import "github.com/StackExchange/blackbox/v2/pkg/commitlater"
// Vcs is the interface to a version-control backend: git, hg, etc.
type Vcs interface {
	// Name returns the plug-in's canonical name.
	Name() string
	// Discover returns true if we are a repo of this type; along with the Abs path to the repo root (or "" if we don't know).
	Discover() (bool, string)
	// SetFileTypeUnix informs the VCS that files should maintain unix-style line endings.
	SetFileTypeUnix(repobasedir string, files ...string) error
	// IgnoreAnywhere tells the VCS to ignore these files anywhere in the repo.
	IgnoreAnywhere(repobasedir string, files []string) error
	// IgnoreFiles tells the VCS to ignore these files, rooted in the base of the repo.
	// (The comment previously mislabeled this method as IgnoreAnywhere.)
	IgnoreFiles(repobasedir string, files []string) error
	// CommitTitle sets the title of the next commit.
	CommitTitle(title string)
	// NeedsCommit queues up commits for later execution.
	NeedsCommit(message string, repobasedir string, names []string)
	// DebugCommits dumps a list of future commits.
	DebugCommits() commitlater.List
	// FlushCommits informs the VCS to do queued up commits.
	FlushCommits() error
	// TestingInitRepo initializes a repo of this type (for use by integration tests).
	TestingInitRepo() error
}

48
pkg/bblog/bblog.go Normal file
View File

@@ -0,0 +1,48 @@
package bblog
import (
"io/ioutil"
"log"
"os"
)
/*
To use this, include the following lines in your .go file.
var logErr *log.Logger
var logDebug *log.Logger
func init() {
logErr = bblog.GetErr()
logDebug = bblog.GetDebug(debug)
}
Or in a function:
logErr := bblog.GetErr()
logDebug := bblog.GetDebug(debug)
logDebug.Printf("whatever: %v", err)
*/
// logErr and logDebug are the package-level singletons handed out by
// GetErr and GetDebug.
var logErr *log.Logger
var logDebug *log.Logger

// GetErr returns a logger handle used for errors; it is created lazily
// on first call and reused thereafter.
func GetErr() *log.Logger {
	if logErr == nil {
		logErr = log.New(os.Stderr, "", 0)
	}
	return logErr
}

// GetDebug returns a Logger handle used for debug info (output is
// discarded if visible=false).
// NOTE(review): unlike GetErr, every call re-creates the logger, so a
// later call with a different visible value silently replaces the
// package-level logDebug.
func GetDebug(visible bool) *log.Logger {
	if visible {
		logDebug = log.New(os.Stderr, "", 0)
	} else {
		// Invisible mode (i.e. display nothing)
		logDebug = log.New(ioutil.Discard, "", 0)
	}
	return logDebug
}

130
pkg/bbutil/filestats.go Normal file
View File

@@ -0,0 +1,130 @@
package bbutil
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strings"
"time"
)
// DirExists returns true if directory exists.
func DirExists(path string) (bool, error) {
stat, err := os.Stat(path)
if err == nil {
return stat.IsDir(), nil
}
if os.IsNotExist(err) {
return false, nil
}
return true, err
}
// FileExistsOrProblem returns true if the file exists or if we can't determine its existence.
func FileExistsOrProblem(path string) bool {
_, err := os.Stat(path)
if err == nil {
return true
}
if os.IsNotExist(err) {
return false
}
return true
}
// Touch updates the timestamp of a file.
func Touch(name string) error {
var err error
_, err = os.Stat(name)
if os.IsNotExist(err) {
file, err := os.Create(name)
if err != nil {
return fmt.Errorf("TouchFile failed: %w", err)
}
file.Close()
}
currentTime := time.Now().Local()
return os.Chtimes(name, currentTime, currentTime)
}
// ReadFileLines is like ioutil.ReadFile() but returns an []string.
func ReadFileLines(filename string) ([]string, error) {
b, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
s := string(b)
s = strings.TrimSuffix(s, "\n")
if s == "" {
return []string{}, nil
}
l := strings.Split(s, "\n")
return l, nil
}
// AddLinesToSortedFile inserts newlines into a sorted file, keeping it
// sorted. It is an error if the file is not already sorted.
func AddLinesToSortedFile(filename string, newlines ...string) error {
	lines, err := ReadFileLines(filename)
	//fmt.Printf("DEBUG: read=%q\n", lines)
	if err != nil {
		return fmt.Errorf("AddLinesToSortedFile can't read %q: %w", filename, err)
	}
	if !sort.StringsAreSorted(lines) {
		return fmt.Errorf("AddLinesToSortedFile: file wasn't sorted: %v", filename)
	}
	lines = append(lines, newlines...)
	sort.Strings(lines)
	contents := strings.Join(lines, "\n") + "\n"
	//fmt.Printf("DEBUG: write=%q\n", contents)
	if err := ioutil.WriteFile(filename, []byte(contents), 0o660); err != nil {
		return fmt.Errorf("AddLinesToSortedFile can't write %q: %w", filename, err)
	}
	return nil
}
// AddLinesToFile appends newlines to the end of a file.
func AddLinesToFile(filename string, newlines ...string) error {
	lines, err := ReadFileLines(filename)
	if err != nil {
		return fmt.Errorf("AddLinesToFile can't read %q: %w", filename, err)
	}
	lines = append(lines, newlines...)
	contents := strings.Join(lines, "\n") + "\n"
	if err := ioutil.WriteFile(filename, []byte(contents), 0o660); err != nil {
		return fmt.Errorf("AddLinesToFile can't write %q: %w", filename, err)
	}
	return nil
}
// FindDirInParent looks for target in CWD, or .., or ../.., etc.
func FindDirInParent(target string) (string, error) {
// Prevent an infinite loop by only doing "cd .." this many times
maxDirLevels := 30
relpath := "."
for i := 0; i < maxDirLevels; i++ {
// Does relpath contain our target?
t := filepath.Join(relpath, target)
//logDebug.Printf("Trying %q\n", t)
_, err := os.Stat(t)
if err == nil {
return t, nil
}
if !os.IsNotExist(err) {
return "", fmt.Errorf("stat failed FindDirInParent (%q): %w", t, err)
}
// Ok, it really wasn't found.
// If we are at the root, stop.
if abs, err := filepath.Abs(relpath); err == nil && abs == "/" {
break
}
// Try one directory up
relpath = filepath.Join("..", relpath)
}
return "", fmt.Errorf("Not found")
}

21
pkg/bbutil/rbio_test.go Normal file
View File

@@ -0,0 +1,21 @@
package bbutil
import (
"testing"
)
// TestRunBashInputOutput round-trips a string through `cat` to verify
// that stdin is delivered and stdout is captured.
func TestRunBashInputOutput(t *testing.T) {
	want := "This is a test of the RBIO system.\n"
	out, err := RunBashInputOutput([]byte(want), "cat")
	if err != nil {
		t.Error(err)
	}
	if got := string(out); want != got {
		t.Errorf("not equal %q %q", want, got)
	}
}

77
pkg/bbutil/runbash.go Normal file
View File

@@ -0,0 +1,77 @@
package bbutil
import (
"bytes"
"fmt"
"log"
"os"
"os/exec"
)
// RunBash runs a command with stdin, stdout, and stderr wired to the
// parent process's. It returns an error if the command cannot start
// (e.g. not found) or exits non-zero.
func RunBash(command string, args ...string) error {
	cmd := exec.Command(command, args...)
	cmd.Stdin = os.Stdin
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	// BUG FIX: a start failure previously called log.Fatal, killing the
	// whole process from inside a library function. Return the error to
	// the caller instead (cmd.Run covers both Start and Wait failures).
	if err := cmd.Run(); err != nil {
		return fmt.Errorf("RunBash cmd=%q err=%w", command, err)
	}
	return nil
}
// RunBashOutput runs a command and returns its captured stdout as a
// string. Stderr passes through to the parent process.
func RunBashOutput(command string, args ...string) (string, error) {
	c := exec.Command(command, args...)
	c.Stderr = os.Stderr
	c.Stdin = os.Stdin
	stdout, err := c.Output()
	if err != nil {
		return "", fmt.Errorf("RunBashOutput err=%w", err)
	}
	return string(stdout), nil
}
// RunBashOutputSilent runs a command and returns its captured stdout;
// stderr is left unwired and therefore discarded.
func RunBashOutputSilent(command string, args ...string) (string, error) {
	c := exec.Command(command, args...)
	c.Stdin = os.Stdin
	// Intentionally no c.Stderr: stderr output is dropped.
	stdout, err := c.Output()
	if err != nil {
		return "", fmt.Errorf("RunBashOutputSilent err=%w", err)
	}
	return string(stdout), nil
}
// RunBashInput runs a command, feeding it input on stdin; stdout and
// stderr pass through to the parent process.
func RunBashInput(input string, command string, args ...string) error {
	c := exec.Command(command, args...)
	c.Stdin = bytes.NewBufferString(input)
	c.Stdout = os.Stdout
	c.Stderr = os.Stderr
	if err := c.Run(); err != nil {
		return fmt.Errorf("RunBashInput err=%w", err)
	}
	return nil
}
// RunBashInputOutput runs a command, feeding it input on stdin and
// returning its captured stdout. Stderr passes through to the parent.
func RunBashInputOutput(input []byte, command string, args ...string) ([]byte, error) {
	c := exec.Command(command, args...)
	c.Stdin = bytes.NewReader(input)
	c.Stderr = os.Stderr
	out, err := c.Output()
	if err != nil {
		return nil, fmt.Errorf("RunBashInputOutput err=%w", err)
	}
	return out, nil
}

109
pkg/bbutil/shred.go Normal file
View File

@@ -0,0 +1,109 @@
package bbutil
// Pick an appropriate secure erase command for this operating system
// or just delete the file with os.Remove().
// Code rewritten based https://codereview.stackexchange.com/questions/245072
import (
"fmt"
"io/ioutil"
"os"
"os/exec"
)
// shredCmds lists candidate secure-delete commands in preference order,
// each with the flag(s) needed to make it remove the file.
var shredCmds = []struct {
	name, opts string
}{
	{"sdelete", "-a"},
	{"shred", "-u"},
	{"srm", "-f"},
	{"rm", "-Pf"},
}
func shredTemp(path, opts string) error {
file, err := ioutil.TempFile("", "shredTemp.")
if err != nil {
return err
}
filename := file.Name()
defer os.Remove(filename)
defer file.Close()
err = file.Close()
if err != nil {
return err
}
err = RunBash(path, opts, filename)
if err != nil {
return err
}
return nil
}
// shredPath and shredOpts are discovered at package init: the first
// entry of shredCmds that both exists on $PATH and successfully shreds a
// scratch file. Both are "" when no working command is found (callers
// then fall back to a plain delete).
var shredPath, shredOpts = func() (string, string) {
	for _, cmd := range shredCmds {
		path, err := exec.LookPath(cmd.name)
		if err != nil {
			continue
		}
		// Probe with a real temp file; some commands exist but fail.
		err = shredTemp(path, cmd.opts)
		if err == nil {
			return path, cmd.opts
		}
	}
	return "", ""
}()
// ShredInfo reveals the shred command and flags (for "blackbox info").
func ShredInfo() string {
	return fmt.Sprintf("%s %s", shredPath, shredOpts)
}
// shredFile securely erases one regular file. If no secure-erase command
// was discovered at init time (shredPath == ""), it falls back to a
// plain os.Remove.
func shredFile(filename string) error {
	fi, err := os.Stat(filename)
	if err != nil {
		return err
	}
	// Refuse to shred anything that isn't a regular file
	// (directories, symlinks, devices, ...).
	if !fi.Mode().IsRegular() {
		err := fmt.Errorf("filename is not mode regular")
		return err
	}
	if shredPath == "" {
		// No secure erase command found. Default to a normal file delete.
		// TODO(tlim): Print a warning? Have a flag that causes this to be an error?
		return os.Remove(filename)
	}
	err = RunBash(shredPath, shredOpts, filename)
	if err != nil {
		return err
	}
	return nil
}
// ShredFiles securely erases a list of files, skipping files that are
// already gone. It keeps going after individual failures; earlier
// errors are printed but only the LAST error is returned.
func ShredFiles(names []string) error {
	// TODO(tlim) DO the shredding in parallel like in v1.
	var eerr error
	for _, n := range names {
		_, err := os.Stat(n)
		if err != nil {
			if os.IsNotExist(err) {
				fmt.Printf("======= already gone: %q\n", n)
				continue
			}
			// NOTE(review): any other stat error falls through and the
			// shred is attempted anyway; shredFile will surface it.
		}
		fmt.Printf("========== SHREDDING: %q\n", n)
		e := shredFile(n)
		if e != nil {
			eerr = e
			fmt.Printf("ERROR: %v\n", e)
		}
	}
	return eerr
}

View File

@@ -0,0 +1,66 @@
package bbutil
import (
"io/ioutil"
"os"
"testing"
)
// TestAddLinesToSortedFile verifies insertion into an empty file and at
// the top, bottom, and middle of a sorted file.
func TestAddLinesToSortedFile(t *testing.T) {
	var tests = []struct {
		start    string
		add      []string
		expected string
	}{
		{"", []string{"one"}, "one\n"},
		{"begin\ntwo\n", []string{"at top"}, "at top\nbegin\ntwo\n"},
		{"begin\ntwo\n", []string{"zbottom"}, "begin\ntwo\nzbottom\n"},
		{"begin\ntwo\n", []string{"middle"}, "begin\nmiddle\ntwo\n"},
	}
	for i, test := range tests {
		tmpfile, err := ioutil.TempFile("", "example")
		if err != nil {
			t.Fatal(err)
		}
		tmpfilename := tmpfile.Name()
		// Backstop: guarantees removal even if a Fatal below fires
		// before the explicit Remove at the end of the iteration.
		defer os.Remove(tmpfilename)
		if _, err := tmpfile.Write([]byte(test.start)); err != nil {
			t.Fatal(err)
		}
		if err := tmpfile.Close(); err != nil {
			t.Fatal(err)
		}
		// BUG FIX: the error return of AddLinesToSortedFile used to be
		// ignored, so a failure surfaced only as a confusing contents
		// mismatch below.
		if err := AddLinesToSortedFile(tmpfilename, test.add...); err != nil {
			t.Fatalf("test %v: AddLinesToSortedFile: %v", i, err)
		}
		got, err := ioutil.ReadFile(tmpfilename)
		if err != nil {
			t.Fatal(err)
		}
		if test.expected != string(got) {
			t.Errorf("test %v: contents wrong:\nexpected: %q\n got: %q", i, test.expected, got)
		}
		os.Remove(tmpfilename)
	}
}

11
pkg/bbutil/umask_posix.go Normal file
View File

@@ -0,0 +1,11 @@
// +build !windows
package bbutil
import "syscall"
// Umask is a no-op on Windows, and calls syscall.Umask on all other
// systems. On Windows it returns 0, which is a decoy.
// This is the POSIX implementation (build-tagged !windows); like
// syscall.Umask it returns the previous umask value.
func Umask(mask int) int {
	return syscall.Umask(mask)
}

View File

@@ -0,0 +1,9 @@
// +build windows
package bbutil
// Umask is a no-op on Windows, and calls syscall.Umask on all other
// systems. On Windows it returns 0, which is a decoy.
// This is the Windows implementation (build-tagged windows).
func Umask(mask int) int {
	return 0o000
}

233
pkg/box/box.go Normal file
View File

@@ -0,0 +1,233 @@
package box
// box implements the box model.
import (
"fmt"
"log"
"os"
"path/filepath"
"sort"
"strings"
"github.com/StackExchange/blackbox/v2/pkg/bblog"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
"github.com/StackExchange/blackbox/v2/pkg/crypters"
"github.com/StackExchange/blackbox/v2/pkg/vcs"
"github.com/urfave/cli/v2"
)
var logErr *log.Logger
var logDebug *log.Logger
// Box describes what we know about a box.
type Box struct {
	// Paths:
	Team        string // Name of the team (i.e. .blackbox-$TEAM)
	RepoBaseDir string // Rel path to the VCS repo.
	ConfigPath  string // Abs or Rel path to the .blackbox (or whatever) directory.
	ConfigRO    bool   // True if we should not try to change files in ConfigPath.
	// Settings:
	Umask  int    // umask to set when decrypting
	Editor string // Editor to call
	Debug  bool   // Are we in debug logging mode?
	// Cache of data gathered from .blackbox:
	Admins   []string        // If non-empty, the list of admins.
	Files    []string        // If non-empty, the list of files.
	FilesSet map[string]bool // If non-nil, a set of Files.
	// Handles to interfaces:
	Vcs     vcs.Vcs          // Interface access to the VCS.
	Crypter crypters.Crypter // Interface access to GPG.
	// Private loggers for this box (mirrors the package-level pair).
	logErr   *log.Logger
	logDebug *log.Logger
}
// StatusMode selects which files a status query reports on.
type StatusMode int

const (
	// Itemized selects individual files listed by name.
	Itemized StatusMode = iota // Individual files by name
	// All selects every registered file (as opposed to an itemized list).
	All
	// Unchanged selects files whose status is unchanged.
	// NOTE(review): semantics inferred from the name — confirm at call sites.
	Unchanged
	// Changed selects files whose status has changed.
	// NOTE(review): semantics inferred from the name — confirm at call sites.
	Changed
)
// NewFromFlags creates a box using items from flags. Nearly all subcommands use this.
// On unrecoverable misconfiguration (no crypto backend, relative --config,
// missing .blackbox) it prints a message and calls os.Exit(1).
func NewFromFlags(c *cli.Context) *Box {
	// The goal of this is to create a fully-populated box (and box.Vcs)
	// so that all subcommands have all the fields and interfaces they need
	// to do their job.
	logErr = bblog.GetErr()
	logDebug = bblog.GetDebug(c.Bool("debug"))
	bx := &Box{
		Umask:    c.Int("umask"),
		Editor:   c.String("editor"),
		Team:     c.String("team"),
		logErr:   bblog.GetErr(),
		logDebug: bblog.GetDebug(c.Bool("debug")),
		Debug:    c.Bool("debug"),
	}
	// Discover which kind of VCS is in use, and the repo root.
	bx.Vcs, bx.RepoBaseDir = vcs.Discover()
	// Discover the crypto backend (GnuPG, go-openpgp, etc.)
	bx.Crypter = crypters.SearchByName(c.String("crypto"), c.Bool("debug"))
	if bx.Crypter == nil {
		fmt.Printf("ERROR! No CRYPTER found! Please set --crypto correctly or use the damn default\n")
		os.Exit(1)
	}
	// Find the .blackbox (or equiv.) directory.
	var err error
	configFlag := c.String("config")
	if configFlag != "" {
		// Flag is set. Better make sure it is valid.
		if !filepath.IsAbs(configFlag) {
			fmt.Printf("config flag value is a relative path. Too risky. Exiting.\n")
			os.Exit(1)
			// NB(tlim): We could return filepath.Abs(config) or maybe it just
			// works as is. I don't know, and until we have a use case to prove
			// it out, it's best to just not implement this.
		}
		bx.ConfigPath = configFlag
		bx.ConfigRO = true // External configs treated as read-only.
		// TODO(tlim): We could get fancy here and set ConfigReadOnly=true only
		// if we are sure configFlag is not within bx.RepoBaseDir. Again, I'd
		// like to see a use-case before we implement this.
		return bx
	}
	// Normal path. Flag not set, so we discover the path.
	bx.ConfigPath, err = FindConfigDir(bx.RepoBaseDir, c.String("team"))
	// "info" is the one subcommand that must work before init has
	// created the config dir, so only it tolerates a missing .blackbox.
	if err != nil && c.Command.Name != "info" {
		fmt.Printf("Can't find .blackbox or equiv. Have you run init?\n")
		os.Exit(1)
	}
	return bx
}
// NewUninitialized creates a box in a pre-init situation, i.e. for
// "blackbox init" before ".blackbox*" exists.
func NewUninitialized(c *cli.Context) *Box {
	/*
		This is for "blackbox init" (used before ".blackbox*" exists)
		Init needs:       How we populate it:
		bx.Vcs:           Discovered by calling each plug-in until succeeds.
		bx.ConfigDir:     Generated algorithmically (it doesn't exist yet).
	*/
	bx := &Box{
		Umask:    c.Int("umask"),
		Editor:   c.String("editor"),
		Team:     c.String("team"),
		logErr:   bblog.GetErr(),
		logDebug: bblog.GetDebug(c.Bool("debug")),
		Debug:    c.Bool("debug"),
	}
	bx.Vcs, bx.RepoBaseDir = vcs.Discover()
	if c.String("configdir") == "" {
		// Generate the config dir name: ".blackbox" or ".blackbox-TEAM".
		rel := ".blackbox"
		if bx.Team != "" {
			rel = ".blackbox-" + bx.Team
		}
		bx.ConfigPath = filepath.Join(bx.RepoBaseDir, rel)
	} else {
		// Wait. The user is using the --config flag on a repo that
		// hasn't been created yet? I hope this works!
		// BUG FIX: the message previously contained the garbled text
		// "use caseyour use-case".
		fmt.Printf("ERROR: You can not set --config when initializing a new repo. Please run this command from within a repo, with no --config flag. Or, file a bug explaining your use-case. Exiting!\n")
		os.Exit(1)
		// TODO(tlim): We could get fancy here and query the Vcs to see if the
		// path would fall within the repo, figure out the relative path, and
		// use that value. (and error if configflag is not within the repo).
		// That would be error prone and would only help the zero users that
		// ever see the above error message.
	}
	return bx
}
// NewForTestingInit creates a box in a bare environment.
func NewForTestingInit(vcsname string) *Box {
	/*
		This is for "blackbox test_init" (secret command used in integration tests; when nothing exists)
		TestingInitRepo only uses bx.Vcs, so that's all we set.
		Populates bx.Vcs by finding the provider named vcsname.
	*/
	bx := &Box{}

	// Find the VCS provider whose name matches vcsname (case-insensitive).
	var vh vcs.Vcs
	var err error
	vcsname = strings.ToLower(vcsname)
	for _, v := range vcs.Catalog {
		if strings.ToLower(v.Name) == vcsname {
			vh, err = v.New()
			if err != nil {
				return nil // No idea how that would happen.
			}
		}
	}
	// NOTE(review): if no provider matched, bx.Vcs stays nil; callers such
	// as TestingInitRepo check for that.
	bx.Vcs = vh
	return bx
}
// getAdmins loads bx.Admins from the config dir, memoizing the result.
// The on-disk list must be sorted; other code binary-searches it with
// sort.SearchStrings.
func (bx *Box) getAdmins() error {
	// Memoized: skip the file read if the list is already populated.
	if len(bx.Admins) != 0 {
		return nil
	}
	// TODO(tlim): Try the json file.

	// Try the legacy file:
	fn := filepath.Join(bx.ConfigPath, "blackbox-admins.txt")
	bx.logDebug.Printf("Admins file: %q", fn)
	a, err := bbutil.ReadFileLines(fn)
	if err != nil {
		return fmt.Errorf("getAdmins can't load %q: %v", fn, err)
	}
	if !sort.StringsAreSorted(a) {
		return fmt.Errorf("file corrupt. Lines not sorted: %v", fn)
	}
	bx.Admins = a
	return nil
}
// getFiles populates Files and FilesSet from the config dir, memoizing
// the result. Each name from the legacy file is stored joined to
// bx.RepoBaseDir.
func (bx *Box) getFiles() error {
	// Memoized: skip the file read if the list is already populated.
	if len(bx.Files) != 0 {
		return nil
	}
	// TODO(tlim): Try the json file.

	// Try the legacy file:
	fn := filepath.Join(bx.ConfigPath, "blackbox-files.txt")
	bx.logDebug.Printf("Files file: %q", fn)
	a, err := bbutil.ReadFileLines(fn)
	if err != nil {
		return fmt.Errorf("getFiles can't load %q: %v", fn, err)
	}
	// The on-disk list must be sorted; other code binary-searches it.
	if !sort.StringsAreSorted(a) {
		return fmt.Errorf("file corrupt. Lines not sorted: %v", fn)
	}
	for _, n := range a {
		bx.Files = append(bx.Files, filepath.Join(bx.RepoBaseDir, n))
	}
	// FilesSet provides O(1) membership tests on the joined paths.
	bx.FilesSet = make(map[string]bool, len(bx.Files))
	for _, s := range bx.Files {
		bx.FilesSet[s] = true
	}
	return nil
}

224
pkg/box/boxutils.go Normal file
View File

@@ -0,0 +1,224 @@
package box
import (
"bufio"
"fmt"
"os"
"os/user"
"path/filepath"
"runtime"
"strconv"
"strings"
"github.com/StackExchange/blackbox/v2/pkg/makesafe"
)
// FileStatus returns the status of a file.
//
// Possible results:
//
//	DECRYPTED:   plaintext exists and is at least as new as the .gpg file.
//	ENCRYPTED:   .gpg file is newer than the plaintext (recently edited then encrypted).
//	BOTHMISSING: neither the plaintext nor the .gpg file exists.
//	SHREDDED:    plaintext is missing.
//	GPGMISSING:  the .gpg file is missing. Oops?
//	PLAINERROR:  can't access the plaintext file to determine status.
//	GPGERROR:    can't access the .gpg file to determine status.
func FileStatus(name string) (string, error) {
	plainStat, plainErr := os.Stat(name)
	gpgStat, gpgErr := os.Stat(name + ".gpg")

	switch {
	case plainErr == nil && gpgErr == nil:
		// Both exist: compare modification times.
		if plainStat.ModTime().Before(gpgStat.ModTime()) {
			return "ENCRYPTED", nil
		}
		return "DECRYPTED", nil
	case os.IsNotExist(plainErr) && os.IsNotExist(gpgErr):
		return "BOTHMISSING", nil
	case gpgErr != nil:
		if os.IsNotExist(gpgErr) {
			return "GPGMISSING", nil
		}
		return "GPGERROR", gpgErr
	case os.IsNotExist(plainErr):
		return "SHREDDED", nil
	}
	return "PLAINERROR", plainErr
}
// anyGpg returns an error if any of names ends in ".gpg".
// Users must specify the plaintext name; the ".gpg" suffix is implied.
func anyGpg(names []string) error {
	for _, name := range names {
		if strings.HasSuffix(name, ".gpg") {
			// BUG FIX: the message previously read "no not specify".
			return fmt.Errorf(
				"do not specify .gpg files. Specify %q not %q",
				strings.TrimSuffix(name, ".gpg"), name)
		}
	}
	return nil
}
// func isChanged(pname string) (bool, error) {
// // if .gpg exists but not plainfile: unchanged
// // if plaintext exists but not .gpg: changed
// // if plainfile < .gpg: unchanged
// // if plainfile > .gpg: don't know, need to try diff
// // Gather info about the files:
// pstat, perr := os.Stat(pname)
// if perr != nil && (!os.IsNotExist(perr)) {
// return false, fmt.Errorf("isChanged(%q) returned error: %w", pname, perr)
// }
// gname := pname + ".gpg"
// gstat, gerr := os.Stat(gname)
// if gerr != nil && (!os.IsNotExist(perr)) {
// return false, fmt.Errorf("isChanged(%q) returned error: %w", gname, gerr)
// }
// pexists := perr == nil
// gexists := gerr == nil
// // Use the above rules:
// // if .gpg exists but not plainfile: unchanged
// if gexists && !pexists {
// return false, nil
// }
// // if plaintext exists but not .gpg: changed
// if pexists && !gexists {
// return true, nil
// }
// // At this point we can conclude that both p and g exist.
// // Can't hurt to test that assertion.
// if (!pexists) && (!gexists) {
// return false, fmt.Errorf("Assertion failed. p and g should exist: pn=%q", pname)
// }
// pmodtime := pstat.ModTime()
// gmodtime := gstat.ModTime()
// // if plainfile < .gpg: unchanged
// if pmodtime.Before(gmodtime) {
// return false, nil
// }
// // if plainfile > .gpg: don't know, need to try diff
// return false, fmt.Errorf("Can not know for sure. Try git diff?")
// }
// parseGroup converts a group spec (a numeric gid or a group name) into
// a gid. Returns -1 and an error if the spec is empty or unresolvable.
func parseGroup(userinput string) (int, error) {
	if userinput == "" {
		return -1, fmt.Errorf("group spec is empty string")
	}

	// If it is a valid number, use it.
	i, err := strconv.Atoi(userinput)
	if err == nil {
		return i, nil
	}

	// If not a number, look it up by name.
	g, err := user.LookupGroup(userinput)
	if err == nil {
		// BUG FIX: previously the Atoi error was discarded, so a
		// malformed Gid would silently yield gid 0.
		i, err = strconv.Atoi(g.Gid)
		if err != nil {
			return -1, err
		}
		return i, nil
	}

	// Give up.
	return -1, err
}
// FindConfigDir tests various places until it finds the config dir,
// walking upward from "." toward the filesystem root. Candidates are
// ".blackbox-TEAM" (if team is set), ".blackbox", and the v1-compatible
// "keyrings/live". If we can't determine the relative path, "" is returned.
func FindConfigDir(reporoot, team string) (string, error) {
	candidates := []string{}
	if team != "" {
		candidates = append(candidates, ".blackbox-"+team)
	}
	candidates = append(candidates, ".blackbox")
	candidates = append(candidates, "keyrings/live")
	logDebug.Printf("DEBUG: candidates = %q\n", candidates)

	maxDirLevels := 30 // Prevent an infinite loop
	relpath := "."
	for i := 0; i < maxDirLevels; i++ {
		// Does relpath contain any of our directory names?
		for _, c := range candidates {
			t := filepath.Join(relpath, c)
			logDebug.Printf("Trying %q\n", t)
			fi, err := os.Stat(t)
			if err == nil && fi.IsDir() {
				return t, nil
			}
			if err == nil {
				// BUG FIX: this previously wrapped err with %w even though
				// err is always nil on this path.
				return "", fmt.Errorf("path %q is not a directory", t)
			}
			if !os.IsNotExist(err) {
				return "", fmt.Errorf("dirExists access error: %w", err)
			}
		}
		// If we are at the root, stop.
		if abs, _ := filepath.Abs(relpath); abs == "/" {
			break
		}
		// Try one directory up
		relpath = filepath.Join("..", relpath)
	}
	return "", fmt.Errorf("No .blackbox (or equiv) directory found")
}
// gpgAgentNotice warns the user and pauses for confirmation when no
// gpg-agent appears to be configured, since bulk operations would
// otherwise prompt for the passphrase many times.
func gpgAgentNotice() {
	// Is gpg-agent configured?
	if os.Getenv("GPG_AGENT_INFO") != "" {
		return
	}
	// Are we on macOS?
	if runtime.GOOS == "darwin" {
		// We assume the use of https://gpgtools.org, which
		// uses the keychain.
		return
	}

	// TODO(tlim): v1 verifies that "gpg-agent --version" outputs a version
	// string that is 2.1.0 or higher. It seems that 1.x is incompatible.

	fmt.Println("WARNING: You probably want to run gpg-agent as")
	fmt.Println("you will be asked for your passphrase many times.")
	fmt.Println("Example: $ eval $(gpg-agent --daemon)")
	fmt.Print("Press CTRL-C now to stop. ENTER to continue: ")
	// Block until the user presses ENTER (or interrupts).
	input := bufio.NewScanner(os.Stdin)
	input.Scan()
}
// shouldWeOverwrite warns that any existing plaintext files are about to
// be overwritten and blocks until the user presses ENTER (CTRL-C aborts).
func shouldWeOverwrite() {
	fmt.Println()
	fmt.Println("WARNING: This will overwrite any unencrypted files laying about.")
	fmt.Print("Press CTRL-C now to stop. ENTER to continue: ")
	input := bufio.NewScanner(os.Stdin)
	input.Scan()
}
// PrettyCommitMessage generates a pretty commit message of the form
// "verb: file1 file2 ..." with unsafe filenames redacted and long lists
// truncated by makesafe.FirstFewFlag.
func PrettyCommitMessage(verb string, files []string) string {
	if len(files) == 0 {
		// TODO(tlim): This use-case should probably be an error.
		return verb + " (no files)"
	}
	rfiles := makesafe.RedactMany(files)
	// NB: the truncated and non-truncated branches previously produced
	// identical output, so the redundant conditional was removed.
	m, _ := makesafe.FirstFewFlag(rfiles)
	return verb + ": " + m
}

35
pkg/box/pretty_test.go Normal file
View File

@@ -0,0 +1,35 @@
package box
import "testing"
// TestPrettyCommitMessage exercises PrettyCommitMessage with empty,
// short, unsafe (redacted), and over-long file lists.
func TestPrettyCommitMessage(t *testing.T) {
	long := "aVeryVeryLongLongLongStringStringString"
	for i, test := range []struct {
		data     []string
		expected string
	}{
		{[]string{}, `HEADING (no files)`},
		{[]string{"one"}, `HEADING: one`},
		{[]string{"one", "two"}, `HEADING: one two`},
		{[]string{"one", "two", "three"}, `HEADING: one two three`},
		{[]string{"one", "two", "three", "four"},
			`HEADING: one two three four`},
		{[]string{"one", "two", "three", "four", "five"},
			`HEADING: one two three four five`},
		// Names with whitespace are quoted or redacted.
		{[]string{"has spaces.txt"}, `HEADING: "has spaces.txt"`},
		{[]string{"two\n"}, `HEADING: "twoX"(redacted)`},
		{[]string{"smile😁eyes"}, `HEADING: smile😁eyes`},
		{[]string{"tab\ttab", "two very long strings.txt"},
			`HEADING: "tabXtab"(redacted) "two very long strings.txt"`},
		// Very long lists get truncated with "(and others)".
		{[]string{long, long, long, long},
			"HEADING: " + long + " " + long + " (and others)"},
	} {
		g := PrettyCommitMessage("HEADING", test.data)
		if g == test.expected {
			//t.Logf("%03d: PASSED files=%q\n", i, test.data)
			t.Logf("%03d: PASSED", i)
		} else {
			t.Errorf("%03d: FAILED files==%q got=(%q) wanted=(%q)\n", i, test.data, g, test.expected)
		}
	}
}

633
pkg/box/verbs.go Normal file
View File

@@ -0,0 +1,633 @@
package box
// This file implements the business logic related to a black box.
// These functions are usually called from cmd/blackbox/drive.go or
// external sytems that use box as a module.
import (
"bufio"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
"github.com/StackExchange/blackbox/v2/pkg/makesafe"
"github.com/olekukonko/tablewriter"
)
// AdminAdd adds admins.
// nom is the new admin's GnuPG key name; sdir is the source keyring
// directory handed to the crypter (may be empty).
func (bx *Box) AdminAdd(nom string, sdir string) error {
	err := bx.getAdmins()
	if err != nil {
		return err
	}

	//fmt.Printf("ADMINS=%q\n", bx.Admins)

	// Check for duplicates (bx.Admins is kept sorted).
	if i := sort.SearchStrings(bx.Admins, nom); i < len(bx.Admins) && bx.Admins[i] == nom {
		return fmt.Errorf("Admin %v already an admin", nom)
	}

	bx.logDebug.Printf("ADMIN ADD rbd=%q\n", bx.RepoBaseDir)
	// Import the new key into the repo keyring; returns keyring files
	// that may have changed and will need committing.
	changedFiles, err := bx.Crypter.AddNewKey(nom, bx.RepoBaseDir, sdir, bx.ConfigPath)
	if err != nil {
		return fmt.Errorf("AdminAdd failed AddNewKey: %v", err)
	}

	// TODO(tlim): Try the json file.

	// Try the legacy file:
	fn := filepath.Join(bx.ConfigPath, "blackbox-admins.txt")
	bx.logDebug.Printf("Admins file: %q", fn)
	err = bbutil.AddLinesToSortedFile(fn, nom)
	if err != nil {
		return fmt.Errorf("could not update file (%q,%q): %v", fn, nom, err)
	}

	changedFiles = append([]string{fn}, changedFiles...)
	// Queue the admin list and keyring changes for commit.
	bx.Vcs.NeedsCommit("NEW ADMIN: "+nom, bx.RepoBaseDir, changedFiles)
	return nil
}
// AdminList prints each admin id on its own line, in sorted order.
func (bx *Box) AdminList() error {
	if err := bx.getAdmins(); err != nil {
		return err
	}
	for _, admin := range bx.Admins {
		fmt.Println(admin)
	}
	return nil
}
// AdminRemove removes an id from the admin list.
// Not yet implemented; always returns an error.
func (bx *Box) AdminRemove([]string) error {
	return fmt.Errorf("NOT IMPLEMENTED: AdminRemove")
}
// Cat outputs a file, unencrypting if needed.
// Registered files are decrypted via the crypter; unregistered names are
// read from disk verbatim. Output goes to stdout with no separator
// between files.
func (bx *Box) Cat(names []string) error {
	if err := anyGpg(names); err != nil {
		return fmt.Errorf("cat: %w", err)
	}

	err := bx.getFiles()
	if err != nil {
		return err
	}
	for _, name := range names {
		var out []byte
		var err error
		if _, ok := bx.FilesSet[name]; ok {
			out, err = bx.Crypter.Cat(name)
		} else {
			out, err = ioutil.ReadFile(name)
		}
		if err != nil {
			// NOTE(review): "BX_CRY3" looks like a debugging marker;
			// confirm whether it should remain in released code.
			bx.logErr.Printf("BX_CRY3\n")
			return fmt.Errorf("cat: %w", err)
		}
		fmt.Print(string(out))
	}
	return nil
}
// Decrypt decrypts a file.
// If names is empty, all registered files are decrypted. bulkpause shows
// the gpg-agent warning first. setgroup (a group name or numeric gid)
// changes the group of each decrypted file.
func (bx *Box) Decrypt(names []string, overwrite bool, bulkpause bool, setgroup string) error {
	var err error

	if err := anyGpg(names); err != nil {
		return err
	}

	err = bx.getFiles()
	if err != nil {
		return err
	}

	if bulkpause {
		gpgAgentNotice()
	}

	// Resolve the optional group-change request up front so we fail
	// before any decryption happens.
	groupchange := false
	gid := -1
	if setgroup != "" {
		gid, err = parseGroup(setgroup)
		if err != nil {
			return fmt.Errorf("Invalid group name or gid: %w", err)
		}
		groupchange = true
	}
	bx.logDebug.Printf("DECRYPT GROUP %q %v,%v\n", setgroup, groupchange, gid)

	if len(names) == 0 {
		names = bx.Files
	}
	return decryptMany(bx, names, overwrite, groupchange, gid)
}
// decryptMany decrypts each named file in sequence. Unregistered files,
// and files whose plaintext already exists (unless overwrite is set), are
// skipped with a logged message rather than aborting the batch. When
// groupchange is set, each decrypted file's group is changed to gid.
func decryptMany(bx *Box, names []string, overwrite bool, groupchange bool, gid int) error {

	// TODO(tlim): If we want to decrypt them in parallel, go has a helper function
	// called "sync.WaitGroup()"" which would be useful here. We would probably
	// want to add a flag on the command line (stored in a field such as bx.ParallelMax)
	// that limits the amount of parallelism. The default for the flag should
	// probably be runtime.NumCPU().

	for _, name := range names {
		fmt.Printf("========== DECRYPTING %q\n", name)
		if !bx.FilesSet[name] {
			bx.logErr.Printf("Skipping %q: File not registered with Blackbox", name)
			continue
		}
		if (!overwrite) && bbutil.FileExistsOrProblem(name) {
			bx.logErr.Printf("Skipping %q: Will not overwrite existing file", name)
			continue
		}

		// TODO(tlim) v1 detects zero-length files and removes them, even
		// if overwrite is disabled. I don't think anyone has ever used that
		// feature. That said, if we want to do that, we would implement it here.

		// TODO(tlim) v1 takes the md5 hash of the plaintext before it decrypts,
		// then compares the new plaintext's md5. It prints "EXTRACTED" if
		// there is a change.

		err := bx.Crypter.Decrypt(name, bx.Umask, overwrite)
		if err != nil {
			bx.logErr.Printf("%q: %v", name, err)
			continue
		}

		// FIXME(tlim): Clone the file perms from the .gpg file to the plaintext file.

		if groupchange {
			// FIXME(tlim): Also "chmod g+r" the file.
			// BUG FIX: the error from os.Chown was previously discarded.
			if err := os.Chown(name, -1, gid); err != nil {
				bx.logErr.Printf("chown %q to gid %d: %v", name, gid, err)
			}
		}
	}
	return nil
}
// Diff is a placeholder for a future diff subcommand.
// Not yet implemented; always returns an error.
func (bx *Box) Diff([]string) error {
	return fmt.Errorf("NOT IMPLEMENTED: Diff")
}
// Edit decrypts each registered file if its plaintext is missing, then
// invokes the configured editor on it.
// NOTE(review): the original description said "calls encrypt", but no
// re-encryption happens here after the editor exits — confirm whether
// the caller is expected to run Encrypt afterwards.
func (bx *Box) Edit(names []string) error {

	if err := anyGpg(names); err != nil {
		return err
	}

	err := bx.getFiles()
	if err != nil {
		return err
	}

	for _, name := range names {
		if _, ok := bx.FilesSet[name]; ok {
			// Registered file: decrypt it first unless the plaintext
			// already exists (never overwrite a working copy).
			if !bbutil.FileExistsOrProblem(name) {
				err := bx.Crypter.Decrypt(name, bx.Umask, false)
				if err != nil {
					return fmt.Errorf("edit failed %q: %w", name, err)
				}
			}
		}
		err := bbutil.RunBash(bx.Editor, name)
		if err != nil {
			return err
		}
	}

	return nil
}
// Encrypt encrypts a file.
// If names is empty, all registered files are encrypted. When shred is
// true the plaintext is shredded after each successful encryption.
func (bx *Box) Encrypt(names []string, shred bool) error {
	var err error

	if err = anyGpg(names); err != nil {
		return err
	}

	err = bx.getAdmins()
	if err != nil {
		return err
	}

	err = bx.getFiles()
	if err != nil {
		return err
	}
	if len(names) == 0 {
		names = bx.Files
	}

	// Queue the successfully encrypted files for commit even if some
	// individual encryptions failed; enames holds only the successes.
	enames, err := encryptMany(bx, names, shred)

	bx.Vcs.NeedsCommit(
		PrettyCommitMessage("ENCRYPTED", names),
		bx.RepoBaseDir,
		enames,
	)

	return err
}
// encryptMany encrypts each named file and returns the list of encrypted
// (.gpg) filenames produced. Unregistered or missing files are skipped
// with a logged message rather than aborting the batch. When shred is
// true the plaintext of each successfully encrypted file is shredded.
func encryptMany(bx *Box, names []string, shred bool) ([]string, error) {
	var enames []string
	for _, name := range names {
		fmt.Printf("========== ENCRYPTING %q\n", name)
		if !bx.FilesSet[name] {
			bx.logErr.Printf("Skipping %q: File not registered with Blackbox", name)
			continue
		}
		if !bbutil.FileExistsOrProblem(name) {
			bx.logErr.Printf("Skipping. Plaintext does not exist: %q", name)
			continue
		}
		ename, err := bx.Crypter.Encrypt(name, bx.Umask, bx.Admins)
		if err != nil {
			bx.logErr.Printf("Failed to encrypt %q: %v", name, err)
			continue
		}
		enames = append(enames, ename)
		if shred {
			// BUG FIX: the error from Shred was previously discarded.
			if err := bx.Shred([]string{name}); err != nil {
				bx.logErr.Printf("Error while shredding %q: %v", name, err)
			}
		}
	}
	return enames, nil
}
// FileAdd enrolls files.
// Each file is encrypted, its name is appended (sorted) to
// blackbox-files.txt, the plaintext is shredded, and all resulting
// changes are queued for commit.
func (bx *Box) FileAdd(names []string, shred bool) error {
	bx.logDebug.Printf("FileAdd(shred=%v, %v)", shred, names)

	// Check for dups.
	// Encrypt them all.
	// If that succeeds, add to the blackbox-files.txt file.
	// (optionally) shred the plaintext.

	// FIXME(tlim): Check if the plaintext is in GIT. If it is,
	// remove it from Git and print a warning that they should
	// eliminate the history or rotate any secrets.

	if err := anyGpg(names); err != nil {
		return err
	}

	err := bx.getAdmins()
	if err != nil {
		return err
	}
	err = bx.getFiles()
	if err != nil {
		return err
	}
	// NB: a second, redundant anyGpg(names) check was removed here.

	// Check for newlines (the legacy list format is one name per line).
	for _, n := range names {
		if strings.ContainsAny(n, "\n") {
			// BUG FIX: this message previously read "contains a newlineregistered".
			return fmt.Errorf("file %q contains a newline", n)
		}
	}

	// Check for duplicates (bx.Files is sorted).
	// NOTE(review): bx.Files holds paths joined to RepoBaseDir (see
	// getFiles) while names are as given by the caller — confirm the
	// two forms match here.
	for _, n := range names {
		if i := sort.SearchStrings(bx.Files, n); i < len(bx.Files) && bx.Files[i] == n {
			return fmt.Errorf("file %q already registered", n)
		}
	}

	// Encrypt
	var needsCommit []string
	for _, name := range names {
		s, err := bx.Crypter.Encrypt(name, bx.Umask, bx.Admins)
		if err != nil {
			// BUG FIX: this message was copy-pasted from AdminAdd and
			// mentioned the wrong operation.
			return fmt.Errorf("FileAdd failed to encrypt %q: %v", name, err)
		}
		needsCommit = append(needsCommit, s)
	}

	// TODO(tlim): Try the json file.

	// Try the legacy file:
	fn := filepath.Join(bx.ConfigPath, "blackbox-files.txt")
	bx.logDebug.Printf("Files file: %q", fn)
	err = bbutil.AddLinesToSortedFile(fn, names...)
	if err != nil {
		return fmt.Errorf("could not update file (%q,%q): %v", fn, names, err)
	}

	// NOTE(review): the shred parameter only affects logging; the
	// plaintext is always shredded here — confirm this is intended.
	err = bx.Shred(names)
	if err != nil {
		bx.logErr.Printf("Error while shredding: %v", err)
	}

	bx.Vcs.CommitTitle("BLACKBOX ADD FILE: " + makesafe.FirstFew(makesafe.ShellMany(names)))
	bx.Vcs.IgnoreFiles(bx.RepoBaseDir, names)
	bx.Vcs.NeedsCommit(
		PrettyCommitMessage("blackbox-files.txt add", names),
		bx.RepoBaseDir,
		append([]string{filepath.Join(bx.ConfigPath, "blackbox-files.txt")}, needsCommit...),
	)
	return nil
}
// FileList prints each registered file path on its own line.
func (bx *Box) FileList() error {
	if err := bx.getFiles(); err != nil {
		return err
	}
	for _, f := range bx.Files {
		fmt.Println(f)
	}
	return nil
}
// FileRemove de-enrolls files.
// Not yet implemented; always returns an error.
func (bx *Box) FileRemove(names []string) error {
	return fmt.Errorf("NOT IMPLEMENTED: FileRemove")
}
// Info prints debugging info.
// Failures loading the file/admin lists are logged rather than fatal so
// that "blackbox info" works on a partially-initialized repo.
func (bx *Box) Info() error {

	err := bx.getFiles()
	if err != nil {
		bx.logErr.Printf("Info getFiles: %v", err)
	}

	err = bx.getAdmins()
	if err != nil {
		bx.logErr.Printf("Info getAdmins: %v", err)
	}

	fmt.Println("BLACKBOX:")
	fmt.Printf(" Debug: %v\n", bx.Debug)
	fmt.Printf(" Team: %q\n", bx.Team)
	fmt.Printf(" RepoBaseDir: %q\n", bx.RepoBaseDir)
	fmt.Printf(" ConfigPath: %q\n", bx.ConfigPath)
	fmt.Printf(" Umask: %04o\n", bx.Umask)
	fmt.Printf(" Editor: %v\n", bx.Editor)
	fmt.Printf(" Shredder: %v\n", bbutil.ShredInfo())
	fmt.Printf(" Admins: count=%v\n", len(bx.Admins))
	fmt.Printf(" Files: count=%v\n", len(bx.Files))
	fmt.Printf(" FilesSet: count=%v\n", len(bx.FilesSet))
	fmt.Printf(" Vcs: %v\n", bx.Vcs)
	// NOTE(review): Name() calls below panic if Vcs or Crypter is nil —
	// confirm all callers populate both handles.
	fmt.Printf(" VcsName: %q\n", bx.Vcs.Name())
	fmt.Printf(" Crypter: %v\n", bx.Crypter)
	fmt.Printf(" CrypterName: %q\n", bx.Crypter.Name())

	return nil
}
// Init initializes a repo for blackbox use.
// yes=="yes" skips the interactive confirmation. vcsname is not used
// here (the VCS was already discovered and stored on bx.Vcs).
func (bx *Box) Init(yes, vcsname string) error {
	fmt.Printf("VCS root is: %q\n", bx.RepoBaseDir)
	fmt.Printf("team is: %q\n", bx.Team)
	fmt.Printf("configdir will be: %q\n", bx.ConfigPath)

	if yes != "yes" {
		// Confirm interactively. Accept anything strconv.ParseBool
		// accepts, plus any answer starting with 'y' or 'Y'.
		fmt.Printf("Enable blackbox for this %v repo? (yes/no)? ", bx.Vcs.Name())
		input := bufio.NewScanner(os.Stdin)
		input.Scan()
		ans := input.Text()
		b, err := strconv.ParseBool(ans)
		if err != nil {
			b = false
			if len(ans) > 0 {
				if ans[0] == 'y' || ans[0] == 'Y' {
					b = true
				}
			}
		}
		if !b {
			fmt.Println("Ok. Maybe some other time.")
			return nil
		}
	}

	// Create the config dir: owner+group access, no world access.
	err := os.Mkdir(bx.ConfigPath, 0o750)
	if err != nil {
		return err
	}

	// Create empty admin and file lists and register them with the VCS.
	ba := filepath.Join(bx.ConfigPath, "blackbox-admins.txt")
	bf := filepath.Join(bx.ConfigPath, "blackbox-files.txt")
	bbutil.Touch(ba)
	bbutil.Touch(bf)
	bx.Vcs.SetFileTypeUnix(bx.RepoBaseDir, ba, bf)

	// Ignore GnuPG scratch/secret files anywhere in the repo.
	bx.Vcs.IgnoreAnywhere(bx.RepoBaseDir, []string{
		"pubring.gpg~",
		"pubring.kbx~",
		"secring.gpg",
	})

	fs := []string{ba, bf}
	bx.Vcs.NeedsCommit(
		"NEW: "+strings.Join(makesafe.RedactMany(fs), " "),
		bx.RepoBaseDir,
		fs,
	)

	bx.Vcs.CommitTitle("INITIALIZE BLACKBOX")

	return nil
}
// Reencrypt decrypts and reencrypts files.
// If names is empty, all registered files are processed. Unless
// overwrite is set, the user is warned and asked to confirm before any
// existing plaintext is shredded. Plaintext is always shredded after the
// re-encryption completes.
func (bx *Box) Reencrypt(names []string, overwrite bool, bulkpause bool) error {

	allFiles := false

	if err := anyGpg(names); err != nil {
		return err
	}
	if err := bx.getAdmins(); err != nil {
		return err
	}
	if err := bx.getFiles(); err != nil {
		return err
	}
	if len(names) == 0 {
		names = bx.Files
		allFiles = true
	}

	if bulkpause {
		gpgAgentNotice()
	}

	fmt.Println("========== blackbox administrators are:")
	bx.AdminList()
	fmt.Println("========== (the above people will be able to access the file)")

	if overwrite {
		bbutil.ShredFiles(names)
	} else {
		// List any plaintext files that would be clobbered, then ask
		// the user to confirm before proceeding.
		warned := false
		for _, n := range names {
			if bbutil.FileExistsOrProblem(n) {
				if !warned {
					fmt.Printf("========== Shred these files?\n")
					warned = true
				}
				fmt.Println("SHRED?", n)
			}
		}
		if warned {
			shouldWeOverwrite()
		}
	}

	// Decrypt
	if err := decryptMany(bx, names, overwrite, false, 0); err != nil {
		return fmt.Errorf("reencrypt failed decrypt: %w", err)
	}
	enames, err := encryptMany(bx, names, false)
	if err != nil {
		return fmt.Errorf("reencrypt failed encrypt: %w", err)
	}
	if err := bbutil.ShredFiles(names); err != nil {
		return fmt.Errorf("reencrypt failed shred: %w", err)
	}

	if allFiles {
		// If the "--all" flag was used, don't try to list all the files.
		bx.Vcs.NeedsCommit(
			"REENCRYPT all files",
			bx.RepoBaseDir,
			enames,
		)
	} else {
		bx.Vcs.NeedsCommit(
			PrettyCommitMessage("REENCRYPT", names),
			bx.RepoBaseDir,
			enames,
		)
	}

	return nil
}
// Shred securely deletes the plaintext of each named file.
// If names is empty, all registered files are shredded.
func (bx *Box) Shred(names []string) error {
	if err := anyGpg(names); err != nil {
		return err
	}
	// getFiles also verifies we are inside an initialized repo.
	if err := bx.getFiles(); err != nil {
		return err
	}
	if len(names) == 0 {
		names = bx.Files
	}
	return bbutil.ShredFiles(names)
}
// Status prints the status of files.
// If names is empty, all registered files are reported. nameOnly prints
// just the names (with any error appended) instead of a table. match, if
// non-empty, filters the report to files with exactly that status.
func (bx *Box) Status(names []string, nameOnly bool, match string) error {

	err := bx.getFiles()
	if err != nil {
		return err
	}

	var flist []string
	if len(names) == 0 {
		flist = bx.Files
	} else {
		flist = names
	}

	var data [][]string
	var onlylist []string
	// Only add an "Error" column to the table when some file reported one.
	thirdColumn := false

	for _, name := range flist {
		var stat string
		var err error
		if _, ok := bx.FilesSet[name]; ok {
			stat, err = FileStatus(name)
		} else {
			stat, err = "NOTREG", nil
		}
		if (match == "") || (stat == match) {
			if err == nil {
				data = append(data, []string{stat, name})
				onlylist = append(onlylist, name)
			} else {
				// BUG FIX: this previously assigned an always-false
				// variable (tcData), so the "Error" header never showed.
				thirdColumn = true
				data = append(data, []string{stat, name, fmt.Sprintf("%v", err)})
				onlylist = append(onlylist, fmt.Sprintf("%v: %v", name, err))
			}
		}
	}

	if nameOnly {
		fmt.Println(strings.Join(onlylist, "\n"))
		return nil
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetAutoWrapText(false)
	if thirdColumn {
		table.SetHeader([]string{"Status", "Name", "Error"})
	} else {
		table.SetHeader([]string{"Status", "Name"})
	}
	for _, v := range data {
		table.Append(v)
	}
	table.Render() // Send output

	return nil
}
// TestingInitRepo initializes a repo.
// Uses bx.Vcs to create ".git" or whatever.
// Uses bx.Vcs to discover what was created, testing its work.
func (bx *Box) TestingInitRepo() error {

	if bx.Vcs == nil {
		// NewForTestingInit leaves Vcs nil when no provider matched.
		fmt.Println("bx.Vcs is nil")
		fmt.Printf("BLACKBOX_VCS=%q\n", os.Getenv("BLACKBOX_VCS"))
		os.Exit(1)
	}

	fmt.Printf("ABOUT TO CALL TestingInitRepo\n")
	fmt.Printf("vcs = %v\n", bx.Vcs.Name())
	err := bx.Vcs.TestingInitRepo()
	fmt.Printf("RETURNED from TestingInitRepo: %v\n", err)
	// NOTE(review): this prints both return values of os.Getwd (the
	// directory and the error) — confirm that is intended.
	fmt.Println(os.Getwd())
	if err != nil {
		return fmt.Errorf("TestingInitRepo returned: %w", err)
	}
	if b, _ := bx.Vcs.Discover(); !b {
		return fmt.Errorf("TestingInitRepo failed Discovery")
	}
	return nil
}

View File

@@ -0,0 +1,84 @@
package commitlater
import (
"fmt"
)
// future describes one commit to be performed later.
type future struct {
	message string   // Message that describes this transaction.
	dir     string   // Basedir of the files
	files   []string // Names of the files
	// NOTE(review): display is not populated anywhere in the code shown
	// in this package — confirm whether it is still needed.
	display []string // Names as to be displayed to the user
}

// List of futures to be done in the future.
type List struct {
	items []*future
}
// Add queues up a future commit.
func (list *List) Add(message string, repobasedir string, files []string) {
	list.items = append(list.items, &future{
		message: message,
		dir:     repobasedir,
		files:   files,
	})
}
// sameDirs reports whether every queued item shares the same base
// directory. Lists with zero or one item trivially qualify.
func sameDirs(l *List) bool {
	if len(l.items) == 0 {
		return true
	}
	first := l.items[0].dir
	for _, item := range l.items {
		if item.dir != first {
			return false
		}
	}
	return true
}
// Flush executes queued commits.
// If title is empty, fewer than two items are queued, or the items span
// multiple base directories, each future is staged and committed
// individually. Otherwise everything is combined into one commit whose
// message is title followed by one " * " bullet per queued message.
// fadd stages files; fcommit commits (messages, dir, files).
func (list *List) Flush(
	title string,
	fadd func([]string) error,
	fcommit func([]string, string, []string) error,
) error {
	// Just list the individual commit commands.
	if title == "" || len(list.items) < 2 || !sameDirs(list) {
		for _, fut := range list.items {
			err := fadd(fut.files)
			if err != nil {
				return fmt.Errorf("add files1 (%q) failed: %w", fut.files, err)
			}
			err = fcommit([]string{fut.message}, fut.dir, fut.files)
			if err != nil {
				return fmt.Errorf("commit files (%q) failed: %w", fut.files, err)
			}
		}
		return nil
	}

	// Create a long commit message.
	var m []string
	var f []string
	for _, fut := range list.items {
		err := fadd(fut.files)
		if err != nil {
			return fmt.Errorf("add files2 (%q) failed: %w", fut.files, err)
		}
		m = append(m, fut.message)
		f = append(f, fut.files...)
	}
	msg := []string{title}
	for _, mm := range m {
		msg = append(msg, " * "+mm)
	}
	// All items share a dir (checked above), so use the first one's.
	err := fcommit(msg, list.items[0].dir, f)
	if err != nil {
		return fmt.Errorf("commit files (%q) failed: %w", f, err)
	}
	return nil
}

5
pkg/crypters/_all/all.go Normal file
View File

@@ -0,0 +1,5 @@
package all
import (
_ "github.com/StackExchange/blackbox/v2/pkg/crypters/gnupg"
)

58
pkg/crypters/crypters.go Normal file
View File

@@ -0,0 +1,58 @@
package crypters
import (
"sort"
"strings"
"github.com/StackExchange/blackbox/v2/models"
)
// Crypter is the handle for a crypto backend (see models.Crypter).
type Crypter interface {
	models.Crypter
}

// NewFnSig is the constructor signature a backend registers.
type NewFnSig func(debug bool) (Crypter, error)

// Item stores one registered crypto backend.
type Item struct {
	Name     string
	New      NewFnSig
	Priority int
}

// Catalog is the list of registered crypto backends, kept sorted by
// descending Priority (see Register).
var Catalog []*Item
// SearchByName returns a Crypter handle for name.
// The search is case insensitive. Returns nil when no registered backend
// matches, or when the matching backend fails to initialize.
func SearchByName(name string, debug bool) Crypter {
	want := strings.ToLower(name)
	for _, item := range Catalog {
		if strings.ToLower(item.Name) != want {
			continue
		}
		handle, err := item.New(debug)
		if err != nil {
			// Treat an initialization failure the same as "not found".
			return nil
		}
		return handle
	}
	return nil
}
// Register adds a crypto backend to the Catalog, keeping the catalog
// sorted by descending Priority.
func Register(name string, priority int, newfn NewFnSig) {
	Catalog = append(Catalog, &Item{
		Name:     name,
		New:      newfn,
		Priority: priority,
	})
	// Highest priority first.
	sort.Slice(Catalog, func(i, j int) bool { return Catalog[j].Priority < Catalog[i].Priority })
}

179
pkg/crypters/gnupg/gnupg.go Normal file
View File

@@ -0,0 +1,179 @@
package gnupg
import (
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"github.com/StackExchange/blackbox/v2/pkg/bblog"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
"github.com/StackExchange/blackbox/v2/pkg/crypters"
)
// pluginName is the name this crypter registers itself under.
var pluginName = "GnuPG"

// init registers the GnuPG backend with priority 100.
func init() {
	crypters.Register(pluginName, 100, registerNew)
}
// CrypterHandle is the handle for the GnuPG crypto backend.
type CrypterHandle struct {
	GPGCmd   string // Path to the gpg binary ("gpg2" preferred, else "gpg").
	logErr   *log.Logger
	logDebug *log.Logger
}
// registerNew constructs a CrypterHandle, locating the gpg binary.
// "gpg2" is preferred, falling back to "gpg"; if neither is on $PATH,
// the literal "gpg2" is stored and the command will fail when invoked.
func registerNew(debug bool) (crypters.Crypter, error) {

	crypt := &CrypterHandle{
		logErr:   bblog.GetErr(),
		logDebug: bblog.GetDebug(debug),
	}

	// Which binary to use?
	path, err := exec.LookPath("gpg2")
	if err != nil {
		path, err = exec.LookPath("gpg")
		if err != nil {
			path = "gpg2"
		}
	}
	crypt.GPGCmd = path

	return crypt, nil
}
// Name returns this plugin's name ("GnuPG").
func (crypt CrypterHandle) Name() string {
	return pluginName
}
// Decrypt name+".gpg", possibly overwriting name.
// The plaintext is written with the given umask in effect; overwrite
// adds gpg's --yes flag so an existing plaintext is replaced.
func (crypt CrypterHandle) Decrypt(filename string, umask int, overwrite bool) error {
	a := []string{
		"--use-agent",
		"-q",
		"--decrypt",
		"-o", filename,
	}
	if overwrite {
		a = append(a, "--yes")
	}
	a = append(a, filename+".gpg")

	// Temporarily apply the requested umask so the plaintext is created
	// with the desired permissions.
	oldumask := bbutil.Umask(umask)
	err := bbutil.RunBash(crypt.GPGCmd, a...)
	bbutil.Umask(oldumask)
	return err
}
// Cat returns the decrypted contents of filename+".gpg" or, if that file
// is missing, the plaintext contents of filename.
func (crypt CrypterHandle) Cat(filename string) ([]byte, error) {
	a := []string{
		"--use-agent",
		"-q",
		"--decrypt",
	}

	// TODO(tlim): This assumes the entire gpg file fits in memory. If
	// this becomes a problem, re-implement this using exec Cmd.StdinPipe()
	// and feed the input in chunks.
	in, err := ioutil.ReadFile(filename + ".gpg")
	if err != nil {
		if os.IsNotExist(err) {
			// Encrypted file doesn't exist? Return the plaintext.
			return ioutil.ReadFile(filename)
		}
		return nil, err
	}
	return bbutil.RunBashInputOutput(in, crypt.GPGCmd, a...)
}
// Encrypt encrypts filename to each of receivers, overwriting
// filename+".gpg". The output is written with the given umask in effect.
// Returns the name of the encrypted file.
func (crypt CrypterHandle) Encrypt(filename string, umask int, receivers []string) (string, error) {
	var err error
	crypt.logDebug.Printf("Encrypt(%q, %d, %q)", filename, umask, receivers)

	encrypted := filename + ".gpg"
	a := []string{
		"--use-agent",
		"--yes",
		"--trust-model=always",
		"--encrypt",
		"-o", encrypted,
	}
	for _, f := range receivers {
		a = append(a, "-r", f)
	}
	// NB: a redundant second "--encrypt" flag was removed here; the flag
	// is already present in the argument list above.
	a = append(a, filename)
	//err = bbutil.RunBash("ls", "-la")

	oldumask := bbutil.Umask(umask)
	crypt.logDebug.Printf("Args = %q", a)
	err = bbutil.RunBash(crypt.GPGCmd, a...)
	bbutil.Umask(oldumask)

	return encrypted, err
}
// AddNewKey extracts keyname from sourcedir's GnuPG chain to destdir keychain.
// It returns a list of files (relative to repobasedir when possible)
// that may have changed.
func (crypt CrypterHandle) AddNewKey(keyname, repobasedir, sourcedir, destdir string) ([]string, error) {

	// $GPG --homedir="$2" --export -a "$KEYNAME" >"$pubkeyfile"
	args := []string{
		"--export",
		"-a",
	}
	if sourcedir != "" {
		args = append(args, "--homedir", sourcedir)
	}
	args = append(args, keyname)
	crypt.logDebug.Printf("ADDNEWKEY: Extracting key=%v: gpg, %v\n", keyname, args)
	// CONSISTENCY FIX: use the discovered binary (crypt.GPGCmd) instead of
	// a hard-coded "gpg", matching Encrypt/Decrypt/Cat above.
	pubkey, err := bbutil.RunBashOutput(crypt.GPGCmd, args...)
	if err != nil {
		return nil, err
	}
	if len(pubkey) == 0 {
		return nil, fmt.Errorf("Nothing found when %q exported from %q", keyname, sourcedir)
	}

	// $GPG --no-permission-warning --homedir="$KEYRINGDIR" --import "$pubkeyfile"
	args = []string{
		"--no-permission-warning",
		"--homedir", destdir,
		"--import",
	}
	crypt.logDebug.Printf("ADDNEWKEY: Importing: gpg %v\n", args)
	// fmt.Printf("DEBUG: crypter ADD %q", args)
	err = bbutil.RunBashInput(pubkey, crypt.GPGCmd, args...)
	if err != nil {
		return nil, fmt.Errorf("AddNewKey failed: %w", err)
	}

	// Suggest: ${pubring_path} trustdb.gpg blackbox-admins.txt
	var changed []string

	// Prefix each file with the relative path to it.
	prefix, err := filepath.Rel(repobasedir, destdir)
	if err != nil {
		//fmt.Printf("FAIL (%v) (%v) (%v)\n", repobasedir, destdir, err)
		// Fall back to the absolute destdir when no relative path exists.
		prefix = destdir
	}
	// Report only the keyring files that actually exist.
	for _, file := range []string{"pubring.gpg", "pubring.kbx", "trustdb.gpg"} {
		path := filepath.Join(destdir, file)
		if bbutil.FileExistsOrProblem(path) {
			changed = append(changed, filepath.Join(prefix, file))
		}
	}

	return changed, nil
}

Some files were not shown because too many files have changed in this diff Show More