Implement blackbox in Golang (#250)

* Initial release
Tom Limoncelli
2020-07-24 14:21:33 -04:00
committed by GitHub
parent e049c02655
commit 1c77c87555
86 changed files with 6074 additions and 22 deletions

pkg/bblog/bblog.go Normal file

@@ -0,0 +1,48 @@
package bblog
import (
"io/ioutil"
"log"
"os"
)
/*
To use this, include the following lines in your .go file.
var logErr *log.Logger
var logDebug *log.Logger
func init() {
logErr = bblog.GetErr()
logDebug = bblog.GetDebug(debug)
}
Or in a function:
logErr := bblog.GetErr()
logDebug := bblog.GetDebug(debug)
logDebug.Printf("whatever: %v", err)
*/
var logErr *log.Logger
var logDebug *log.Logger
// GetErr returns a logger handle used for errors
func GetErr() *log.Logger {
if logErr == nil {
logErr = log.New(os.Stderr, "", 0)
}
return logErr
}
// GetDebug returns a logger handle used for debug info (output is discarded if visible=false)
func GetDebug(visible bool) *log.Logger {
if visible {
logDebug = log.New(os.Stderr, "", 0)
} else {
// Invisible mode (i.e. display nothing)
logDebug = log.New(ioutil.Discard, "", 0)
}
return logDebug
}

pkg/bbutil/filestats.go Normal file

@@ -0,0 +1,130 @@
package bbutil
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strings"
"time"
)
// DirExists returns true if directory exists.
func DirExists(path string) (bool, error) {
stat, err := os.Stat(path)
if err == nil {
return stat.IsDir(), nil
}
if os.IsNotExist(err) {
return false, nil
}
return true, err
}
// FileExistsOrProblem returns true if the file exists or if we can't determine its existence.
func FileExistsOrProblem(path string) bool {
_, err := os.Stat(path)
if err == nil {
return true
}
if os.IsNotExist(err) {
return false
}
return true
}
// Touch updates the timestamp of a file.
func Touch(name string) error {
var err error
_, err = os.Stat(name)
if os.IsNotExist(err) {
file, err := os.Create(name)
if err != nil {
return fmt.Errorf("TouchFile failed: %w", err)
}
file.Close()
}
currentTime := time.Now().Local()
return os.Chtimes(name, currentTime, currentTime)
}
// ReadFileLines is like ioutil.ReadFile() but returns an []string.
func ReadFileLines(filename string) ([]string, error) {
b, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
s := string(b)
s = strings.TrimSuffix(s, "\n")
if s == "" {
return []string{}, nil
}
l := strings.Split(s, "\n")
return l, nil
}
// AddLinesToSortedFile adds a line to a sorted file.
func AddLinesToSortedFile(filename string, newlines ...string) error {
lines, err := ReadFileLines(filename)
//fmt.Printf("DEBUG: read=%q\n", lines)
if err != nil {
return fmt.Errorf("AddLinesToSortedFile can't read %q: %w", filename, err)
}
if !sort.StringsAreSorted(lines) {
return fmt.Errorf("AddLinesToSortedFile: file wasn't sorted: %v", filename)
}
lines = append(lines, newlines...)
sort.Strings(lines)
contents := strings.Join(lines, "\n") + "\n"
//fmt.Printf("DEBUG: write=%q\n", contents)
err = ioutil.WriteFile(filename, []byte(contents), 0o660)
if err != nil {
return fmt.Errorf("AddLinesToSortedFile can't write %q: %w", filename, err)
}
return nil
}
// AddLinesToFile adds lines to the end of a file.
func AddLinesToFile(filename string, newlines ...string) error {
lines, err := ReadFileLines(filename)
if err != nil {
return fmt.Errorf("AddLinesToFile can't read %q: %w", filename, err)
}
lines = append(lines, newlines...)
contents := strings.Join(lines, "\n") + "\n"
err = ioutil.WriteFile(filename, []byte(contents), 0o660)
if err != nil {
return fmt.Errorf("AddLinesToFile can't write %q: %w", filename, err)
}
return nil
}
// FindDirInParent looks for target in CWD, or .., or ../.., etc.
func FindDirInParent(target string) (string, error) {
// Prevent an infinite loop by only doing "cd .." this many times
maxDirLevels := 30
relpath := "."
for i := 0; i < maxDirLevels; i++ {
// Does relpath contain our target?
t := filepath.Join(relpath, target)
//logDebug.Printf("Trying %q\n", t)
_, err := os.Stat(t)
if err == nil {
return t, nil
}
if !os.IsNotExist(err) {
return "", fmt.Errorf("stat failed FindDirInParent (%q): %w", t, err)
}
// Ok, it really wasn't found.
// If we are at the root, stop.
if abs, err := filepath.Abs(relpath); err == nil && abs == "/" {
break
}
// Try one directory up
relpath = filepath.Join("..", relpath)
}
return "", fmt.Errorf("Not found")
}
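/*
A minimal usage sketch of these helpers (hypothetical caller; the directory
and admin names are illustrative assumptions, not defined by this package):

	dir, err := bbutil.FindDirInParent(".blackbox") // walks up from the CWD
	if err != nil {
		return err
	}
	// Append an entry while keeping blackbox-admins.txt sorted.
	err = bbutil.AddLinesToSortedFile(
		filepath.Join(dir, "blackbox-admins.txt"), "alice@example.com")
*/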

pkg/bbutil/rbio_test.go Normal file

@@ -0,0 +1,21 @@
package bbutil
import (
"testing"
)
func TestRunBashInputOutput(t *testing.T) {
in := "This is a test of the RBIO system.\n"
bin := []byte(in)
out, err := RunBashInputOutput(bin, "cat")
sout := string(out)
if err != nil {
t.Error(err)
}
if in != sout {
t.Errorf("not equal %q %q", in, out)
}
}

pkg/bbutil/runbash.go Normal file

@@ -0,0 +1,77 @@
package bbutil
import (
"bytes"
"fmt"
"log"
"os"
"os/exec"
)
// RunBash runs a Bash command.
func RunBash(command string, args ...string) error {
cmd := exec.Command(command, args...)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
err := cmd.Start()
if err != nil {
log.Fatal(err)
}
err = cmd.Wait()
if err != nil {
return fmt.Errorf("RunBash cmd=%q err=%w", command, err)
}
return nil
}
// RunBashOutput runs a Bash command, captures output.
func RunBashOutput(command string, args ...string) (string, error) {
cmd := exec.Command(command, args...)
cmd.Stdin = os.Stdin
cmd.Stderr = os.Stderr
out, err := cmd.Output()
if err != nil {
return "", fmt.Errorf("RunBashOutput err=%w", err)
}
return string(out), err
}
// RunBashOutputSilent runs a Bash command, captures output, discards stderr.
func RunBashOutputSilent(command string, args ...string) (string, error) {
cmd := exec.Command(command, args...)
cmd.Stdin = os.Stdin
// Leave cmd.Stderr unmodified and stderr is discarded.
out, err := cmd.Output()
if err != nil {
return "", fmt.Errorf("RunBashOutputSilent err=%w", err)
}
return string(out), err
}
// RunBashInput runs a Bash command, sends input on stdin.
func RunBashInput(input string, command string, args ...string) error {
cmd := exec.Command(command, args...)
cmd.Stdin = bytes.NewBuffer([]byte(input))
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
err := cmd.Run()
if err != nil {
return fmt.Errorf("RunBashInput err=%w", err)
}
return nil
}
// RunBashInputOutput runs a Bash command, sends input on stdin.
func RunBashInputOutput(input []byte, command string, args ...string) ([]byte, error) {
cmd := exec.Command(command, args...)
cmd.Stdin = bytes.NewBuffer(input)
cmd.Stderr = os.Stderr
out, err := cmd.Output()
if err != nil {
return nil, fmt.Errorf("RunBashInputOutput err=%w", err)
}
return out, nil
}
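/*
A minimal usage sketch (hypothetical commands; any external program works the
same way). Despite the names, these helpers run the command directly via
os/exec rather than through a shell:

	// Run a command, streaming its stdout/stderr to the user.
	err := bbutil.RunBash("git", "status")

	// Capture stdout instead.
	top, err := bbutil.RunBashOutput("git", "rev-parse", "--show-toplevel")

	// Pipe bytes through a command and capture what it prints.
	out, err := bbutil.RunBashInputOutput([]byte("hello\n"), "tr", "a-z", "A-Z")
*/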

pkg/bbutil/shred.go Normal file

@@ -0,0 +1,109 @@
package bbutil
// Pick an appropriate secure erase command for this operating system
// or just delete the file with os.Remove().
// Code rewritten based on https://codereview.stackexchange.com/questions/245072
import (
"fmt"
"io/ioutil"
"os"
"os/exec"
)
var shredCmds = []struct {
name, opts string
}{
{"sdelete", "-a"},
{"shred", "-u"},
{"srm", "-f"},
{"rm", "-Pf"},
}
func shredTemp(path, opts string) error {
file, err := ioutil.TempFile("", "shredTemp.")
if err != nil {
return err
}
filename := file.Name()
defer os.Remove(filename)
defer file.Close()
err = file.Close()
if err != nil {
return err
}
err = RunBash(path, opts, filename)
if err != nil {
return err
}
return nil
}
var shredPath, shredOpts = func() (string, string) {
for _, cmd := range shredCmds {
path, err := exec.LookPath(cmd.name)
if err != nil {
continue
}
err = shredTemp(path, cmd.opts)
if err == nil {
return path, cmd.opts
}
}
return "", ""
}()
// ShredInfo reveals the shred command and flags (for "blackbox info")
func ShredInfo() string {
return shredPath + " " + shredOpts
}
// shredFile shreds one file.
func shredFile(filename string) error {
fi, err := os.Stat(filename)
if err != nil {
return err
}
if !fi.Mode().IsRegular() {
err := fmt.Errorf("filename is not mode regular")
return err
}
if shredPath == "" {
// No secure erase command found. Default to a normal file delete.
// TODO(tlim): Print a warning? Have a flag that causes this to be an error?
return os.Remove(filename)
}
err = RunBash(shredPath, shredOpts, filename)
if err != nil {
return err
}
return nil
}
// ShredFiles securely erases a list of files.
func ShredFiles(names []string) error {
// TODO(tlim) DO the shredding in parallel like in v1.
var eerr error
for _, n := range names {
_, err := os.Stat(n)
if err != nil {
if os.IsNotExist(err) {
fmt.Printf("======= already gone: %q\n", n)
continue
}
}
fmt.Printf("========== SHREDDING: %q\n", n)
e := shredFile(n)
if e != nil {
eerr = e
fmt.Printf("ERROR: %v\n", e)
}
}
return eerr
}
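/*
A minimal usage sketch (hypothetical file names):

	fmt.Println("shredder:", bbutil.ShredInfo()) // whichever command the init above selected
	if err := bbutil.ShredFiles([]string{"secrets.txt", "old.key"}); err != nil {
		// Each failure is printed as it happens; the last error is returned.
		log.Printf("shred: %v", err)
	}
*/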


@@ -0,0 +1,66 @@
package bbutil
import (
"io/ioutil"
"os"
"testing"
)
func TestAddLinesToSortedFile(t *testing.T) {
var tests = []struct {
start string
add []string
expected string
}{
{
"",
[]string{"one"},
"one\n",
},
{
"begin\ntwo\n",
[]string{"at top"},
"at top\nbegin\ntwo\n",
},
{
"begin\ntwo\n",
[]string{"zbottom"},
"begin\ntwo\nzbottom\n",
},
{
"begin\ntwo\n",
[]string{"middle"},
"begin\nmiddle\ntwo\n",
},
}
for i, test := range tests {
content := []byte(test.start)
tmpfile, err := ioutil.TempFile("", "example")
if err != nil {
t.Fatal(err)
}
tmpfilename := tmpfile.Name()
defer os.Remove(tmpfilename)
if _, err := tmpfile.Write(content); err != nil {
t.Fatal(err)
}
if err := tmpfile.Close(); err != nil {
t.Fatal(err)
}
AddLinesToSortedFile(tmpfilename, test.add...)
expected := test.expected
got, err := ioutil.ReadFile(tmpfilename)
if err != nil {
t.Fatal(err)
}
if expected != string(got) {
t.Errorf("test %v: contents wrong:\nexpected: %q\n got: %q", i, expected, got)
}
os.Remove(tmpfilename)
}
}

pkg/box/box.go Normal file

@@ -0,0 +1,233 @@
package box
// box implements the box model.
import (
"fmt"
"log"
"os"
"path/filepath"
"sort"
"strings"
"github.com/StackExchange/blackbox/v2/pkg/bblog"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
"github.com/StackExchange/blackbox/v2/pkg/crypters"
"github.com/StackExchange/blackbox/v2/pkg/vcs"
"github.com/urfave/cli/v2"
)
var logErr *log.Logger
var logDebug *log.Logger
// Box describes what we know about a box.
type Box struct {
// Paths:
Team string // Name of the team (i.e. .blackbox-$TEAM)
RepoBaseDir string // Rel path to the VCS repo.
ConfigPath string // Abs or Rel path to the .blackbox (or whatever) directory.
ConfigRO bool // True if we should not try to change files in ConfigPath.
// Settings:
Umask int // umask to set when decrypting
Editor string // Editor to call
Debug bool // Are we in debug logging mode?
// Cache of data gathered from .blackbox:
Admins []string // If non-empty, the list of admins.
Files []string // If non-empty, the list of files.
FilesSet map[string]bool // If non-nil, a set of Files.
// Handles to interfaces:
Vcs vcs.Vcs // Interface access to the VCS.
Crypter crypters.Crypter // Interface access to GPG.
logErr *log.Logger
logDebug *log.Logger
}
// StatusMode is a type of query.
type StatusMode int
const (
// Itemized queries individual files by name.
Itemized StatusMode = iota
// All queries all registered files.
All
// Unchanged queries only files that are unchanged.
Unchanged
// Changed queries only files that have changed.
Changed
)
// NewFromFlags creates a box using items from flags. Nearly all subcommands use this.
func NewFromFlags(c *cli.Context) *Box {
// The goal of this is to create a fully-populated box (and box.Vcs)
// so that all subcommands have all the fields and interfaces they need
// to do their job.
logErr = bblog.GetErr()
logDebug = bblog.GetDebug(c.Bool("debug"))
bx := &Box{
Umask: c.Int("umask"),
Editor: c.String("editor"),
Team: c.String("team"),
logErr: bblog.GetErr(),
logDebug: bblog.GetDebug(c.Bool("debug")),
Debug: c.Bool("debug"),
}
// Discover which kind of VCS is in use, and the repo root.
bx.Vcs, bx.RepoBaseDir = vcs.Discover()
// Discover the crypto backend (GnuPG, go-openpgp, etc.)
bx.Crypter = crypters.SearchByName(c.String("crypto"), c.Bool("debug"))
if bx.Crypter == nil {
fmt.Printf("ERROR! No CRYPTER found! Please set --crypto correctly or use the damn default\n")
os.Exit(1)
}
// Find the .blackbox (or equiv.) directory.
var err error
configFlag := c.String("config")
if configFlag != "" {
// Flag is set. Better make sure it is valid.
if !filepath.IsAbs(configFlag) {
fmt.Printf("config flag value is a relative path. Too risky. Exiting.\n")
os.Exit(1)
// NB(tlim): We could return filepath.Abs(config) or maybe it just
// works as is. I don't know, and until we have a use case to prove
// it out, it's best to just not implement this.
}
bx.ConfigPath = configFlag
bx.ConfigRO = true // External configs treated as read-only.
// TODO(tlim): We could get fancy here and set ConfigReadOnly=true only
// if we are sure configFlag is not within bx.RepoBaseDir. Again, I'd
// like to see a use-case before we implement this.
return bx
}
// Normal path. Flag not set, so we discover the path.
bx.ConfigPath, err = FindConfigDir(bx.RepoBaseDir, c.String("team"))
if err != nil && c.Command.Name != "info" {
fmt.Printf("Can't find .blackbox or equiv. Have you run init?\n")
os.Exit(1)
}
return bx
}
// NewUninitialized creates a box in a pre-init situation.
func NewUninitialized(c *cli.Context) *Box {
/*
This is for "blackbox init" (used before ".blackbox*" exists)
Init needs: How we populate it:
bx.Vcs: Discovered by calling each plug-in until succeeds.
bx.ConfigPath: Generated algorithmically (it doesn't exist yet).
*/
bx := &Box{
Umask: c.Int("umask"),
Editor: c.String("editor"),
Team: c.String("team"),
logErr: bblog.GetErr(),
logDebug: bblog.GetDebug(c.Bool("debug")),
Debug: c.Bool("debug"),
}
bx.Vcs, bx.RepoBaseDir = vcs.Discover()
if c.String("configdir") == "" {
rel := ".blackbox"
if bx.Team != "" {
rel = ".blackbox-" + bx.Team
}
bx.ConfigPath = filepath.Join(bx.RepoBaseDir, rel)
} else {
// Wait. The user is using the --config flag on a repo that
// hasn't been created yet? I hope this works!
fmt.Printf("ERROR: You can not set --config when initializing a new repo. Please run this command from within a repo, with no --config flag. Or, file a bug explaining your use caseyour use-case. Exiting!\n")
os.Exit(1)
// TODO(tlim): We could get fancy here and query the Vcs to see if the
// path would fall within the repo, figure out the relative path, and
// use that value. (and error if configflag is not within the repo).
// That would be error prone and would only help the zero users that
// ever see the above error message.
}
return bx
}
// NewForTestingInit creates a box in a bare environment.
func NewForTestingInit(vcsname string) *Box {
/*
This is for "blackbox test_init" (secret command used in integration tests; when nothing exists)
TestingInitRepo only uses bx.Vcs, so that's all we set.
Populates bx.Vcs by finding the provider named vcsname.
*/
bx := &Box{}
// Find the VCS plug-in whose name matches vcsname.
var vh vcs.Vcs
var err error
vcsname = strings.ToLower(vcsname)
for _, v := range vcs.Catalog {
if strings.ToLower(v.Name) == vcsname {
vh, err = v.New()
if err != nil {
return nil // No idea how that would happen.
}
}
}
bx.Vcs = vh
return bx
}
func (bx *Box) getAdmins() error {
// Memoized
if len(bx.Admins) != 0 {
return nil
}
// TODO(tlim): Try the json file.
// Try the legacy file:
fn := filepath.Join(bx.ConfigPath, "blackbox-admins.txt")
bx.logDebug.Printf("Admins file: %q", fn)
a, err := bbutil.ReadFileLines(fn)
if err != nil {
return fmt.Errorf("getAdmins can't load %q: %v", fn, err)
}
if !sort.StringsAreSorted(a) {
return fmt.Errorf("file corrupt. Lines not sorted: %v", fn)
}
bx.Admins = a
return nil
}
// getFiles populates Files and FileMap.
func (bx *Box) getFiles() error {
if len(bx.Files) != 0 {
return nil
}
// TODO(tlim): Try the json file.
// Try the legacy file:
fn := filepath.Join(bx.ConfigPath, "blackbox-files.txt")
bx.logDebug.Printf("Files file: %q", fn)
a, err := bbutil.ReadFileLines(fn)
if err != nil {
return fmt.Errorf("getFiles can't load %q: %v", fn, err)
}
if !sort.StringsAreSorted(a) {
return fmt.Errorf("file corrupt. Lines not sorted: %v", fn)
}
for _, n := range a {
bx.Files = append(bx.Files, filepath.Join(bx.RepoBaseDir, n))
}
bx.FilesSet = make(map[string]bool, len(bx.Files))
for _, s := range bx.Files {
bx.FilesSet[s] = true
}
return nil
}

pkg/box/boxutils.go Normal file

@@ -0,0 +1,224 @@
package box
import (
"bufio"
"fmt"
"os"
"os/user"
"path/filepath"
"runtime"
"strconv"
"strings"
"github.com/StackExchange/blackbox/v2/pkg/makesafe"
)
// FileStatus returns the status of a file.
func FileStatus(name string) (string, error) {
/*
DECRYPTED: File is decrypted and ready to edit (unknown if it has been edited).
ENCRYPTED: GPG file is newer than plaintext. Indicates it was recently edited, then encrypted.
BOTHMISSING: Neither the plaintext nor the .gpg file exists.
SHREDDED: Plaintext is missing.
GPGMISSING: The .gpg file is missing. Oops?
PLAINERROR: Can't access the plaintext file to determine status.
GPGERROR: Can't access .gpg file to determine status.
(A small usage sketch follows this function.)
*/
p := name
e := p + ".gpg"
ps, perr := os.Stat(p)
es, eerr := os.Stat(e)
if perr == nil && eerr == nil {
if ps.ModTime().Before(es.ModTime()) {
return "ENCRYPTED", nil
}
return "DECRYPTED", nil
}
if os.IsNotExist(perr) && os.IsNotExist(eerr) {
return "BOTHMISSING", nil
}
if eerr != nil {
if os.IsNotExist(eerr) {
return "GPGMISSING", nil
}
return "GPGERROR", eerr
}
if perr != nil {
if os.IsNotExist(perr) {
return "SHREDDED", nil
}
}
return "PLAINERROR", perr
}
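// A small sketch of interpreting FileStatus (the file name is hypothetical):
//
//	st, err := FileStatus("config.yml")
//	switch {
//	case err != nil:
//		// PLAINERROR or GPGERROR: a stat() failed for a reason other than "not exist".
//	case st == "DECRYPTED":
//		// Plaintext is present and at least as new as config.yml.gpg.
//	case st == "ENCRYPTED":
//		// config.yml.gpg is newer than the plaintext.
//	case st == "SHREDDED":
//		// Only config.yml.gpg exists; the plaintext has been removed.
//	}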
func anyGpg(names []string) error {
for _, name := range names {
if strings.HasSuffix(name, ".gpg") {
return fmt.Errorf(
"no not specify .gpg files. Specify %q not %q",
strings.TrimSuffix(name, ".gpg"), name)
}
}
return nil
}
// func isChanged(pname string) (bool, error) {
// // if .gpg exists but not plainfile: unchanged
// // if plaintext exists but not .gpg: changed
// // if plainfile < .gpg: unchanged
// // if plainfile > .gpg: don't know, need to try diff
// // Gather info about the files:
// pstat, perr := os.Stat(pname)
// if perr != nil && (!os.IsNotExist(perr)) {
// return false, fmt.Errorf("isChanged(%q) returned error: %w", pname, perr)
// }
// gname := pname + ".gpg"
// gstat, gerr := os.Stat(gname)
// if gerr != nil && (!os.IsNotExist(perr)) {
// return false, fmt.Errorf("isChanged(%q) returned error: %w", gname, gerr)
// }
// pexists := perr == nil
// gexists := gerr == nil
// // Use the above rules:
// // if .gpg exists but not plainfile: unchanged
// if gexists && !pexists {
// return false, nil
// }
// // if plaintext exists but not .gpg: changed
// if pexists && !gexists {
// return true, nil
// }
// // At this point we can conclude that both p and g exist.
// // Can't hurt to test that assertion.
// if (!pexists) && (!gexists) {
// return false, fmt.Errorf("Assertion failed. p and g should exist: pn=%q", pname)
// }
// pmodtime := pstat.ModTime()
// gmodtime := gstat.ModTime()
// // if plainfile < .gpg: unchanged
// if pmodtime.Before(gmodtime) {
// return false, nil
// }
// // if plainfile > .gpg: don't know, need to try diff
// return false, fmt.Errorf("Can not know for sure. Try git diff?")
// }
func parseGroup(userinput string) (int, error) {
if userinput == "" {
return -1, fmt.Errorf("group spec is empty string")
}
// If it is a valid number, use it.
i, err := strconv.Atoi(userinput)
if err == nil {
return i, nil
}
// If not a number, look it up by name.
g, err := user.LookupGroup(userinput)
if err == nil {
return strconv.Atoi(g.Gid)
}
// Give up.
return -1, err
}
// FindConfigDir tests various places until it finds the config dir.
// If we can't determine the relative path, "" is returned.
func FindConfigDir(reporoot, team string) (string, error) {
candidates := []string{}
if team != "" {
candidates = append(candidates, ".blackbox-"+team)
}
candidates = append(candidates, ".blackbox")
candidates = append(candidates, "keyrings/live")
logDebug.Printf("DEBUG: candidates = %q\n", candidates)
maxDirLevels := 30 // Prevent an infinite loop
relpath := "."
for i := 0; i < maxDirLevels; i++ {
// Does relpath contain any of our directory names?
for _, c := range candidates {
t := filepath.Join(relpath, c)
logDebug.Printf("Trying %q\n", t)
fi, err := os.Stat(t)
if err == nil && fi.IsDir() {
return t, nil
}
if err == nil {
return "", fmt.Errorf("path %q is not a directory: %w", t, err)
}
if !os.IsNotExist(err) {
return "", fmt.Errorf("dirExists access error: %w", err)
}
}
// If we are at the root, stop.
if abs, _ := filepath.Abs(relpath); abs == "/" {
break
}
// Try one directory up
relpath = filepath.Join("..", relpath)
}
return "", fmt.Errorf("No .blackbox (or equiv) directory found")
}
func gpgAgentNotice() {
// Is gpg-agent configured?
if os.Getenv("GPG_AGENT_INFO") != "" {
return
}
// Are we on macOS?
if runtime.GOOS == "darwin" {
// We assume the use of https://gpgtools.org, which
// uses the keychain.
return
}
// TODO(tlim): v1 verifies that "gpg-agent --version" outputs a version
// string that is 2.1.0 or higher. It seems that 1.x is incompatible.
fmt.Println("WARNING: You probably want to run gpg-agent as")
fmt.Println("you will be asked for your passphrase many times.")
fmt.Println("Example: $ eval $(gpg-agent --daemon)")
fmt.Print("Press CTRL-C now to stop. ENTER to continue: ")
input := bufio.NewScanner(os.Stdin)
input.Scan()
}
func shouldWeOverwrite() {
fmt.Println()
fmt.Println("WARNING: This will overwrite any unencrypted files laying about.")
fmt.Print("Press CTRL-C now to stop. ENTER to continue: ")
input := bufio.NewScanner(os.Stdin)
input.Scan()
}
// PrettyCommitMessage generates a pretty commit message.
func PrettyCommitMessage(verb string, files []string) string {
if len(files) == 0 {
// This use-case should probably be an error.
return verb + " (no files)"
}
rfiles := makesafe.RedactMany(files)
m, truncated := makesafe.FirstFewFlag(rfiles)
if truncated {
return verb + ": " + m
}
return verb + ": " + m
}

pkg/box/pretty_test.go Normal file

@@ -0,0 +1,35 @@
package box
import "testing"
func TestPrettyCommitMessage(t *testing.T) {
long := "aVeryVeryLongLongLongStringStringString"
for i, test := range []struct {
data []string
expected string
}{
{[]string{}, `HEADING (no files)`},
{[]string{"one"}, `HEADING: one`},
{[]string{"one", "two"}, `HEADING: one two`},
{[]string{"one", "two", "three"}, `HEADING: one two three`},
{[]string{"one", "two", "three", "four"},
`HEADING: one two three four`},
{[]string{"one", "two", "three", "four", "five"},
`HEADING: one two three four five`},
{[]string{"has spaces.txt"}, `HEADING: "has spaces.txt"`},
{[]string{"two\n"}, `HEADING: "twoX"(redacted)`},
{[]string{"smile😁eyes"}, `HEADING: smile😁eyes`},
{[]string{"tab\ttab", "two very long strings.txt"},
`HEADING: "tabXtab"(redacted) "two very long strings.txt"`},
{[]string{long, long, long, long},
"HEADING: " + long + " " + long + " " + long + " " + long + " ... " + long + "\n " + long + "\n " + long + "\n " + long + "\n"},
} {
g := PrettyCommitMessage("HEADING", test.data)
if g == test.expected {
//t.Logf("%03d: PASSED files=%q\n", i, test.data)
t.Logf("%03d: PASSED", i)
} else {
t.Errorf("%03d: FAILED files==%q got=(%q) wanted=(%q)\n", i, test.data, g, test.expected)
}
}
}

pkg/box/verbs.go Normal file

@@ -0,0 +1,633 @@
package box
// This file implements the business logic related to a black box.
// These functions are usually called from cmd/blackbox/drive.go or
// external systems that use box as a module.
import (
"bufio"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
"github.com/StackExchange/blackbox/v2/pkg/makesafe"
"github.com/olekukonko/tablewriter"
)
// AdminAdd adds admins.
func (bx *Box) AdminAdd(nom string, sdir string) error {
err := bx.getAdmins()
if err != nil {
return err
}
//fmt.Printf("ADMINS=%q\n", bx.Admins)
// Check for duplicates.
if i := sort.SearchStrings(bx.Admins, nom); i < len(bx.Admins) && bx.Admins[i] == nom {
return fmt.Errorf("Admin %v already an admin", nom)
}
bx.logDebug.Printf("ADMIN ADD rbd=%q\n", bx.RepoBaseDir)
changedFiles, err := bx.Crypter.AddNewKey(nom, bx.RepoBaseDir, sdir, bx.ConfigPath)
if err != nil {
return fmt.Errorf("AdminAdd failed AddNewKey: %v", err)
}
// TODO(tlim): Try the json file.
// Try the legacy file:
fn := filepath.Join(bx.ConfigPath, "blackbox-admins.txt")
bx.logDebug.Printf("Admins file: %q", fn)
err = bbutil.AddLinesToSortedFile(fn, nom)
if err != nil {
return fmt.Errorf("could not update file (%q,%q): %v", fn, nom, err)
}
changedFiles = append([]string{fn}, changedFiles...)
bx.Vcs.NeedsCommit("NEW ADMIN: "+nom, bx.RepoBaseDir, changedFiles)
return nil
}
// AdminList lists the admin id's.
func (bx *Box) AdminList() error {
err := bx.getAdmins()
if err != nil {
return err
}
for _, v := range bx.Admins {
fmt.Println(v)
}
return nil
}
// AdminRemove removes an id from the admin list.
func (bx *Box) AdminRemove([]string) error {
return fmt.Errorf("NOT IMPLEMENTED: AdminRemove")
}
// Cat outputs a file, unencrypting if needed.
func (bx *Box) Cat(names []string) error {
if err := anyGpg(names); err != nil {
return fmt.Errorf("cat: %w", err)
}
err := bx.getFiles()
if err != nil {
return err
}
for _, name := range names {
var out []byte
var err error
if _, ok := bx.FilesSet[name]; ok {
out, err = bx.Crypter.Cat(name)
} else {
out, err = ioutil.ReadFile(name)
}
if err != nil {
bx.logErr.Printf("BX_CRY3\n")
return fmt.Errorf("cat: %w", err)
}
fmt.Print(string(out))
}
return nil
}
// Decrypt decrypts a file.
func (bx *Box) Decrypt(names []string, overwrite bool, bulkpause bool, setgroup string) error {
var err error
if err := anyGpg(names); err != nil {
return err
}
err = bx.getFiles()
if err != nil {
return err
}
if bulkpause {
gpgAgentNotice()
}
groupchange := false
gid := -1
if setgroup != "" {
gid, err = parseGroup(setgroup)
if err != nil {
return fmt.Errorf("Invalid group name or gid: %w", err)
}
groupchange = true
}
bx.logDebug.Printf("DECRYPT GROUP %q %v,%v\n", setgroup, groupchange, gid)
if len(names) == 0 {
names = bx.Files
}
return decryptMany(bx, names, overwrite, groupchange, gid)
}
func decryptMany(bx *Box, names []string, overwrite bool, groupchange bool, gid int) error {
// TODO(tlim): If we want to decrypt them in parallel, go has a helper function
// called "sync.WaitGroup()"" which would be useful here. We would probably
// want to add a flag on the command line (stored in a field such as bx.ParallelMax)
// that limits the amount of parallelism. The default for the flag should
// probably be runtime.NumCPU(). (A sketch of such a loop follows this function.)
for _, name := range names {
fmt.Printf("========== DECRYPTING %q\n", name)
if !bx.FilesSet[name] {
bx.logErr.Printf("Skipping %q: File not registered with Blackbox", name)
continue
}
if (!overwrite) && bbutil.FileExistsOrProblem(name) {
bx.logErr.Printf("Skipping %q: Will not overwrite existing file", name)
continue
}
// TODO(tlim) v1 detects zero-length files and removes them, even
// if overwrite is disabled. I don't think anyone has ever used that
// feature. That said, if we want to do that, we would implement it here.
// TODO(tlim) v1 takes the md5 hash of the plaintext before it decrypts,
// then compares the new plaintext's md5. It prints "EXTRACTED" if
// there is a change.
err := bx.Crypter.Decrypt(name, bx.Umask, overwrite)
if err != nil {
bx.logErr.Printf("%q: %v", name, err)
continue
}
// FIXME(tlim): Clone the file perms from the .gpg file to the plaintext file.
if groupchange {
// FIXME(tlim): Also "chmod g+r" the file.
os.Chown(name, -1, gid)
}
}
return nil
}
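// A sketch of the bounded-parallel loop described in the TODO above
// (assumptions: a ParallelMax-style limit and that Crypter.Decrypt is safe to
// call concurrently; neither is established by this commit):
//
//	var wg sync.WaitGroup
//	sem := make(chan struct{}, runtime.NumCPU())
//	for _, name := range names {
//		wg.Add(1)
//		sem <- struct{}{}
//		go func(name string) {
//			defer wg.Done()
//			defer func() { <-sem }()
//			if err := bx.Crypter.Decrypt(name, bx.Umask, overwrite); err != nil {
//				bx.logErr.Printf("%q: %v", name, err)
//			}
//		}(name)
//	}
//	wg.Wait()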
// Diff ...
func (bx *Box) Diff([]string) error {
return fmt.Errorf("NOT IMPLEMENTED: Diff")
}
// Edit unencrypts, calls editor, calls encrypt.
func (bx *Box) Edit(names []string) error {
if err := anyGpg(names); err != nil {
return err
}
err := bx.getFiles()
if err != nil {
return err
}
for _, name := range names {
if _, ok := bx.FilesSet[name]; ok {
if !bbutil.FileExistsOrProblem(name) {
err := bx.Crypter.Decrypt(name, bx.Umask, false)
if err != nil {
return fmt.Errorf("edit failed %q: %w", name, err)
}
}
}
err := bbutil.RunBash(bx.Editor, name)
if err != nil {
return err
}
}
return nil
}
// Encrypt encrypts a file.
func (bx *Box) Encrypt(names []string, shred bool) error {
var err error
if err = anyGpg(names); err != nil {
return err
}
err = bx.getAdmins()
if err != nil {
return err
}
err = bx.getFiles()
if err != nil {
return err
}
if len(names) == 0 {
names = bx.Files
}
enames, err := encryptMany(bx, names, shred)
bx.Vcs.NeedsCommit(
PrettyCommitMessage("ENCRYPTED", names),
bx.RepoBaseDir,
enames,
)
return err
}
func encryptMany(bx *Box, names []string, shred bool) ([]string, error) {
var enames []string
for _, name := range names {
fmt.Printf("========== ENCRYPTING %q\n", name)
if !bx.FilesSet[name] {
bx.logErr.Printf("Skipping %q: File not registered with Blackbox", name)
continue
}
if !bbutil.FileExistsOrProblem(name) {
bx.logErr.Printf("Skipping. Plaintext does not exist: %q", name)
continue
}
ename, err := bx.Crypter.Encrypt(name, bx.Umask, bx.Admins)
if err != nil {
bx.logErr.Printf("Failed to encrypt %q: %v", name, err)
continue
}
enames = append(enames, ename)
if shred {
bx.Shred([]string{name})
}
}
return enames, nil
}
// FileAdd enrolls files.
func (bx *Box) FileAdd(names []string, shred bool) error {
bx.logDebug.Printf("FileAdd(shred=%v, %v)", shred, names)
// Check for dups.
// Encrypt them all.
// If that succeeds, add to the blackbox-files.txt file.
// (optionally) shred the plaintext.
// FIXME(tlim): Check if the plaintext is in GIT. If it is,
// remove it from Git and print a warning that they should
// eliminate the history or rotate any secrets.
if err := anyGpg(names); err != nil {
return err
}
err := bx.getAdmins()
if err != nil {
return err
}
err = bx.getFiles()
if err != nil {
return err
}
if err := anyGpg(names); err != nil {
return err
}
// Check for newlines
for _, n := range names {
if strings.ContainsAny(n, "\n") {
return fmt.Errorf("file %q contains a newlineregistered", n)
}
}
// Check for duplicates.
for _, n := range names {
if i := sort.SearchStrings(bx.Files, n); i < len(bx.Files) && bx.Files[i] == n {
return fmt.Errorf("file %q already registered", n)
}
}
// Encrypt
var needsCommit []string
for _, name := range names {
s, err := bx.Crypter.Encrypt(name, bx.Umask, bx.Admins)
if err != nil {
return fmt.Errorf("AdminAdd failed AddNewKey: %v", err)
}
needsCommit = append(needsCommit, s)
}
// TODO(tlim): Try the json file.
// Try the legacy file:
fn := filepath.Join(bx.ConfigPath, "blackbox-files.txt")
bx.logDebug.Printf("Files file: %q", fn)
err = bbutil.AddLinesToSortedFile(fn, names...)
if err != nil {
return fmt.Errorf("could not update file (%q,%q): %v", fn, names, err)
}
err = bx.Shred(names)
if err != nil {
bx.logErr.Printf("Error while shredding: %v", err)
}
bx.Vcs.CommitTitle("BLACKBOX ADD FILE: " + makesafe.FirstFew(makesafe.ShellMany(names)))
bx.Vcs.IgnoreFiles(bx.RepoBaseDir, names)
bx.Vcs.NeedsCommit(
PrettyCommitMessage("blackbox-files.txt add", names),
bx.RepoBaseDir,
append([]string{filepath.Join(bx.ConfigPath, "blackbox-files.txt")}, needsCommit...),
)
return nil
}
// FileList lists the files.
func (bx *Box) FileList() error {
err := bx.getFiles()
if err != nil {
return err
}
for _, v := range bx.Files {
fmt.Println(v)
}
return nil
}
// FileRemove de-enrolls files.
func (bx *Box) FileRemove(names []string) error {
return fmt.Errorf("NOT IMPLEMENTED: FileRemove")
}
// Info prints debugging info.
func (bx *Box) Info() error {
err := bx.getFiles()
if err != nil {
bx.logErr.Printf("Info getFiles: %v", err)
}
err = bx.getAdmins()
if err != nil {
bx.logErr.Printf("Info getAdmins: %v", err)
}
fmt.Println("BLACKBOX:")
fmt.Printf(" Debug: %v\n", bx.Debug)
fmt.Printf(" Team: %q\n", bx.Team)
fmt.Printf(" RepoBaseDir: %q\n", bx.RepoBaseDir)
fmt.Printf(" ConfigPath: %q\n", bx.ConfigPath)
fmt.Printf(" Umask: %04o\n", bx.Umask)
fmt.Printf(" Editor: %v\n", bx.Editor)
fmt.Printf(" Shredder: %v\n", bbutil.ShredInfo())
fmt.Printf(" Admins: count=%v\n", len(bx.Admins))
fmt.Printf(" Files: count=%v\n", len(bx.Files))
fmt.Printf(" FilesSet: count=%v\n", len(bx.FilesSet))
fmt.Printf(" Vcs: %v\n", bx.Vcs)
fmt.Printf(" VcsName: %q\n", bx.Vcs.Name())
fmt.Printf(" Crypter: %v\n", bx.Crypter)
fmt.Printf(" CrypterName: %q\n", bx.Crypter.Name())
return nil
}
// Init initializes a repo.
func (bx *Box) Init(yes, vcsname string) error {
fmt.Printf("VCS root is: %q\n", bx.RepoBaseDir)
fmt.Printf("team is: %q\n", bx.Team)
fmt.Printf("configdir will be: %q\n", bx.ConfigPath)
if yes != "yes" {
fmt.Printf("Enable blackbox for this %v repo? (yes/no)? ", bx.Vcs.Name())
input := bufio.NewScanner(os.Stdin)
input.Scan()
ans := input.Text()
b, err := strconv.ParseBool(ans)
if err != nil {
b = false
if len(ans) > 0 {
if ans[0] == 'y' || ans[0] == 'Y' {
b = true
}
}
}
if !b {
fmt.Println("Ok. Maybe some other time.")
return nil
}
}
err := os.Mkdir(bx.ConfigPath, 0o750)
if err != nil {
return err
}
ba := filepath.Join(bx.ConfigPath, "blackbox-admins.txt")
bf := filepath.Join(bx.ConfigPath, "blackbox-files.txt")
bbutil.Touch(ba)
bbutil.Touch(bf)
bx.Vcs.SetFileTypeUnix(bx.RepoBaseDir, ba, bf)
bx.Vcs.IgnoreAnywhere(bx.RepoBaseDir, []string{
"pubring.gpg~",
"pubring.kbx~",
"secring.gpg",
})
fs := []string{ba, bf}
bx.Vcs.NeedsCommit(
"NEW: "+strings.Join(makesafe.RedactMany(fs), " "),
bx.RepoBaseDir,
fs,
)
bx.Vcs.CommitTitle("INITIALIZE BLACKBOX")
return nil
}
// Reencrypt decrypts and reencrypts files.
func (bx *Box) Reencrypt(names []string, overwrite bool, bulkpause bool) error {
allFiles := false
if err := anyGpg(names); err != nil {
return err
}
if err := bx.getAdmins(); err != nil {
return err
}
if err := bx.getFiles(); err != nil {
return err
}
if len(names) == 0 {
names = bx.Files
allFiles = true
}
if bulkpause {
gpgAgentNotice()
}
fmt.Println("========== blackbox administrators are:")
bx.AdminList()
fmt.Println("========== (the above people will be able to access the file)")
if overwrite {
bbutil.ShredFiles(names)
} else {
warned := false
for _, n := range names {
if bbutil.FileExistsOrProblem(n) {
if !warned {
fmt.Printf("========== Shred these files?\n")
warned = true
}
fmt.Println("SHRED?", n)
}
}
if warned {
shouldWeOverwrite()
}
}
// Decrypt
if err := decryptMany(bx, names, overwrite, false, 0); err != nil {
return fmt.Errorf("reencrypt failed decrypt: %w", err)
}
enames, err := encryptMany(bx, names, false)
if err != nil {
return fmt.Errorf("reencrypt failed encrypt: %w", err)
}
if err := bbutil.ShredFiles(names); err != nil {
return fmt.Errorf("reencrypt failed shred: %w", err)
}
if allFiles {
// If the "--all" flag was used, don't try to list all the files.
bx.Vcs.NeedsCommit(
"REENCRYPT all files",
bx.RepoBaseDir,
enames,
)
} else {
bx.Vcs.NeedsCommit(
PrettyCommitMessage("REENCRYPT", names),
bx.RepoBaseDir,
enames,
)
}
return nil
}
// Shred shreds files.
func (bx *Box) Shred(names []string) error {
if err := anyGpg(names); err != nil {
return err
}
err := bx.getFiles()
// Calling getFiles() has the benefit of making sure we are in a repo.
if err != nil {
return err
}
if len(names) == 0 {
names = bx.Files
}
return bbutil.ShredFiles(names)
}
// Status prints the status of files.
func (bx *Box) Status(names []string, nameOnly bool, match string) error {
err := bx.getFiles()
if err != nil {
return err
}
var flist []string
if len(names) == 0 {
flist = bx.Files
} else {
flist = names
}
var data [][]string
var onlylist []string
thirdColumn := false
for _, name := range flist {
var stat string
var err error
if _, ok := bx.FilesSet[name]; ok {
stat, err = FileStatus(name)
} else {
stat, err = "NOTREG", nil
}
if (match == "") || (stat == match) {
if err == nil {
data = append(data, []string{stat, name})
onlylist = append(onlylist, name)
} else {
thirdColumn = true
data = append(data, []string{stat, name, fmt.Sprintf("%v", err)})
onlylist = append(onlylist, fmt.Sprintf("%v: %v", name, err))
}
}
}
if nameOnly {
fmt.Println(strings.Join(onlylist, "\n"))
return nil
}
table := tablewriter.NewWriter(os.Stdout)
table.SetAutoWrapText(false)
if thirdColumn {
table.SetHeader([]string{"Status", "Name", "Error"})
} else {
table.SetHeader([]string{"Status", "Name"})
}
for _, v := range data {
table.Append(v)
}
table.Render() // Send output
return nil
}
// TestingInitRepo initializes a repo.
// Uses bx.Vcs to create ".git" or whatever.
// Uses bx.Vcs to discover what was created, testing its work.
func (bx *Box) TestingInitRepo() error {
if bx.Vcs == nil {
fmt.Println("bx.Vcs is nil")
fmt.Printf("BLACKBOX_VCS=%q\n", os.Getenv("BLACKBOX_VCS"))
os.Exit(1)
}
fmt.Printf("ABOUT TO CALL TestingInitRepo\n")
fmt.Printf("vcs = %v\n", bx.Vcs.Name())
err := bx.Vcs.TestingInitRepo()
fmt.Printf("RETURNED from TestingInitRepo: %v\n", err)
fmt.Println(os.Getwd())
if err != nil {
return fmt.Errorf("TestingInitRepo returned: %w", err)
}
if b, _ := bx.Vcs.Discover(); !b {
return fmt.Errorf("TestingInitRepo failed Discovery")
}
return nil
}


@@ -0,0 +1,84 @@
package commitlater
import (
"fmt"
)
type future struct {
message string // Message that describes this transaction.
dir string // Basedir of the files
files []string // Names of the files
display []string // Names as displayed to the user
}
// List is a queue of commits to be performed later.
type List struct {
items []*future
}
// Add queues up a future commit.
func (list *List) Add(message string, repobasedir string, files []string) {
item := &future{
message: message,
dir: repobasedir,
files: files,
}
list.items = append(list.items, item)
}
func sameDirs(l *List) bool {
if len(l.items) <= 1 {
return true
}
for _, k := range l.items[1:] {
if k.dir != l.items[0].dir {
return false
}
}
return true
}
// Flush executes queued commits.
func (list *List) Flush(
title string,
fadd func([]string) error,
fcommit func([]string, string, []string) error,
) error {
// Just list the individual commit commands.
if title == "" || len(list.items) < 2 || !sameDirs(list) {
for _, fut := range list.items {
err := fadd(fut.files)
if err != nil {
return fmt.Errorf("add files1 (%q) failed: %w", fut.files, err)
}
err = fcommit([]string{fut.message}, fut.dir, fut.files)
if err != nil {
return fmt.Errorf("commit files (%q) failed: %w", fut.files, err)
}
}
return nil
}
// Create a long commit message.
var m []string
var f []string
for _, fut := range list.items {
err := fadd(fut.files)
if err != nil {
return fmt.Errorf("add files2 (%q) failed: %w", fut.files, err)
}
m = append(m, fut.message)
f = append(f, fut.files...)
}
msg := []string{title}
for _, mm := range m {
msg = append(msg, " * "+mm)
}
err := fcommit(msg, list.items[0].dir, f)
if err != nil {
return fmt.Errorf("commit files (%q) failed: %w", f, err)
}
return nil
}
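/*
A minimal usage sketch (gitAdd and gitCommit are hypothetical closures; in
blackbox the VCS plug-in supplies the real ones):

	var later commitlater.List
	later.Add("ENCRYPTED: foo.txt", "/repo", []string{"foo.txt.gpg"})
	later.Add("NEW ADMIN: alice", "/repo", []string{".blackbox/blackbox-admins.txt"})
	err := later.Flush("blackbox updates", gitAdd, gitCommit)

With a non-empty title and two or more items in the same directory, Flush
makes one combined commit whose message lists each queued message as a bullet;
otherwise it adds and commits each item separately.
*/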

pkg/crypters/_all/all.go Normal file

@@ -0,0 +1,5 @@
package all
import (
_ "github.com/StackExchange/blackbox/v2/pkg/crypters/gnupg"
)

pkg/crypters/crypters.go Normal file

@@ -0,0 +1,58 @@
package crypters
import (
"sort"
"strings"
"github.com/StackExchange/blackbox/v2/models"
)
// Crypter is the handle
type Crypter interface {
models.Crypter
}
// NewFnSig function signature needed by reg.
type NewFnSig func(debug bool) (Crypter, error)
// Item stores one item
type Item struct {
Name string
New NewFnSig
Priority int
}
// Catalog is the list of registered crypters.
var Catalog []*Item
// SearchByName returns a Crypter handle for name.
// The search is case insensitive.
func SearchByName(name string, debug bool) Crypter {
name = strings.ToLower(name)
for _, v := range Catalog {
//fmt.Printf("Trying %v %v\n", v.Name)
if strings.ToLower(v.Name) == name {
chandle, err := v.New(debug)
if err != nil {
return nil // No idea how that would happen.
}
//fmt.Printf("USING! %v\n", v.Name)
return chandle
}
}
return nil
}
// Register a new Crypter.
func Register(name string, priority int, newfn NewFnSig) {
//fmt.Printf("CRYPTER registered: %v\n", name)
item := &Item{
Name: name,
New: newfn,
Priority: priority,
}
Catalog = append(Catalog, item)
// Keep the list sorted.
sort.Slice(Catalog, func(i, j int) bool { return Catalog[j].Priority < Catalog[i].Priority })
}
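/*
Back ends register themselves from init() and are selected by name. A minimal
sketch (myCrypter is a hypothetical type implementing the Crypter interface;
the real example is pkg/crypters/gnupg):

	func init() {
		crypters.Register("MyCrypter", 50, func(debug bool) (crypters.Crypter, error) {
			return &myCrypter{}, nil
		})
	}

	// A caller then picks a back end by (case-insensitive) name:
	handle := crypters.SearchByName("mycrypter", false)
*/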

pkg/crypters/gnupg/gnupg.go Normal file

@@ -0,0 +1,180 @@
package gnupg
import (
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"syscall"
"github.com/StackExchange/blackbox/v2/pkg/bblog"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
"github.com/StackExchange/blackbox/v2/pkg/crypters"
)
var pluginName = "GnuPG"
func init() {
crypters.Register(pluginName, 100, registerNew)
}
// CrypterHandle is the handle
type CrypterHandle struct {
GPGCmd string // "gpg2" or "gpg"
logErr *log.Logger
logDebug *log.Logger
}
func registerNew(debug bool) (crypters.Crypter, error) {
crypt := &CrypterHandle{
logErr: bblog.GetErr(),
logDebug: bblog.GetDebug(debug),
}
// Which binary to use?
path, err := exec.LookPath("gpg2")
if err != nil {
path, err = exec.LookPath("gpg")
if err != nil {
path = "gpg2"
}
}
crypt.GPGCmd = path
return crypt, nil
}
// Name returns my name.
func (crypt CrypterHandle) Name() string {
return pluginName
}
// Decrypt name+".gpg", possibly overwriting name.
func (crypt CrypterHandle) Decrypt(filename string, umask int, overwrite bool) error {
a := []string{
"--use-agent",
"-q",
"--decrypt",
"-o", filename,
}
if overwrite {
a = append(a, "--yes")
}
a = append(a, filename+".gpg")
oldumask := syscall.Umask(umask)
err := bbutil.RunBash(crypt.GPGCmd, a...)
syscall.Umask(oldumask)
return err
}
// Cat returns the plaintext or, if it is missing, the decrypted cyphertext.
func (crypt CrypterHandle) Cat(filename string) ([]byte, error) {
a := []string{
"--use-agent",
"-q",
"--decrypt",
}
// TODO(tlim): This assumes the entire gpg file fits in memory. If
// this becomes a problem, re-implement this using exec Cmd.StdinPipe()
// and feed the input in chunks.
in, err := ioutil.ReadFile(filename + ".gpg")
if err != nil {
if os.IsNotExist(err) {
// Encrypted file doesn't exist? Return the plaintext.
return ioutil.ReadFile(filename)
}
return nil, err
}
return bbutil.RunBashInputOutput(in, crypt.GPGCmd, a...)
}
// Encrypt name, overwriting name+".gpg"
func (crypt CrypterHandle) Encrypt(filename string, umask int, receivers []string) (string, error) {
var err error
crypt.logDebug.Printf("Encrypt(%q, %d, %q)", filename, umask, receivers)
encrypted := filename + ".gpg"
a := []string{
"--use-agent",
"--yes",
"--trust-model=always",
"--encrypt",
"-o", encrypted,
}
for _, f := range receivers {
a = append(a, "-r", f)
}
a = append(a, "--encrypt")
a = append(a, filename)
//err = bbutil.RunBash("ls", "-la")
oldumask := syscall.Umask(umask)
crypt.logDebug.Printf("Args = %q", a)
err = bbutil.RunBash(crypt.GPGCmd, a...)
syscall.Umask(oldumask)
return encrypted, err
}
// AddNewKey extracts keyname from sourcedir's GnuPG chain to destdir keychain.
// It returns a list of files that may have changed.
func (crypt CrypterHandle) AddNewKey(keyname, repobasedir, sourcedir, destdir string) ([]string, error) {
// $GPG --homedir="$2" --export -a "$KEYNAME" >"$pubkeyfile"
args := []string{
"--export",
"-a",
}
if sourcedir != "" {
args = append(args, "--homedir", sourcedir)
}
args = append(args, keyname)
crypt.logDebug.Printf("ADDNEWKEY: Extracting key=%v: gpg, %v\n", keyname, args)
pubkey, err := bbutil.RunBashOutput("gpg", args...)
if err != nil {
return nil, err
}
if len(pubkey) == 0 {
return nil, fmt.Errorf("Nothing found when %q exported from %q", keyname, sourcedir)
}
// $GPG --no-permission-warning --homedir="$KEYRINGDIR" --import "$pubkeyfile"
args = []string{
"--no-permission-warning",
"--homedir", destdir,
"--import",
}
crypt.logDebug.Printf("ADDNEWKEY: Importing: gpg %v\n", args)
// fmt.Printf("DEBUG: crypter ADD %q", args)
err = bbutil.RunBashInput(pubkey, "gpg", args...)
if err != nil {
return nil, fmt.Errorf("AddNewKey failed: %w", err)
}
// Suggest: ${pubring_path} trustdb.gpg blackbox-admins.txt
var changed []string
// Prefix each file with the relative path to it.
prefix, err := filepath.Rel(repobasedir, destdir)
if err != nil {
//fmt.Printf("FAIL (%v) (%v) (%v)\n", repobasedir, destdir, err)
prefix = destdir
}
for _, file := range []string{"pubring.gpg", "pubring.kbx", "trustdb.gpg"} {
path := filepath.Join(destdir, file)
if bbutil.FileExistsOrProblem(path) {
changed = append(changed, filepath.Join(prefix, file))
}
}
return changed, nil
}


@@ -0,0 +1,107 @@
package gnupg
/*
# How does Blackbox manage key rings?
Blackbox uses the user's .gnupg directory for most actions, such as decrypting data.
Decrypting requires the user's private key, which is stored by the user in their
home directory (and up to them to store safely).
Blackbox does not store the user's private key in the repo.
When encrypting data, blackbox needs the public keys of all the admins, not just the user's.
To assure that the user's `.gnupg` has all these public keys, prior to
encrypting data the public keys are imported from .blackbox, which holds
a keychain containing the public (not private!) keys of all the admins.
FYI: v1 does this import before decrypting, because I didn't know any better.
# Binary compatibility:
When writing v1, we didn't realize that the pubkey.gpg file is a binary format
that is not intended to be portable. In fact, it is intentionally not portable.
This means that all admins must use the exact same version of GnuPG
or the files (pubring.gpg or pubring.kbx) may get corrupted.
In v2, we store the public keys in the portable ascii format
in a file called `.blackbox/public-keys-db.asc`.
It will also update the binary files if they exist.
If `.blackbox/public-keys-db.asc` doesn't exist, it will be created.
Eventually we will stop updating the binary files.
# Importing public keys to the user
How to import the public keys to the user's GPG system:
If pubkeyring-ascii.txt exists:
gpg --import pubkeyring-ascii.asc
Else if pubring.kbx
gpg --import pubring.kbx
Else if pubring.gpg
gpg --import pubring.gpg
This is what v1 does:
#if gpg2 is installed next to gpg like on ubuntu 16
if [[ "$GPG" != "gpg2" ]]; then
$GPG --export --no-default-keyring --keyring "$(get_pubring_path)" >"$keyringasc"
$GPG --import "$keyringasc" 2>&1 | egrep -v 'not changed$' >&2
Else
$GPG --keyring "$(get_pubring_path)" --export | $GPG --import
fi
# How to add a key to the keyring?
Old, binary format:
# Get the key they want to add:
FOO is a user-specified directory, otherwise $HOME/.gnupg:
$GPG --homedir="FOO" --export -a "$KEYNAME" >TEMPFILE
# Import into the binary files:
KEYRINGDIR is .blackbox
$GPG --no-permission-warning --homedir="$KEYRINGDIR" --import TEMPFILE
# Git add any of these files if they exist:
pubring.gpg pubring.kbx trustdb.gpg blackbox-admins.txt
# Tell the user to git commit them.
New, ascii format:
# Get the key to be added. Write to a TEMPFILE
FOO is a user-specified directory, otherwise $HOME/.gnupg:
$GPG --homedir="FOO" --export -a "$KEYNAME" >TEMPFILE
# Make a tempdir called TEMPDIR
# Import the pubkeyring-ascii.txt to TEMPDIR's keyring. (Skip if file not found)
# Import the temp1 data to TEMPDIR
# Export the TEMPDIR to create a new .blackbox/pubkeyring-ascii.txt
PATH_TO_BINARY is the path to .blackbox/pubring.gpg; if that's not found then pubring.kbx
$GPG --keyring PATH_TO_BINARY --export -a --output .blackbox/pubkeyring-ascii.txt
# Git add .blackbox/pubkeyring-ascii.txt and .blackbox/blackbox-admins.txt
# Tell the user to git commit them.
# Delete TEMPDIR
# How to remove a key from the keyring?
Old, binary format:
# Remove key from the binary file
$GPG --no-permission-warning --homedir="$KEYRINGDIR" --batch --yes --delete-key "$KEYNAME" || true
# Git add any of these files if they exist:
pubring.gpg pubring.kbx trustdb.gpg blackbox-admins.txt
# Tell the user to git commit them.
New, ascii format:
# Make a tempdir called TEMPDIR
# Import the pubkeyring-ascii.txt to TEMPDIR's keyring. (Skip if file not found)
# Remove key from the ring file
$GPG --no-permission-warning --homedir="$KEYRINGDIR" --batch --yes --delete-key "$KEYNAME" || true
# Export the TEMPDIR to create a new .blackbox/pubkeyring-ascii.txt
PATH_TO_BINARY is the path to .blackbox/pubring.gpg; if that's not found then pubring.kbx
$GPG --keyring PATH_TO_BINARY --export -a --output .blackbox/pubkeyring-ascii.txt
# Git add .blackbox/pubkeyring-ascii.txt and .blackbox/blackbox-admins.txt
# Update the .blackbox copy of pubring.gpg, pubring.kbx, or trustdb.gpg (if they exist)
# with copies from TEMPDIR (if they exist). Git add any files that are updated.
# Tell the user to git commit them.
# Delete TEMPDIR
*/
//func prepareUserKeychain() error {
// return nil
//}
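// A sketch of the import-order rule described in the comment above
// (assumption: no such helper exists yet; the file names are the ones the
// comment mentions, with the ascii keyring preferred over the binary ones):
//
//	func importPublicKeys(configDir string) error {
//		for _, f := range []string{"pubkeyring-ascii.txt", "pubring.kbx", "pubring.gpg"} {
//			p := filepath.Join(configDir, f)
//			if bbutil.FileExistsOrProblem(p) {
//				return bbutil.RunBash("gpg", "--import", p)
//			}
//		}
//		return fmt.Errorf("no public keyring found in %q", configDir)
//	}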

pkg/makesafe/makesafe.go Normal file

@@ -0,0 +1,285 @@
package makesafe
// makesafe renders untrusted strings safe to display or to paste into a
// shell command line.
// This goes to great lengths to make sure the result is pastable.
// Whitespace and shell "special chars" are handled as expected.
// However to be extra paranoid, unicode is turned into backtick
// printf statements. I don't know anyone that puts unicode in their
// filenames, but I hope they appreciate this.
// Most people would just use strconv.QuoteToGraphic() but I'm a
// control freak.
import (
"fmt"
"strings"
"unicode"
)
type protection int
const (
// Unknown indicates we don't know if it is safe.
Unknown protection = iota
// None requires no special escaping.
None // Nothing special
// SingleQuote is unsafe in bash and requires a single quote.
SingleQuote // Requires at least a single quote
// DoubleQuote is unsafe in bash and requires escaping or other double-quote features.
DoubleQuote // Can only be in a double-quoted string
)
const (
// IsAQuote is either a `'` or `"`
IsAQuote = None
// IsSpace is ascii 32
IsSpace = SingleQuote
// ShellUnsafe is ()!$ or other bash special char
ShellUnsafe = SingleQuote
// GlobUnsafe means could be a glob char (* or ?)
GlobUnsafe = SingleQuote
// InterpolationUnsafe used in bash string interpolation ($)
InterpolationUnsafe = SingleQuote
// HasBackslash things like \n \t \r \000 \xFF
HasBackslash = DoubleQuote
)
func max(i, j protection) protection {
if i > j {
return i
}
return j
}
type tabEntry struct {
level protection
fn func(s rune) string
}
var tab [128]tabEntry
func init() {
for i := 0; i <= 31; i++ { // Control chars
tab[i] = tabEntry{HasBackslash, oct()}
}
tab['\t'] = tabEntry{HasBackslash, literal(`\t`)} // Override
tab['\n'] = tabEntry{HasBackslash, literal(`\n`)} // Override
tab['\r'] = tabEntry{HasBackslash, literal(`\r`)} // Override
tab[' '] = tabEntry{IsSpace, same()}
tab['!'] = tabEntry{ShellUnsafe, same()}
tab['"'] = tabEntry{IsAQuote, same()}
tab['#'] = tabEntry{ShellUnsafe, same()}
tab['@'] = tabEntry{InterpolationUnsafe, same()}
tab['$'] = tabEntry{InterpolationUnsafe, same()}
tab['%'] = tabEntry{InterpolationUnsafe, same()}
tab['&'] = tabEntry{ShellUnsafe, same()}
tab['\''] = tabEntry{IsAQuote, same()}
tab['('] = tabEntry{ShellUnsafe, same()}
tab[')'] = tabEntry{ShellUnsafe, same()}
tab['*'] = tabEntry{GlobUnsafe, same()}
tab['+'] = tabEntry{GlobUnsafe, same()}
tab[','] = tabEntry{None, same()}
tab['-'] = tabEntry{None, same()}
tab['.'] = tabEntry{None, same()}
tab['/'] = tabEntry{None, same()}
for i := '0'; i <= '9'; i++ {
tab[i] = tabEntry{None, same()}
}
tab[':'] = tabEntry{InterpolationUnsafe, same()} // ${foo:=default}
tab[';'] = tabEntry{ShellUnsafe, same()}
tab['<'] = tabEntry{ShellUnsafe, same()}
tab['='] = tabEntry{InterpolationUnsafe, same()} // ${foo:=default}
tab['>'] = tabEntry{ShellUnsafe, same()}
tab['?'] = tabEntry{GlobUnsafe, same()}
tab['@'] = tabEntry{InterpolationUnsafe, same()} // ${myarray[@]};
for i := 'A'; i <= 'Z'; i++ {
tab[i] = tabEntry{None, same()}
}
tab['['] = tabEntry{ShellUnsafe, same()}
tab['\\'] = tabEntry{ShellUnsafe, same()}
tab[']'] = tabEntry{GlobUnsafe, same()}
tab['^'] = tabEntry{GlobUnsafe, same()}
tab['_'] = tabEntry{None, same()}
tab['`'] = tabEntry{ShellUnsafe, same()}
for i := 'a'; i <= 'z'; i++ {
tab[i] = tabEntry{None, same()}
}
tab['{'] = tabEntry{ShellUnsafe, same()}
tab['|'] = tabEntry{ShellUnsafe, same()}
tab['}'] = tabEntry{ShellUnsafe, same()}
tab['~'] = tabEntry{ShellUnsafe, same()}
tab[127] = tabEntry{HasBackslash, oct()}
// Check our work. All indexes should have been set.
for i, e := range tab {
if e.level == 0 || e.fn == nil {
panic(fmt.Sprintf("tabEntry %d not set!", i))
}
}
}
// literal return this exact string.
func literal(s string) func(s rune) string {
return func(rune) string { return s }
}
// same converts the rune to a string.
func same() func(r rune) string {
return func(r rune) string { return string(r) }
}
// oct returns the octal representing the value.
func oct() func(r rune) string {
return func(r rune) string { return fmt.Sprintf(`\%03o`, r) }
}
// Redact returns a string that can be used in a shell single-quoted
// string. It may not be an exact representation, but it is safe
// to include on a command line.
//
// Redacted chars are changed to "X".
// If anything is redacted, the string is surrounded by double quotes
// ("air quotes") and the string "(redacted)" is added to the end.
// If nothing is redacted, but it contains spaces, it is surrounded
// by double quotes.
//
// Example: `s` -> `s`
// Example: `space cadet.txt` -> `"space cadet.txt"`
// Example: `drink a \t soda` -> `"drink a X soda"(redacted)`
// Example: `smile☺` -> `smile☺`
func Redact(tainted string) string {
if tainted == "" {
return `""`
}
var b strings.Builder
b.Grow(len(tainted) + 10)
redacted := false
needsQuote := false
for _, r := range tainted {
if r == ' ' {
b.WriteRune(r)
needsQuote = true
} else if r == '\'' {
b.WriteRune('X')
redacted = true
} else if r == '"' {
b.WriteRune('\\')
b.WriteRune(r)
needsQuote = true
} else if unicode.IsPrint(r) {
b.WriteRune(r)
} else {
b.WriteRune('X')
redacted = true
}
}
if redacted {
return `"` + b.String() + `"(redacted)`
}
if needsQuote {
return `"` + b.String() + `"`
}
return tainted
}
// RedactMany returns the list after processing each element with Redact().
func RedactMany(items []string) []string {
var r []string
for _, n := range items {
r = append(r, Redact(n))
}
return r
}
// Shell returns the string formatted so that it is safe to be pasted
// into a command line to produce the desired filename as an argument
// to the command.
func Shell(tainted string) string {
if tainted == "" {
return `""`
}
var b strings.Builder
b.Grow(len(tainted) + 10)
level := Unknown
for _, r := range tainted {
if r < 128 {
level = max(level, tab[r].level)
b.WriteString(tab[r].fn(r))
} else {
level = max(level, DoubleQuote)
b.WriteString(escapeRune(r))
}
}
s := b.String()
if level == None {
return tainted
} else if level == SingleQuote {
// A single quoted string accepts all chars except the single
// quote itself, which must be replaced with: '"'"'
return "'" + strings.Join(strings.Split(s, "'"), `'"'"'`) + "'"
} else if level == DoubleQuote {
// A double-quoted string may include \xxx escapes and other
// things. Sadly bash doesn't interpret those, but printf will!
return `$(printf '%q' '` + s + `')`
}
// should not happen
return fmt.Sprintf("%q", s)
}
// escapeRune returns a string of octal escapes that represent the rune.
func escapeRune(r rune) string {
b := []byte(string(r)) // Convert to the individual bytes, utf8-encoded.
// fmt.Printf("rune: len=%d %s %v\n", len(s), s, []byte(s))
switch len(b) {
case 1:
return fmt.Sprintf(`\%03o`, b[0])
case 2:
return fmt.Sprintf(`\%03o\%03o`, b[0], b[1])
case 3:
return fmt.Sprintf(`\%03o\%03o\%03o`, b[0], b[1], b[2])
case 4:
return fmt.Sprintf(`\%03o\%03o\%03o\%03o`, b[0], b[1], b[2], b[3])
default:
return string(rune(r))
}
}
// ShellMany returns the list after processing each element with Shell().
func ShellMany(items []string) []string {
var r []string
for _, n := range items {
r = append(r, Shell(n))
}
return r
}
// FirstFew returns the first few names. If the list is truncated, that is
// noted by appending " (and others)". The exact definition of "few" may change
// over time, and may be based on the number of chars, not the list length.
func FirstFew(sl []string) string {
s, _ := FirstFewFlag(sl)
return s
}
// FirstFewFlag is like FirstFew but returns true if truncation done.
func FirstFewFlag(sl []string) (string, bool) {
const maxitems = 2
const maxlen = 70
if len(sl) < maxitems || len(strings.Join(sl, " ")) < maxlen {
return strings.Join(sl, " "), false
}
return strings.Join(sl[:maxitems], " ") + " (and others)", true
}
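/*
A minimal sketch of how these combine when printing a suggested command line
(hypothetical file names):

	files := []string{"secret one.txt", "naïve.txt"}
	fmt.Println("    git add", strings.Join(ShellMany(files), " "))
	// Each element is escaped so the printed line can be pasted into bash as-is.
	fmt.Println("Encrypted:", FirstFew(RedactMany(files)))
	// Redacted names are safe to embed in a commit message; long lists are
	// truncated with " (and others)".
*/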


@@ -0,0 +1,136 @@
package makesafe
import (
"testing"
)
func TestRedact(t *testing.T) {
for i, test := range []struct{ data, expected string }{
{"", `""`},
{"one", "one"},
{"has space.txt", `"has space.txt"`},
{"has\ttab.txt", `"hasXtab.txt"(redacted)`},
{"has\nnl.txt", `"hasXnl.txt"(redacted)`},
{"has\rret.txt", `"hasXret.txt"(redacted)`},
{"¡que!", `¡que!`},
{"thé", `thé`},
{"pound£", `pound£`},
{"*.go", `*.go`},
{"rm -rf / ; echo done", `"rm -rf / ; echo done"`},
{"smile\u263a", `smile☺`},
{"dub\U0001D4E6", `dub𝓦`},
{"four\U0010FFFF", `"fourX"(redacted)`},
} {
g := Redact(test.data)
if g == test.expected {
t.Logf("%03d: PASSED", i)
} else {
t.Errorf("%03d: FAILED data=%q got=(%s) wanted=(%s)", i, test.data, g, test.expected)
}
}
}
func TestRedactMany(t *testing.T) {
data := []string{
"",
"one",
"has space.txt",
"has\ttab.txt",
}
g := RedactMany(data)
if len(g) == 4 && g[0] == `""` && g[1] == "one" && g[2] == `"has space.txt"` && g[3] == `"hasXtab.txt"(redacted)` {
t.Logf("PASSED")
} else {
t.Errorf("FAILED got=(%q)", g)
}
}
func TestShell(t *testing.T) {
for i, test := range []struct{ data, expected string }{
{"", `""`},
{"one", "one"},
{"two\n", `$(printf '%q' 'two\n')`},
{"ta tab", `$(printf '%q' 'ta\ttab')`},
{"tab\ttab", `$(printf '%q' 'tab\ttab')`},
{"new\nline", `$(printf '%q' 'new\nline')`},
{"¡que!", `$(printf '%q' '\302\241que!')`},
{"thé", `$(printf '%q' 'th\303\251')`},
{"pound£", `$(printf '%q' 'pound\302\243')`},
{"*.go", `'*.go'`},
{"rm -rf / ; echo done", `'rm -rf / ; echo done'`},
{"smile\u263a", `$(printf '%q' 'smile\342\230\272')`},
{"dub\U0001D4E6", `$(printf '%q' 'dub\360\235\223\246')`},
{"four\U0010FFFF", `$(printf '%q' 'four\364\217\277\277')`},
} {
g := Shell(test.data)
if g == test.expected {
t.Logf("%03d: PASSED", i)
//t.Logf("%03d: PASSED go(%q) bash: %s", i, test.data, test.expected)
} else {
t.Errorf("%03d: FAILED data=%q got=`%s` wanted=`%s`", i, test.data, g, test.expected)
}
}
}
func TestEscapeRune(t *testing.T) {
for i, test := range []struct {
data rune
expected string
}{
{'a', `\141`},
{'é', `\303\251`},
{'☺', `\342\230\272`},
{'글', `\352\270\200`},
{'𩸽', `\360\251\270\275`},
//{"\U0010FEDC", `"'\U0010fedc'"`},
} {
g := escapeRune(test.data)
if g == test.expected {
t.Logf("%03d: PASSED go=(%q) bash=(%s)", i, test.data, test.expected)
} else {
t.Errorf("%03d: FAILED data=%q got=(%s) wanted=(%s)", i, test.data, g, test.expected)
}
}
}
func TestShellMany(t *testing.T) {
data := []string{
"",
"one",
"has space.txt",
"¡que!",
}
g := ShellMany(data)
if len(g) == 4 && g[0] == `""` && g[1] == "one" && g[2] == `'has space.txt'` && g[3] == `$(printf '%q' '\302\241que!')` {
t.Logf("PASSED")
} else {
t.Errorf("FAILED got=(%q)", g)
}
}
func TestFirstFewFlag(t *testing.T) {
for i, test := range []struct {
data []string
expectedFlag bool
expectedString string
}{
{[]string{"", "one"}, false, ` one`},
{[]string{"one"}, false, `one`},
{[]string{"one", "two", "three", "longlonglong", "longlonglonglong", "manylonglonglog", "morelongonglonglong"}, true, ``},
} {
gs, gf := FirstFewFlag(test.data)
if test.expectedFlag {
if gf == test.expectedFlag {
t.Logf("%03d: PASSED", i)
} else {
t.Errorf("%03d: FAILED data=%q got=(%q) wanted=(%q)", i, test.data, gs, test.expectedString)
}
} else {
if gf == test.expectedFlag && gs == test.expectedString {
t.Logf("%03d: PASSED", i)
} else {
t.Errorf("%03d: FAILED data=%q got=(%q) wanted=(%q)", i, test.data, gs, test.expectedString)
}
}
}
}

6
pkg/vcs/_all/all.go Normal file
View File

@@ -0,0 +1,6 @@
package all
import (
_ "github.com/StackExchange/blackbox/v2/pkg/vcs/git"
_ "github.com/StackExchange/blackbox/v2/pkg/vcs/none"
)

226
pkg/vcs/git/git.go Normal file
View File

@@ -0,0 +1,226 @@
package git
import (
"fmt"
"path/filepath"
"strings"
"github.com/StackExchange/blackbox/v2/pkg/bbutil"
"github.com/StackExchange/blackbox/v2/pkg/commitlater"
"github.com/StackExchange/blackbox/v2/pkg/makesafe"
"github.com/StackExchange/blackbox/v2/pkg/vcs"
)
var pluginName = "GIT"
func init() {
vcs.Register(pluginName, 100, newGit)
}
// VcsHandle is the handle
type VcsHandle struct {
commitTitle string
commitHeaderPrinted bool // Has the "NEXT STEP" header been printed?
toCommit *commitlater.List // List of future commits
}
func newGit() (vcs.Vcs, error) {
l := &commitlater.List{}
return &VcsHandle{toCommit: l}, nil
}
// Name returns my name.
func (v VcsHandle) Name() string {
return pluginName
}
func ultimate(s string) int { return len(s) - 1 }
// Discover returns true if we are a repo of this type; along with the Abs path to the repo root (or "" if we don't know).
func (v VcsHandle) Discover() (bool, string) {
out, err := bbutil.RunBashOutputSilent("git", "rev-parse", "--show-toplevel")
if err != nil {
return false, ""
}
if out == "" {
fmt.Printf("WARNING: git rev-parse --show-toplevel has NO output??. Seems broken.")
return false, ""
}
if out[ultimate(out)] == '\n' {
out = out[0:ultimate(out)]
}
return err == nil, out
}
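// Illustrative behaviour (a sketch): anywhere inside a git working tree,
// "git rev-parse --show-toplevel" prints the absolute path of the repo root,
// so Discover returns (true, "/abs/path/to/repo"); outside a repo the command
// fails and Discover returns (false, "").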
// SetFileTypeUnix informs the VCS that files should maintain unix-style line endings.
func (v VcsHandle) SetFileTypeUnix(repobasedir string, files ...string) error {
seen := make(map[string]bool)
// Add to the .gitattributes in the same directory as the file.
for _, file := range files {
d, n := filepath.Split(file)
af := filepath.Join(repobasedir, d, ".gitattributes")
err := bbutil.Touch(af)
if err != nil {
return err
}
err = bbutil.AddLinesToFile(af, fmt.Sprintf("%q text eol=lf", n))
if err != nil {
return err
}
seen[af] = true
}
var changedfiles []string
for k := range seen {
changedfiles = append(changedfiles, k)
}
v.NeedsCommit(
"set gitattr=UNIX "+strings.Join(makesafe.RedactMany(files), " "),
repobasedir,
changedfiles,
)
return nil
}
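// Illustrative effect (a sketch; "keys/secret.txt" is a hypothetical file):
// registering keys/secret.txt appends the line
//
//	"secret.txt" text eol=lf
//
// to keys/.gitattributes and queues that .gitattributes file for a later commit.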
// IgnoreAnywhere tells the VCS to ignore these files anywhere in the repo.
func (v VcsHandle) IgnoreAnywhere(repobasedir string, files []string) error {
// Add to the .gitignore file in the repobasedir.
ignore := filepath.Join(repobasedir, ".gitignore")
err := bbutil.Touch(ignore)
if err != nil {
return err
}
err = bbutil.AddLinesToFile(ignore, files...)
if err != nil {
return err
}
v.NeedsCommit(
"gitignore "+strings.Join(makesafe.RedactMany(files), " "),
repobasedir,
[]string{".gitignore"},
)
return nil
}
func gitSafeFilename(name string) string {
// TODO(tlim): Add unit tests.
// TODO(tlim): Confirm that *?[] escaping works.
if name == "" {
return "ERROR"
}
var b strings.Builder
b.Grow(len(name) + 2)
for _, r := range name {
if r == ' ' || r == '*' || r == '?' || r == '[' || r == ']' {
b.WriteRune('\\')
b.WriteRune(r)
} else {
b.WriteRune(r)
}
}
if name[0] == '!' || name[0] == '#' {
return `\` + b.String()
}
return b.String()
}
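// Illustrative output (a sketch; see the TODOs above — the escaping has not
// been fully verified against git's matching rules):
//
//	gitSafeFilename("my file*.txt")   // -> my\ file\*.txt
//	gitSafeFilename("!important.txt") // -> \!important.txt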
// IgnoreFiles tells the VCS to ignore these files, specified relative to RepoBaseDir.
func (v VcsHandle) IgnoreFiles(repobasedir string, files []string) error {
var lines []string
for _, f := range files {
lines = append(lines, "/"+gitSafeFilename(f))
}
// Add to the .gitignore file in the repobasedir.
ignore := filepath.Join(repobasedir, ".gitignore")
err := bbutil.Touch(ignore)
if err != nil {
return err
}
err = bbutil.AddLinesToFile(ignore, lines...)
if err != nil {
return err
}
v.NeedsCommit(
"gitignore "+strings.Join(makesafe.RedactMany(files), " "),
repobasedir,
[]string{".gitignore"},
)
return nil
}
// Add makes a file visible to the VCS (like "git add").
func (v VcsHandle) Add(repobasedir string, files []string) error {
if len(files) == 0 {
return nil
}
// TODO(tlim): Make sure that files are within repobasedir.
var gpgnames []string
for _, n := range files {
gpgnames = append(gpgnames, n+".gpg")
}
return bbutil.RunBash("git", append([]string{"add"}, gpgnames...)...)
}
// CommitTitle indicates what the next commit title will be.
// This is used if a group of commits are merged into one.
func (v *VcsHandle) CommitTitle(title string) {
v.commitTitle = title
}
// NeedsCommit queues up commits for later execution.
func (v *VcsHandle) NeedsCommit(message string, repobasedir string, names []string) {
v.toCommit.Add(message, repobasedir, names)
}
// DebugCommits dumps the list of future commits.
func (v VcsHandle) DebugCommits() commitlater.List {
return *v.toCommit
}
// FlushCommits informs the VCS to do queued up commits.
func (v VcsHandle) FlushCommits() error {
return v.toCommit.Flush(
v.commitTitle,
func(files []string) error {
return bbutil.RunBash("git", append([]string{"add"}, files...)...)
},
v.suggestCommit,
)
// TODO(tlim): Some day we can add a command line flag that indicates that commits are
// to be done for real, not just suggested to the user. At that point, this function
// can call v.toCommit.Flush() with a function that actually does the commits instead
// of suggesting them. Flag could be called --commit=auto vs --commit=suggest.
}
// suggestCommit tells the user what commits are needed.
func (v *VcsHandle) suggestCommit(messages []string, repobasedir string, files []string) error {
if !v.commitHeaderPrinted {
fmt.Printf("NEXT STEP: You need to manually check these in:\n")
}
v.commitHeaderPrinted = true
fmt.Print(` git commit -m'`, strings.Join(messages, `' -m'`)+`'`)
fmt.Print(" ")
fmt.Print(strings.Join(makesafe.ShellMany(files), " "))
fmt.Println()
return nil
}
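// Illustrative output (a sketch; the commit message and file name are
// hypothetical):
//
//	NEXT STEP: You need to manually check these in:
//	     git commit -m'gitignore secret.txt' .gitignore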
// The following are "secret" functions only used by the integration testing system.
// TestingInitRepo initializes a repo.
func (v VcsHandle) TestingInitRepo() error {
return bbutil.RunBash("git", "init")
}

79
pkg/vcs/none/none.go Normal file
View File

@@ -0,0 +1,79 @@
package none
import (
"fmt"
"github.com/StackExchange/blackbox/v2/pkg/commitlater"
"github.com/StackExchange/blackbox/v2/pkg/vcs"
)
var pluginName = "NONE"
func init() {
vcs.Register(pluginName, 0, newNone)
}
// VcsHandle is the handle.
type VcsHandle struct {
repoRoot string
}
func newNone() (vcs.Vcs, error) {
return &VcsHandle{}, nil
}
// Name returns my name.
func (v VcsHandle) Name() string {
return pluginName
}
// Discover returns true if we are a repo of this type; along with the Abs path to the repo root (or "" if we don't know).
func (v VcsHandle) Discover() (bool, string) {
return true, "" // We don't know the root.
}
//// SetRepoRoot informs the Vcs of the VCS root.
//func (v *VcsHandle) SetRepoRoot(dir string) {
// v.repoRoot = dir
//}
// SetFileTypeUnix informs the VCS that files should maintain unix-style line endings.
func (v VcsHandle) SetFileTypeUnix(repobasedir string, files ...string) error {
return nil
}
// IgnoreAnywhere tells the VCS to ignore these files anywhere in the repo.
func (v VcsHandle) IgnoreAnywhere(repobasedir string, files []string) error {
return nil
}
// IgnoreFiles tells the VCS to ignore these files, specified relative to RepoBaseDir.
func (v VcsHandle) IgnoreFiles(repobasedir string, files []string) error {
return nil
}
// CommitTitle sets the title of the next commit.
func (v VcsHandle) CommitTitle(title string) {}
// NeedsCommit queues up commits for later execution.
func (v VcsHandle) NeedsCommit(message string, repobasedir string, names []string) {
return
}
// DebugCommits dumps a list of future commits.
func (v VcsHandle) DebugCommits() commitlater.List {
return commitlater.List{}
}
// FlushCommits informs the VCS to do queued up commits.
func (v VcsHandle) FlushCommits() error {
return nil
}
// The following are "secret" functions only used by the integration testing system.
// TestingInitRepo initializes a repo.
func (v VcsHandle) TestingInitRepo() error {
fmt.Println("VCS=none, TestingInitRepo")
return nil
}

82
pkg/vcs/vcs.go Normal file
View File

@@ -0,0 +1,82 @@
package vcs
import (
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"github.com/StackExchange/blackbox/v2/models"
)
// Vcs is the handle
type Vcs interface {
models.Vcs
}
// NewFnSig function signature needed by reg.
type NewFnSig func() (Vcs, error)
// Item stores one item
type Item struct {
Name string
New NewFnSig
Priority int
}
// Catalog is the list of registered vcs's.
var Catalog []*Item
// Discover polls the VCS plug-ins to determine the VCS of the current directory.
// The first to succeed is returned.
// It never returns nil, since "NONE" is always valid.
func Discover() (Vcs, string) {
for _, v := range Catalog {
h, err := v.New()
if err != nil {
return nil, "" // No idea how that would happen.
}
if b, repodir := h.Discover(); b {
// Try to find the rel path from CWD to RepoBase
wd, err := os.Getwd()
if err != nil {
fmt.Printf("ERROR: Can not determine cwd! Failing!\n")
os.Exit(1)
}
//fmt.Printf("DISCCOVER: WD=%q REPO=%q\n", wd, repodir)
if repodir != wd && strings.HasSuffix(repodir, wd) {
// This is a terrible hack. We're basically guessing
// at the filesystem layout. That said, it works on macOS.
// TODO(tlim): Abstract this out into a separate function
// so we can do integration tests on it (to know if it fails on
// a particular operating system.)
repodir = wd
}
r, err := filepath.Rel(wd, repodir)
if err != nil {
// Wait, we're not relative to each other? Give up and
// just return the abs repodir.
return h, repodir
}
return h, r
}
}
// This can't happen. If it does, we'll panic and that's ok.
return nil, ""
}
// Register a new VCS.
func Register(name string, priority int, newfn NewFnSig) {
//fmt.Printf("VCS registered: %v\n", name)
item := &Item{
Name: name,
New: newfn,
Priority: priority,
}
Catalog = append(Catalog, item)
// Keep the list sorted.
sort.Slice(Catalog, func(i, j int) bool { return Catalog[j].Priority < Catalog[i].Priority })
}
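// Plug-ins register themselves from an init function; for example, the git
// plug-in in pkg/vcs/git does:
//
//	func init() {
//		vcs.Register(pluginName, 100, newGit) // pluginName = "GIT"
//	}
//
// Catalog is kept sorted by descending priority, so Discover tries GIT
// (priority 100) before the always-successful NONE fallback (priority 0).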